/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
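
   For instance, an insn from before the branch that does not feed the
   branch condition can be moved into the slot, roughly:

       addu $6,$7,$8                bne  $4,$0,L1
       bne  $4,$0,L1       ==>      addu $6,$7,$8   ; delay slot
       nop                          ...

   (An illustrative sketch only; the pass works on RTL, not assembly.)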

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
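
   For instance, with an annulling branch the first insn at the target
   can be hoisted into the slot and the branch redirected past it,
   roughly:

       bne   L1                     bne,a L2
       nop                 ==>      ld   [%o0],%o1  ; annulled if untaken
       ...                          ...
     L1:                          L1:
       ld   [%o0],%o1               ld   [%o0],%o1
       add  %o1,1,%o2             L2:
                                    add  %o1,1,%o2

   (An illustrative sketch only.)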

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point
   each insn at the other with REG_CC_USER and REG_CC_SETTER notes.
   Note that these restrictions affect very few machines because most
   RISC machines with delay slots will not use CC0 (the RT is the only
   known exception at this point).
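
   For example, when a compare that sets CC0 sits in a delay slot apart
   from the jump that uses it, the two are linked roughly as

     (insn I1 ... (set (cc0) (compare ...))
        ... with a REG_CC_USER note pointing at I2 ...)
     (jump_insn I2 ... (if_then_else (lt (cc0) (const_int 0)) ...)
        ... with a REG_CC_SETTER note pointing at I1 ...)

   (An illustrative sketch, not literal RTL dump output.)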

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "timevar.h"
#include "target.h"
#include "tree-pass.h"

#ifdef DELAY_SLOTS

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
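
/* A minimal sketch of how this list is typically walked when filling
   slots (assuming the obstack already holds some insns):

     int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
     int i;

     for (i = 0; i < num_unfilled_slots; i++)
       {
         rtx insn = unfilled_slots_base[i];
         ...
       }

   The macros are re-evaluated on each use because growing the obstack
   may move the list.  */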

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, int);
static int insn_sets_resource_p (rtx, struct resources *, int);
static rtx find_end_label (void);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
                                         struct resources *,
                                         struct resources *,
                                         struct resources *,
                                         int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
                                              struct resources *,
                                              struct resources *,
                                              struct resources *,
                                              int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx, int, int, int, int,
                                   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
#ifdef HAVE_return
static void make_return_insns (rtx);
#endif
\f
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
         We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
              || GET_CODE (PATTERN (insn)) == ASM_INPUT
              || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}
\f
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */
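
/* For example, if RES1 marks only hard register 2 and RES2 marks hard
   registers 2 and 3, the register sets intersect and the two resource
   sets conflict.  A purely illustrative sketch:

     struct resources a, b;

     CLEAR_RESOURCE (&a);
     CLEAR_RESOURCE (&b);
     SET_HARD_REG_BIT (a.regs, 2);
     SET_HARD_REG_BIT (b.regs, 2);
     SET_HARD_REG_BIT (b.regs, 3);
     gcc_assert (resource_conflicts_p (&a, &b));  */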

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
        return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, also count
   resources used by a routine that INSN calls.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
                            int include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the effects of a routine called by
   INSN should be included.  CC0 is only modified if it is explicitly
   set; see comments in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
                      int include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
\f
/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   end_of_function_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   end_of_function_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once end_of_function_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.  */

static rtx
find_end_label (void)
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
         || (NONJUMP_INSN_P (insn)
             && (GET_CODE (PATTERN (insn)) == USE
                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so, put a label before it to serve as
     the end_of_function_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
         insn.  */
      while (GET_CODE (temp) == USE)
        temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }

  else if (LABEL_P (insn))
    end_of_function_label = insn;
  else
    {
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      /* If the basic block reorder pass moves the return insn to
         some other place try to locate it again and put our
         end_of_function_label there.  */
      while (insn && ! (JUMP_P (insn)
                        && (GET_CODE (PATTERN (insn)) == RETURN)))
        insn = PREV_INSN (insn);
      if (insn)
        {
          insn = PREV_INSN (insn);

          /* Put the label before any USE insns that may precede the
             RETURN insn.  */
          while (GET_CODE (insn) == USE)
            insn = PREV_INSN (insn);

          emit_label_after (end_of_function_label, insn);
        }
      else
        {
#ifdef HAVE_epilogue
          if (HAVE_epilogue
#ifdef HAVE_return
              && ! HAVE_return
#endif
              )
            {
              /* The RETURN insn has its delay slot filled so we cannot
                 emit the label just before it.  Since we already have
                 an epilogue and cannot emit a new RETURN, we cannot
                 emit the label at all.  */
              end_of_function_label = NULL_RTX;
              return end_of_function_label;
            }
#endif /* HAVE_epilogue */

          /* Otherwise, make a new label and emit a RETURN and BARRIER,
             if needed.  */
          emit_label (end_of_function_label);
#ifdef HAVE_return
          /* We don't bother trying to create a return insn if the
             epilogue has filled delay-slots; we would have to try and
             move the delay-slot fillers to the delay-slots for the new
             return insn or in front of the new return insn.  */
          if (current_function_epilogue_delay_list == NULL
              && HAVE_return)
            {
              /* The return we make may have delay slots too.  */
              rtx insn = gen_return ();
              insn = emit_jump_insn (insn);
              emit_barrier ();
              if (num_delay_slots (insn) > 0)
                obstack_ptr_grow (&unfilled_slots_obstack, insn);
            }
#endif
        }
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
\f
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
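
/* For instance, after one slot of a branch has been filled, the stream
   contains a single insn whose pattern is roughly

     (sequence [
        (jump_insn ...)         ; the branch itself, element 0
        (insn ...)])            ; the delay-slot insn

   (an illustrative sketch of the shape, not a literal dump).  */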

static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug
         info is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
        INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
        {
          next = XEXP (note, 1);
          switch (REG_NOTE_KIND (note))
            {
            case REG_DEAD:
              /* Remove any REG_DEAD notes because we can't rely on them now
                 that the insn has been moved.  */
              remove_note (tem, note);
              break;

            case REG_LABEL:
              /* Keep the label reference count up to date.  */
              if (LABEL_P (XEXP (note, 0)))
                LABEL_NUSES (XEXP (note, 0)) ++;
              break;

            default:
              break;
            }
        }
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
                        XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */
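
/* E.g., successively adding I1, I2 and I3 to an empty list yields the
   INSN_LIST chain

     (insn_list I1 (insn_list I2 (insn_list I3 (nil))))

   whose order matches the execution order of the slots.  (Illustrative
   sketch.)  */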

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
\f
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
        delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (INSN_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
\f
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
         insn is in the delay list of some other insn.  So delete it from
         the delay list it was in.  */
      if (note)
        {
          if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
              && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
            delete_from_delay_slot (XEXP (note, 0));
        }
      else
        {
          /* The insn setting CC0 is our previous insn, but it may be in
             a delay slot.  It will be the last insn in the delay slot, if
             it is.  */
          rtx trial = previous_insn (insn);
          if (NOTE_P (trial))
            trial = prev_nonnote_insn (trial);
          if (sets_cc0_p (PATTERN (trial)) != 1
              || FIND_REG_INC_NOTE (trial, NULL_RTX))
            return;
          if (PREV_INSN (NEXT_INSN (trial)) == trial)
            delete_related_insns (trial);
          else
            delete_from_delay_slot (trial);
        }
    }
#endif

  delete_related_insns (insn);
}
\f
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
\f
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)           (skip)         (otherwise)
        Bcc.n L1         Bcc',a L1      Bcc,a L1'
        insn             insn           insn2
      L1:              L1:            L1:
        insn2            insn2          insn2
        insn3            insn3        L1':
                                        insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

        Bcc.n L1         Bcc',a L2
        insn             insn
      L1:              L1:
        Bra L2           Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
          && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
          && JUMP_P (next_trial)
          && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
        {
          if (invert_jump (insn, JUMP_LABEL (insn), 1))
            INSN_FROM_TARGET_P (trial) = 1;
          else if (! eligible_for_annul_true (insn, 0, trial, flags))
            return 0;
        }

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
         branch, thread our jump to the target of that branch.  Don't
         change this into a RETURN here, because it may not accept what
         we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && JUMP_P (next_trial)
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN))
        {
          rtx target_label = JUMP_LABEL (next_trial);
          if (target_label == 0)
            target_label = find_end_label ();

          if (target_label)
            {
              /* Recompute the flags based on TARGET_LABEL since threading
                 the jump to TARGET_LABEL may change the direction of the
                 jump (which may change the circumstances in which the
                 delay slot is nullified).  */
              flags = get_jump_flags (insn, target_label);
              if (eligible_for_annul_true (insn, 0, trial, flags))
                reorg_redirect_jump (insn, target_label);
            }
        }

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
\f
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non-conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
      ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Non-conditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
        {
        case 2:
          flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
          break;
        case 1:
          flags |= ATTR_FLAG_likely;
          break;
        case 0:
          flags |= ATTR_FLAG_unlikely;
          break;
        case -1:
          flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
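
/* For example, a conditional branch to a label earlier in the function
   with no REG_BR_PROB note is normally predicted taken, so the returned
   flags would be roughly ATTR_FLAG_backward | ATTR_FLAG_likely.
   (Illustrative sketch.)  */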

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if it will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          return 0;
        case BARRIER:
          /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
             don't scan past JUMP_INSNs, so any barrier we find here must
             have been after a CALL_INSN and hence mean the call doesn't
             return.  */
          return 2;
        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN)
            return 1;
          else if (simplejump_p (insn)
                   && jump_count++ < 10)
            next = JUMP_LABEL (insn);
          else
            return 0;

        default:
          break;
        }
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */
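
/* Worked example for the note-based case below: with
   REG_BR_PROB_BASE == 10000, a REG_BR_PROB note of 9500 yields 2
   (very likely), 6000 yields 1, 2000 yields 0, and 500 yields -1.  */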

static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
        return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
        return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
        return 0;
      else
        return -1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
          || INSN_UID (target_label) > max_uid
          || (uid_to_ruid[INSN_UID (jump_insn)]
              > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
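
/* For instance, for a conditional branch whose pattern is

     (set (pc) (if_then_else (eq (reg 1) (const_int 0))
                             (label_ref TARGET)
                             (pc)))

   this returns (eq (reg 1) (const_int 0)); if the label and (pc) arms
   are swapped, the reversed comparison is returned instead.
   (Illustrative sketch, not a literal dump.)  */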

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
           && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
               || (GET_CODE (XEXP (src, 1)) == LABEL_REF
                   && XEXP (XEXP (src, 1), 0) == target))
           && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
           && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
               || (GET_CODE (XEXP (src, 2)) == LABEL_REF
                   && XEXP (XEXP (src, 2), 0) == target))
           && XEXP (src, 1) == pc_rtx)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
        return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
                               XEXP (XEXP (src, 0), 0),
                               XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
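
/* E.g., (eq x y) dominates a branch on (le x y) with the same operands:
   whenever x == y holds, x <= y holds as well, so
   comparison_dominates_p (EQ, LE) is true.  (Illustrative example.)  */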

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_false (jump, i - 1,
                                       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_true (jump, i - 1,
                                      XVECEXP (pat, 0, i), flags) :
#endif
           eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
           eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
        {
          rtx trial = XEXP (temp, 0);

          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
              || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
            return 0;
        }
    }

  return 1;
}
\f
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
                              rtx delay_list, struct resources *sets,
                              struct resources *needed,
                              struct resources *other_needed,
                              int slots_to_fill, int *pslots_filled,
                              int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump: the condition codes would affect
     the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
        return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
          || insn_sets_resource_p (trial, needed, 0)
          || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
          /* If TRIAL sets CC0, we can't copy it, so we can't steal this
             delay list.  */
          || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
          /* If TRIAL is from the fallthrough code of an annulled branch insn
             in SEQ, we cannot use it.  */
          || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
              && ! INSN_FROM_TARGET_P (trial)))
        return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
         pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
        continue;

      /* We will end up re-vectoring this branch, so compute flags
         based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, 0)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, total_slots_filled, trial, flags)
          : (must_annul || (delay_list == NULL && new_delay_list == NULL))
             && (must_annul = 1,
                 check_annul_list_true_false (0, delay_list)
                 && check_annul_list_true_false (0, new_delay_list)
                 && eligible_for_annul_false (insn, total_slots_filled,
                                              trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          temp = copy_rtx (trial);
          INSN_FROM_TARGET_P (temp) = 1;
          new_delay_list = add_to_delay_list (temp, new_delay_list);
          total_slots_filled++;

          if (--slots_remaining == 0)
            break;
        }
      else
        return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
\f
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
                                   rtx delay_list, struct resources *sets,
                                   struct resources *needed,
                                   struct resources *other_needed,
                                   int slots_to_fill, int *pslots_filled,
                                   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
         of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
          || insn_sets_resource_p (trial, needed, 0)
          || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
          || sets_cc0_p (PATTERN (trial))
#endif
          )

        break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
        {
          delete_from_delay_slot (trial);
          continue;
        }

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, 0)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
          : (must_annul || delay_list == NULL) && (must_annul = 1,
             check_annul_list_true_false (1, delay_list)
             && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          delete_from_delay_slot (trial);
          delay_list = add_to_delay_list (trial, delay_list);

          if (++(*pslots_filled) == slots_to_fill)
            break;
        }
      else
        break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
\f
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH,
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
        continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
          /* We can't share an insn that sets cc0.  */
          && ! sets_cc0_p (pat)
#endif
          && ! insn_references_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &needed, 1)
          && (trial = try_split (pat, trial, 0)) != 0
          /* Update next_trial, in case try_split succeeded.  */
          && (next_trial = next_nonnote_insn (trial))
          /* Likewise THREAD.  */
          && (thread = oldtrial == thread ? trial : thread)
          && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
          /* Have to test this condition if annul condition is different
             from (and less restrictive than) non-annulling one.  */
          && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
        {

          if (! annul_p)
            {
              update_block (trial, thread);
              if (trial == thread)
                thread = next_active_insn (thread);

              delete_related_insns (trial);
              INSN_FROM_TARGET_P (next_to_match) = 0;
            }
          else
            merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

          if (++slot_number == num_slots)
            break;

          next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
        }

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
        {
          rtx dtrial = XVECEXP (pat, 0, i);

          if (! insn_references_resource_p (dtrial, &set, 1)
              && ! insn_sets_resource_p (dtrial, &set, 1)
              && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
              && ! sets_cc0_p (PATTERN (dtrial))
#endif
              && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
              && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
            {
              if (! annul_p)
                {
                  rtx new;

                  update_block (dtrial, thread);
                  new = delete_from_delay_slot (dtrial);
                  if (INSN_DELETED_P (thread))
                    thread = new;
                  INSN_FROM_TARGET_P (next_to_match) = 0;
                }
              else
                merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
                                                  merged_insns);

              if (++slot_number == num_slots)
                break;

              next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
            }
          else
            {
              /* Keep track of the set/referenced resources for the delay
                 slots of any trial insns we encounter.  */
              mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
              mark_referenced_resources (dtrial, &needed, 1);
            }
        }
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we no longer need to do so.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used
     at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
        {
          if (GET_MODE (merged_insns) == SImode)
            {
              rtx new;

              update_block (XEXP (merged_insns, 0), thread);
              new = delete_from_delay_slot (XEXP (merged_insns, 0));
              if (INSN_DELETED_P (thread))
                thread = new;
            }
          else
            {
              update_block (XEXP (merged_insns, 0), thread);
              delete_related_insns (XEXP (merged_insns, 0));
            }
        }

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
\f
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */

static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
         insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial), --insns_to_search)
    {
      if (LABEL_P (trial))
        return 0;

      if (! INSN_P (trial))
        continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* Stop for a CALL and its delay slots because it is difficult to
             track its resource needs correctly.  */
          if (CALL_P (XVECEXP (pat, 0, 0)))
            return 0;

          /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
             slots because it is difficult to track its resource needs
             correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
                && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
                && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
              break;

1666 /* If we found a match, exit this loop early. */
1667 if (i > 0)
1668 break;
1669 }
1670
1671 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1672 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1673 break;
1674 }
1675
1676 /* If we didn't find an insn that matches, return 0. */
1677 if (trial == 0)
1678 return 0;
1679
1680 /* See what resources this insn sets and needs. If they overlap, or
1681 if this insn references CC0, it can't be redundant. */
1682
1683 CLEAR_RESOURCE (&needed);
1684 CLEAR_RESOURCE (&set);
1685 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1686 mark_referenced_resources (insn, &needed, 1);
1687
1688 /* If TARGET is a SEQUENCE, get the main insn. */
1689 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1690 target_main = XVECEXP (PATTERN (target), 0, 0);
1691
1692 if (resource_conflicts_p (&needed, &set)
1693 #ifdef HAVE_cc0
1694 || reg_mentioned_p (cc0_rtx, ipat)
1695 #endif
1696 /* The insn requiring the delay may not set anything needed or set by
1697 INSN. */
1698 || insn_sets_resource_p (target_main, &needed, 1)
1699 || insn_sets_resource_p (target_main, &set, 1))
1700 return 0;
1701
1702 /* Insns we pass may not set either NEEDED or SET, so merge them for
1703 simpler tests. */
1704 needed.memory |= set.memory;
1705 needed.unch_memory |= set.unch_memory;
1706 IOR_HARD_REG_SET (needed.regs, set.regs);
1707
1708 /* This insn isn't redundant if it conflicts with an insn that either is
1709 or will be in a delay slot of TARGET. */
1710
1711 while (delay_list)
1712 {
1713 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1714 return 0;
1715 delay_list = XEXP (delay_list, 1);
1716 }
1717
1718 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1719 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1720 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1721 return 0;
1722
1723 /* Scan backwards until we reach a label or an insn that uses something
1724 INSN sets or sets something INSN uses or sets. */
1725
1726 for (trial = PREV_INSN (target),
1727 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1728 trial && !LABEL_P (trial) && insns_to_search > 0;
1729 trial = PREV_INSN (trial), --insns_to_search)
1730 {
1731 if (!INSN_P (trial))
1732 continue;
1733
1734 pat = PATTERN (trial);
1735 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1736 continue;
1737
1738 if (GET_CODE (pat) == SEQUENCE)
1739 {
1740 /* If this is a CALL_INSN and its delay slots, it is hard to track
1741 the resource needs properly, so give up. */
1742 if (CALL_P (XVECEXP (pat, 0, 0)))
1743 return 0;
1744
1745 /* If this is an INSN or JUMP_INSN with delayed effects, it
1746 is hard to track the resource needs properly, so give up. */
1747
1748 #ifdef INSN_SETS_ARE_DELAYED
1749 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1750 return 0;
1751 #endif
1752
1753 #ifdef INSN_REFERENCES_ARE_DELAYED
1754 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1755 return 0;
1756 #endif
1757
1758 /* See if any of the insns in the delay slot match, updating
1759 resource requirements as we go. */
1760 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1761 {
1762 rtx candidate = XVECEXP (pat, 0, i);
1763
1764 /* If an insn will be annulled if the branch is false, it isn't
1765 considered as a possible duplicate insn. */
1766 if (rtx_equal_p (PATTERN (candidate), ipat)
1767 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1768 && INSN_FROM_TARGET_P (candidate)))
1769 {
1770 /* Show that this insn will be used in the sequel. */
1771 INSN_FROM_TARGET_P (candidate) = 0;
1772 return candidate;
1773 }
1774
1775 /* Unless this is an annulled insn from the target of a branch,
1776 we must stop if it sets anything needed or set by INSN. */
1777 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1778 || ! INSN_FROM_TARGET_P (candidate))
1779 && insn_sets_resource_p (candidate, &needed, 1))
1780 return 0;
1781 }
1782
1783 /* If the insn requiring the delay slot conflicts with INSN, we
1784 must stop. */
1785 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1786 return 0;
1787 }
1788 else
1789 {
1790 /* See if TRIAL is the same as INSN. */
1791 pat = PATTERN (trial);
1792 if (rtx_equal_p (pat, ipat))
1793 return trial;
1794
1795 /* Can't go any further if TRIAL conflicts with INSN. */
1796 if (insn_sets_resource_p (trial, &needed, 1))
1797 return 0;
1798 }
1799 }
1800
1801 return 0;
1802 }
1803 \f
1804 /* Return 1 if THREAD can only be executed in one way. If LABEL is nonzero,
1805 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1806 is nonzero, we are allowed to fall into this thread; otherwise, we are
1807 not.
1808
1809 If LABEL is used more than once or we pass a label other than LABEL before
1810 finding an active insn, we do not own this thread. */
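/* A minimal usage sketch, mirroring the call made for unconditional jumps
   in fill_simple_delay_slots below:

       own_thread_p (JUMP_LABEL (insn), JUMP_LABEL (insn), 0)

   asks whether the jump is the only way to reach its target;
   ALLOW_FALLTHROUGH is 0 because the target must not also be reachable
   by falling into it.  */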
1811
1812 static int
1813 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1814 {
1815 rtx active_insn;
1816 rtx insn;
1817
1818 /* We don't own the function end. */
1819 if (thread == 0)
1820 return 0;
1821
1822 /* Get the first active insn, or THREAD, if it is an active insn. */
1823 active_insn = next_active_insn (PREV_INSN (thread));
1824
1825 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1826 if (LABEL_P (insn)
1827 && (insn != label || LABEL_NUSES (insn) != 1))
1828 return 0;
1829
1830 if (allow_fallthrough)
1831 return 1;
1832
1833 /* Ensure that we reach a BARRIER before any insn or label. */
1834 for (insn = prev_nonnote_insn (thread);
1835 insn == 0 || !BARRIER_P (insn);
1836 insn = prev_nonnote_insn (insn))
1837 if (insn == 0
1838 || LABEL_P (insn)
1839 || (NONJUMP_INSN_P (insn)
1840 && GET_CODE (PATTERN (insn)) != USE
1841 && GET_CODE (PATTERN (insn)) != CLOBBER))
1842 return 0;
1843
1844 return 1;
1845 }
1846 \f
1847 /* Called when INSN is being moved from a location near the target of a jump.
1848 We leave a marker of the form (use (INSN)) immediately in front
1849 of WHERE for mark_target_live_regs. These markers will be deleted when
1850 reorg finishes.
1851
1852 We used to try to update the live status of registers if WHERE is at
1853 the start of a basic block, but that can't work since we may remove a
1854 BARRIER in relax_delay_slots. */
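/* Illustrative shape of the marker (not meant to match any target
   pattern): the emitted insn has a PATTERN of the form

       (use (insn ...))

   i.e. a USE whose operand is the moved insn itself.  The test
   GET_CODE (pat) == USE && NONJUMP_INSN_P (XEXP (pat, 0)) in
   delete_prior_computation below recognizes exactly these markers.  */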
1855
1856 static void
1857 update_block (rtx insn, rtx where)
1858 {
1859 /* Ignore if this was in a delay slot and it came from the target of
1860 a branch. */
1861 if (INSN_FROM_TARGET_P (insn))
1862 return;
1863
1864 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1865
1866 /* INSN might be making a value live in a block where it didn't use to
1867 be. So recompute liveness information for this block. */
1868
1869 incr_ticks_for_insn (insn);
1870 }
1871
1872 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1873 the basic block containing the jump. */
1874
1875 static int
1876 reorg_redirect_jump (rtx jump, rtx nlabel)
1877 {
1878 incr_ticks_for_insn (jump);
1879 return redirect_jump (jump, nlabel, 1);
1880 }
1881
1882 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1883 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1884 that reference values used in INSN. If we find one, then we move the
1885 REG_DEAD note to INSN.
1886
1887 This is needed to handle the case where a later insn (after INSN) has a
1888 REG_DEAD note for a register used by INSN, and this later insn subsequently
1889 gets moved before a CODE_LABEL because it is a redundant insn. In this
1890 case, mark_target_live_regs may be confused into thinking the register
1891 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
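/* For example (hypothetical insns): suppose INSN uses (reg 2) and a later
   insn P carries

       (expr_list:REG_DEAD (reg 2))

   If P is subsequently moved above a CODE_LABEL as a redundant insn, the
   stale note would sit just before the label and make
   mark_target_live_regs think (reg 2) is dead there; splicing the note
   onto INSN first avoids that.  */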
1892
1893 static void
1894 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1895 {
1896 rtx p, link, next;
1897
1898 for (p = next_nonnote_insn (insn); p != delayed_insn;
1899 p = next_nonnote_insn (p))
1900 for (link = REG_NOTES (p); link; link = next)
1901 {
1902 next = XEXP (link, 1);
1903
1904 if (REG_NOTE_KIND (link) != REG_DEAD
1905 || !REG_P (XEXP (link, 0)))
1906 continue;
1907
1908 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1909 {
1910 /* Move the REG_DEAD note from P to INSN. */
1911 remove_note (p, link);
1912 XEXP (link, 1) = REG_NOTES (insn);
1913 REG_NOTES (insn) = link;
1914 }
1915 }
1916 }
1917
1918 /* Called when an insn redundant with start_insn is deleted. If there
1919 is a REG_DEAD note for the target of start_insn between start_insn
1920 and stop_insn, then the REG_DEAD note needs to be deleted since the
1921 value no longer dies there.
1922
1923 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1924 confused into thinking the register is dead. */
1925
1926 static void
1927 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1928 {
1929 rtx p, link, next;
1930
1931 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1932 p = next_nonnote_insn (p))
1933 for (link = REG_NOTES (p); link; link = next)
1934 {
1935 next = XEXP (link, 1);
1936
1937 if (REG_NOTE_KIND (link) != REG_DEAD
1938 || !REG_P (XEXP (link, 0)))
1939 continue;
1940
1941 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1942 {
1943 remove_note (p, link);
1944 return;
1945 }
1946 }
1947 }
1948
1949 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1950
1951 This handles the case of udivmodXi4 instructions which optimize their
1952 output depending on whether any REG_UNUSED notes are present.
1953 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1954 does. */
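/* For example (hypothetical RTL): a udivmodsi4 insn sets both a quotient
   and a remainder register.  If INSN carries

       (expr_list:REG_UNUSED (reg 3))

   for the remainder but REDUNDANT_INSN does not, INSN might be output in
   a form that never computes reg 3, even though it now stands in for an
   insn whose remainder was used; removing the note forces both results
   to be computed.  */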
1955
1956 static void
1957 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1958 {
1959 rtx link, next;
1960
1961 for (link = REG_NOTES (insn); link; link = next)
1962 {
1963 next = XEXP (link, 1);
1964
1965 if (REG_NOTE_KIND (link) != REG_UNUSED
1966 || !REG_P (XEXP (link, 0)))
1967 continue;
1968
1969 if (! find_regno_note (redundant_insn, REG_UNUSED,
1970 REGNO (XEXP (link, 0))))
1971 remove_note (insn, link);
1972 }
1973 }
1974 \f
1975 /* Return the label before INSN, or put a new label there. */
1976
1977 static rtx
1978 get_label_before (rtx insn)
1979 {
1980 rtx label;
1981
1982 /* Find an existing label at this point
1983 or make a new one if there is none. */
1984 label = prev_nonnote_insn (insn);
1985
1986 if (label == 0 || !LABEL_P (label))
1987 {
1988 rtx prev = PREV_INSN (insn);
1989
1990 label = gen_label_rtx ();
1991 emit_label_after (label, prev);
1992 LABEL_NUSES (label) = 0;
1993 }
1994 return label;
1995 }
1996
1997 /* Scan a function looking for insns that need a delay slot and find insns to
1998 put into the delay slot.
1999
2000 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
2001 as calls). We do these first since we don't want jump insns (that are
2002 easier to fill) to get the only insns that could be used for non-jump insns.
2003 When it is zero, only try to fill JUMP_INSNs.
2004
2005 When slots are filled in this manner, the insns (including the
2006 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2007 it is possible to tell whether a delay slot has really been filled
2008 or not. `final' knows how to deal with this, by communicating
2009 through FINAL_SEQUENCE. */
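/* A sketch of the resulting RTL (illustrative, field details elided):

       (insn (sequence [(call_insn ...)     ; the insn needing the slots
                        (insn ...)          ; delay slot 1
                        ...]))

   Element 0 of the SEQUENCE is always the insn requiring the delay
   slots, which is why code throughout this file inspects
   XVECEXP (pat, 0, 0).  */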
2010
2011 static void
2012 fill_simple_delay_slots (int non_jumps_p)
2013 {
2014 rtx insn, pat, trial, next_trial;
2015 int i;
2016 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2017 struct resources needed, set;
2018 int slots_to_fill, slots_filled;
2019 rtx delay_list;
2020
2021 for (i = 0; i < num_unfilled_slots; i++)
2022 {
2023 int flags;
2024 /* Get the next insn to fill. If it has already had any slots assigned,
2025 we can't do anything with it. Maybe we'll improve this later. */
2026
2027 insn = unfilled_slots_base[i];
2028 if (insn == 0
2029 || INSN_DELETED_P (insn)
2030 || (NONJUMP_INSN_P (insn)
2031 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2032 || (JUMP_P (insn) && non_jumps_p)
2033 || (!JUMP_P (insn) && ! non_jumps_p))
2034 continue;
2035
2036 /* It may have been that this insn used to need delay slots, but
2037 now doesn't; ignore in that case. This can happen, for example,
2038 on the HP PA RISC, where the number of delay slots depends on
2039 what insns are nearby. */
2040 slots_to_fill = num_delay_slots (insn);
2041
2042 /* Some machine descriptions have defined instructions to have
2043 delay slots only in certain circumstances which may depend on
2044 nearby insns (which change due to reorg's actions).
2045
2046 For example, the PA port normally has delay slots for unconditional
2047 jumps.
2048
2049 However, the PA port claims such jumps do not have a delay slot
2050 if they are immediate successors of certain CALL_INSNs. This
2051 allows the port to favor filling the delay slot of the call with
2052 the unconditional jump. */
2053 if (slots_to_fill == 0)
2054 continue;
2055
2056 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2057 says how many. After initialization, first try optimizing
2058
2059 call _foo                    call _foo
2060 nop                          add %o7,.-L1,%o7
2061 b,a L1
2062 nop
2063
2064 If this case applies, the delay slot of the call is filled with
2065 the unconditional jump. This is done first to avoid having the
2066 delay slot of the call filled in the backward scan. Also, since
2067 the unconditional jump is likely to also have a delay slot, that
2068 insn must exist when it is subsequently scanned.
2069
2070 This is tried on each insn with delay slots as some machines
2071 have insns which perform calls, but are not represented as
2072 CALL_INSNs. */
2073
2074 slots_filled = 0;
2075 delay_list = 0;
2076
2077 if (JUMP_P (insn))
2078 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2079 else
2080 flags = get_jump_flags (insn, NULL_RTX);
2081
2082 if ((trial = next_active_insn (insn))
2083 && JUMP_P (trial)
2084 && simplejump_p (trial)
2085 && eligible_for_delay (insn, slots_filled, trial, flags)
2086 && no_labels_between_p (insn, trial)
2087 && ! can_throw_internal (trial))
2088 {
2089 rtx *tmp;
2090 slots_filled++;
2091 delay_list = add_to_delay_list (trial, delay_list);
2092
2093 /* TRIAL may have had its delay slot filled, then unfilled. When
2094 the delay slot is unfilled, TRIAL is placed back on the unfilled
2095 slots obstack. Unfortunately, it is placed on the end of the
2096 obstack, not in its original location. Therefore, we must search
2097 from entry i + 1 to the end of the unfilled slots obstack to
2098 try and find TRIAL. */
2099 tmp = &unfilled_slots_base[i + 1];
2100 while (*tmp != trial && tmp != unfilled_slots_next)
2101 tmp++;
2102
2103 /* Remove the unconditional jump from consideration for delay slot
2104 filling and unthread it. */
2105 if (*tmp == trial)
2106 *tmp = 0;
2107 {
2108 rtx next = NEXT_INSN (trial);
2109 rtx prev = PREV_INSN (trial);
2110 if (prev)
2111 NEXT_INSN (prev) = next;
2112 if (next)
2113 PREV_INSN (next) = prev;
2114 }
2115 }
2116
2117 /* Now, scan backwards from the insn to search for a potential
2118 delay-slot candidate. Stop searching when a label or jump is hit.
2119
2120 For each candidate, if it is to go into the delay slot (moved
2121 forward in execution sequence), it must not need or set any resources
2122 that were set by later insns and must not set any resources that
2123 are needed for those insns.
2124
2125 The delay slot insn itself sets resources unless it is a call
2126 (in which case the called routine, not the insn itself, is doing
2127 the setting). */
2128
2129 if (slots_filled < slots_to_fill)
2130 {
2131 CLEAR_RESOURCE (&needed);
2132 CLEAR_RESOURCE (&set);
2133 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2134 mark_referenced_resources (insn, &needed, 0);
2135
2136 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2137 trial = next_trial)
2138 {
2139 next_trial = prev_nonnote_insn (trial);
2140
2141 /* This must be an INSN or CALL_INSN. */
2142 pat = PATTERN (trial);
2143
2144 /* USE and CLOBBER at this level are just for flow; ignore them. */
2145 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2146 continue;
2147
2148 /* Check for resource conflict first, to avoid unnecessary
2149 splitting. */
2150 if (! insn_references_resource_p (trial, &set, 1)
2151 && ! insn_sets_resource_p (trial, &set, 1)
2152 && ! insn_sets_resource_p (trial, &needed, 1)
2153 #ifdef HAVE_cc0
2154 /* Can't separate set of cc0 from its use. */
2155 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2156 #endif
2157 && ! can_throw_internal (trial))
2158 {
2159 trial = try_split (pat, trial, 1);
2160 next_trial = prev_nonnote_insn (trial);
2161 if (eligible_for_delay (insn, slots_filled, trial, flags))
2162 {
2163 /* In this case, we are searching backward, so if we
2164 find insns to put on the delay list, we want
2165 to put them at the head, rather than the
2166 tail, of the list. */
2167
2168 update_reg_dead_notes (trial, insn);
2169 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2170 trial, delay_list);
2171 update_block (trial, trial);
2172 delete_related_insns (trial);
2173 if (slots_to_fill == ++slots_filled)
2174 break;
2175 continue;
2176 }
2177 }
2178
2179 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2180 mark_referenced_resources (trial, &needed, 1);
2181 }
2182 }
2183
2184 /* If all needed slots haven't been filled, we come here. */
2185
2186 /* Try to optimize case of jumping around a single insn. */
2187 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2188 if (slots_filled != slots_to_fill
2189 && delay_list == 0
2190 && JUMP_P (insn)
2191 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2192 {
2193 delay_list = optimize_skip (insn);
2194 if (delay_list)
2195 slots_filled += 1;
2196 }
2197 #endif
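/* Illustrative sketch of the skip case just tried (hypothetical target
   syntax): a conditional branch around a single insn,

       Bcc  L1                 Bcc',a  L1
       insn          ==>       insn         ; in annulled delay slot
     L1:                     L1:

   The skipped insn is placed in an annulled delay slot, using either a
   branch that annuls when the condition is true or an inverted test that
   annuls when it is false, so the insn only takes effect on what used to
   be the fall-through path.  */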
2198
2199 /* Try to get insns from beyond the insn needing the delay slot.
2200 These insns can neither set nor reference resources set in insns being
2201 skipped, cannot set resources in the insn being skipped, and, if this
2202 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2203 call might not return).
2204
2205 There used to be code which continued past the target label if
2206 we saw all uses of the target label. This code did not work,
2207 because it failed to account for some instructions which were
2208 both annulled and marked as from the target. This can happen as a
2209 result of optimize_skip. Since this code was redundant with
2210 fill_eager_delay_slots anyway, it was just deleted. */
2211
2212 if (slots_filled != slots_to_fill
2213 /* If this instruction could throw an exception which is
2214 caught in the same function, then it's not safe to fill
2215 the delay slot with an instruction from beyond this
2216 point. For example, consider:
2217
2218 int i = 2;
2219
2220 try {
2221 f();
2222 i = 3;
2223 } catch (...) {}
2224
2225 return i;
2226
2227 Even though `i' is a local variable, we must be sure not
2228 to put `i = 3' in the delay slot if `f' might throw an
2229 exception.
2230
2231 Presumably, we should also check to see if we could get
2232 back to this function via `setjmp'. */
2233 && ! can_throw_internal (insn)
2234 && (!JUMP_P (insn)
2235 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2236 && ! simplejump_p (insn)
2237 && JUMP_LABEL (insn) != 0)))
2238 {
2239 /* Invariant: if INSN is a JUMP_INSN, TARGET is the insn's jump
2240 label. Otherwise, TARGET is zero. */
2241 rtx target = 0;
2242 int maybe_never = 0;
2243 rtx pat, trial_delay;
2244
2245 CLEAR_RESOURCE (&needed);
2246 CLEAR_RESOURCE (&set);
2247
2248 if (CALL_P (insn))
2249 {
2250 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2251 mark_referenced_resources (insn, &needed, 1);
2252 maybe_never = 1;
2253 }
2254 else
2255 {
2256 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2257 mark_referenced_resources (insn, &needed, 1);
2258 if (JUMP_P (insn))
2259 target = JUMP_LABEL (insn);
2260 }
2261
2262 if (target == 0)
2263 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2264 {
2265 next_trial = next_nonnote_insn (trial);
2266
2267 if (LABEL_P (trial)
2268 || BARRIER_P (trial))
2269 break;
2270
2271 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2272 pat = PATTERN (trial);
2273
2274 /* Stand-alone USE and CLOBBER are just for flow. */
2275 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2276 continue;
2277
2278 /* If this already has filled delay slots, get the insn needing
2279 the delay slots. */
2280 if (GET_CODE (pat) == SEQUENCE)
2281 trial_delay = XVECEXP (pat, 0, 0);
2282 else
2283 trial_delay = trial;
2284
2285 /* Stop our search when we see any kind of jump insn. */
2286 if (JUMP_P (trial_delay))
2287 break;
2288
2289 /* See if we have a resource problem before we try to
2290 split. */
2291 if (GET_CODE (pat) != SEQUENCE
2292 && ! insn_references_resource_p (trial, &set, 1)
2293 && ! insn_sets_resource_p (trial, &set, 1)
2294 && ! insn_sets_resource_p (trial, &needed, 1)
2295 #ifdef HAVE_cc0
2296 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2297 #endif
2298 && ! (maybe_never && may_trap_or_fault_p (pat))
2299 && (trial = try_split (pat, trial, 0))
2300 && eligible_for_delay (insn, slots_filled, trial, flags)
2301 && ! can_throw_internal(trial))
2302 {
2303 next_trial = next_nonnote_insn (trial);
2304 delay_list = add_to_delay_list (trial, delay_list);
2305
2306 #ifdef HAVE_cc0
2307 if (reg_mentioned_p (cc0_rtx, pat))
2308 link_cc0_insns (trial);
2309 #endif
2310
2311 delete_related_insns (trial);
2312 if (slots_to_fill == ++slots_filled)
2313 break;
2314 continue;
2315 }
2316
2317 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2318 mark_referenced_resources (trial, &needed, 1);
2319
2320 /* Ensure we don't put insns between the setting of cc and the
2321 comparison by moving a setting of cc into an earlier delay
2322 slot since these insns could clobber the condition code. */
2323 set.cc = 1;
2324
2325 /* If this is a call or jump, we might not get here. */
2326 if (CALL_P (trial_delay)
2327 || JUMP_P (trial_delay))
2328 maybe_never = 1;
2329 }
2330
2331 /* If there are slots left to fill and our search was stopped by an
2332 unconditional branch, try the insn at the branch target. We can
2333 redirect the branch if it works.
2334
2335 Don't do this if the insn at the branch target is a branch. */
2336 if (slots_to_fill != slots_filled
2337 && trial
2338 && JUMP_P (trial)
2339 && simplejump_p (trial)
2340 && (target == 0 || JUMP_LABEL (trial) == target)
2341 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2342 && ! (NONJUMP_INSN_P (next_trial)
2343 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2344 && !JUMP_P (next_trial)
2345 && ! insn_references_resource_p (next_trial, &set, 1)
2346 && ! insn_sets_resource_p (next_trial, &set, 1)
2347 && ! insn_sets_resource_p (next_trial, &needed, 1)
2348 #ifdef HAVE_cc0
2349 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2350 #endif
2351 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2352 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2353 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2354 && ! can_throw_internal (trial))
2355 {
2356 /* See comment in relax_delay_slots about necessity of using
2357 next_real_insn here. */
2358 rtx new_label = next_real_insn (next_trial);
2359
2360 if (new_label != 0)
2361 new_label = get_label_before (new_label);
2362 else
2363 new_label = find_end_label ();
2364
2365 if (new_label)
2366 {
2367 delay_list
2368 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2369 slots_filled++;
2370 reorg_redirect_jump (trial, new_label);
2371
2372 /* If we merged because we both jumped to the same place,
2373 redirect the original insn also. */
2374 if (target)
2375 reorg_redirect_jump (insn, new_label);
2376 }
2377 }
2378 }
2379
2380 /* If this is an unconditional jump, then try to get insns from the
2381 target of the jump. */
2382 if (JUMP_P (insn)
2383 && simplejump_p (insn)
2384 && slots_filled != slots_to_fill)
2385 delay_list
2386 = fill_slots_from_thread (insn, const_true_rtx,
2387 next_active_insn (JUMP_LABEL (insn)),
2388 NULL, 1, 1,
2389 own_thread_p (JUMP_LABEL (insn),
2390 JUMP_LABEL (insn), 0),
2391 slots_to_fill, &slots_filled,
2392 delay_list);
2393
2394 if (delay_list)
2395 unfilled_slots_base[i]
2396 = emit_delay_sequence (insn, delay_list, slots_filled);
2397
2398 if (slots_to_fill == slots_filled)
2399 unfilled_slots_base[i] = 0;
2400
2401 note_delay_statistics (slots_filled, 0);
2402 }
2403
2404 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2405 /* See if the epilogue needs any delay slots. Try to fill them if so.
2406 The only thing we can do is scan backwards from the end of the
2407 function. If we did this in a previous pass, it is incorrect to do it
2408 again. */
2409 if (current_function_epilogue_delay_list)
2410 return;
2411
2412 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2413 if (slots_to_fill == 0)
2414 return;
2415
2416 slots_filled = 0;
2417 CLEAR_RESOURCE (&set);
2418
2419 /* The frame pointer and stack pointer are needed at the beginning of
2420 the epilogue, so instructions setting them cannot be put in the
2421 epilogue delay slot. However, everything else needed at function
2422 end is safe, so we don't want to use end_of_function_needs here. */
2423 CLEAR_RESOURCE (&needed);
2424 if (frame_pointer_needed)
2425 {
2426 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2427 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2428 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2429 #endif
2430 if (! EXIT_IGNORE_STACK
2431 || current_function_sp_is_unchanging)
2432 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2433 }
2434 else
2435 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2436
2437 #ifdef EPILOGUE_USES
2438 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2439 {
2440 if (EPILOGUE_USES (i))
2441 SET_HARD_REG_BIT (needed.regs, i);
2442 }
2443 #endif
2444
2445 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2446 trial = PREV_INSN (trial))
2447 {
2448 if (NOTE_P (trial))
2449 continue;
2450 pat = PATTERN (trial);
2451 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2452 continue;
2453
2454 if (! insn_references_resource_p (trial, &set, 1)
2455 && ! insn_sets_resource_p (trial, &needed, 1)
2456 && ! insn_sets_resource_p (trial, &set, 1)
2457 #ifdef HAVE_cc0
2458 /* Don't want to mess with cc0 here. */
2459 && ! reg_mentioned_p (cc0_rtx, pat)
2460 #endif
2461 && ! can_throw_internal (trial))
2462 {
2463 trial = try_split (pat, trial, 1);
2464 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2465 {
2466 /* Here as well we are searching backward, so put the
2467 insns we find on the head of the list. */
2468
2469 current_function_epilogue_delay_list
2470 = gen_rtx_INSN_LIST (VOIDmode, trial,
2471 current_function_epilogue_delay_list);
2472 mark_end_of_function_resources (trial, 1);
2473 update_block (trial, trial);
2474 delete_related_insns (trial);
2475
2476 /* Clear deleted bit so final.c will output the insn. */
2477 INSN_DELETED_P (trial) = 0;
2478
2479 if (slots_to_fill == ++slots_filled)
2480 break;
2481 continue;
2482 }
2483 }
2484
2485 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2486 mark_referenced_resources (trial, &needed, 1);
2487 }
2488
2489 note_delay_statistics (slots_filled, 0);
2490 #endif
2491 }
2492 \f
2493 /* Follow any unconditional jump at LABEL;
2494 return the ultimate label reached by any such chain of jumps.
2495 Return null if the chain ultimately leads to a return instruction.
2496 If LABEL is not followed by a jump, return LABEL.
2497 If the chain loops or we can't find the end, return LABEL,
2498 since that tells caller to avoid changing the insn. */
2499
2500 static rtx
2501 follow_jumps (rtx label)
2502 {
2503 rtx insn;
2504 rtx next;
2505 rtx value = label;
2506 int depth;
2507
2508 for (depth = 0;
2509 (depth < 10
2510 && (insn = next_active_insn (value)) != 0
2511 && JUMP_P (insn)
2512 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2513 && onlyjump_p (insn))
2514 || GET_CODE (PATTERN (insn)) == RETURN)
2515 && (next = NEXT_INSN (insn))
2516 && BARRIER_P (next));
2517 depth++)
2518 {
2519 rtx tem;
2520
2521 /* If we have found a cycle, make the insn jump to itself. */
2522 if (JUMP_LABEL (insn) == label)
2523 return label;
2524
2525 tem = next_active_insn (JUMP_LABEL (insn));
2526 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2527 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2528 break;
2529
2530 value = JUMP_LABEL (insn);
2531 }
2532 if (depth == 10)
2533 return label;
2534 return value;
2535 }
2536
2537 /* Try to find insns to place in delay slots.
2538
2539 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2540 or is an unconditional branch if CONDITION is const_true_rtx.
2541 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2542
2543 THREAD is a flow-of-control thread, either the insns to be executed if
2544 the branch is true or those if it is false; THREAD_IF_TRUE says which.
2545
2546 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2547 to see if any potential delay slot insns set things needed there.
2548
2549 LIKELY is nonzero if it is extremely likely that the branch will be
2550 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2551 end of a loop back up to the top.
2552
2553 OWN_THREAD is true if we are the only user of the thread. I.e., it is
2554 the fallthrough code of our jump or the target of the jump when we are
2555 the only jump going there.
2556
2557 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2558 case, we can only take insns from the head of the thread for our delay
2559 slot. We then adjust the jump to point after the insns we have taken. */
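/* An illustrative picture (hypothetical insns):

       bne  L1
       insnA              <- the "false" thread starts here
       ...
     L1:
       insnB              <- the "true" thread starts here

   The caller invokes this routine once per thread; OWN_THREAD decides
   whether insns taken from the thread may simply be deleted or, because
   other paths also reach them, must be copied into the delay slot.  */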
2560
2561 static rtx
2562 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2563 rtx opposite_thread, int likely, int thread_if_true,
2564 int own_thread, int slots_to_fill,
2565 int *pslots_filled, rtx delay_list)
2566 {
2567 rtx new_thread;
2568 struct resources opposite_needed, set, needed;
2569 rtx trial;
2570 int lose = 0;
2571 int must_annul = 0;
2572 int flags;
2573
2574 /* Validate our arguments. */
2575 gcc_assert(condition != const_true_rtx || thread_if_true);
2576 gcc_assert(own_thread || thread_if_true);
2577
2578 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2579
2580 /* If our thread is the end of the subroutine, we can't get any delay
2581 insns from it. */
2582 if (thread == 0)
2583 return delay_list;
2584
2585 /* If this is an unconditional branch, nothing is needed at the
2586 opposite thread. Otherwise, compute what is needed there. */
2587 if (condition == const_true_rtx)
2588 CLEAR_RESOURCE (&opposite_needed);
2589 else
2590 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2591
2592 /* If the insn at THREAD can be split, do it here to avoid having to
2593 update THREAD and NEW_THREAD if it is done in the loop below. Also
2594 initialize NEW_THREAD. */
2595
2596 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2597
2598 /* Scan insns at THREAD. We are looking for an insn that can be removed
2599 from THREAD (it neither sets nor references resources that were set
2600 ahead of it and it doesn't set anything needed by the insns ahead of
2601 it) and that either can be placed in an annulling insn or isn't
2602 needed at OPPOSITE_THREAD. */
2603
2604 CLEAR_RESOURCE (&needed);
2605 CLEAR_RESOURCE (&set);
2606
2607 /* If we do not own this thread, we must stop as soon as we find
2608 something that we can't put in a delay slot, since all we can do
2609 is branch into THREAD at a later point. Therefore, labels stop
2610 the search if this is not the `true' thread. */
2611
2612 for (trial = thread;
2613 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2614 trial = next_nonnote_insn (trial))
2615 {
2616 rtx pat, old_trial;
2617
2618 /* If we have passed a label, we no longer own this thread. */
2619 if (LABEL_P (trial))
2620 {
2621 own_thread = 0;
2622 continue;
2623 }
2624
2625 pat = PATTERN (trial);
2626 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2627 continue;
2628
2629 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2630 don't separate or copy insns that set and use CC0. */
2631 if (! insn_references_resource_p (trial, &set, 1)
2632 && ! insn_sets_resource_p (trial, &set, 1)
2633 && ! insn_sets_resource_p (trial, &needed, 1)
2634 #ifdef HAVE_cc0
2635 && ! (reg_mentioned_p (cc0_rtx, pat)
2636 && (! own_thread || ! sets_cc0_p (pat)))
2637 #endif
2638 && ! can_throw_internal (trial))
2639 {
2640 rtx prior_insn;
2641
2642 /* If TRIAL is redundant with some insn before INSN, we don't
2643 actually need to add it to the delay list; we can merely pretend
2644 we did. */
2645 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2646 {
2647 fix_reg_dead_note (prior_insn, insn);
2648 if (own_thread)
2649 {
2650 update_block (trial, thread);
2651 if (trial == thread)
2652 {
2653 thread = next_active_insn (thread);
2654 if (new_thread == trial)
2655 new_thread = thread;
2656 }
2657
2658 delete_related_insns (trial);
2659 }
2660 else
2661 {
2662 update_reg_unused_notes (prior_insn, trial);
2663 new_thread = next_active_insn (trial);
2664 }
2665
2666 continue;
2667 }
2668
2669 /* There are two ways we can win: If TRIAL doesn't set anything
2670 needed at the opposite thread and can't trap, or if it can
2671 go into an annulled delay slot. */
2672 if (!must_annul
2673 && (condition == const_true_rtx
2674 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2675 && ! may_trap_or_fault_p (pat))))
2676 {
2677 old_trial = trial;
2678 trial = try_split (pat, trial, 0);
2679 if (new_thread == old_trial)
2680 new_thread = trial;
2681 if (thread == old_trial)
2682 thread = trial;
2683 pat = PATTERN (trial);
2684 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2685 goto winner;
2686 }
2687 else if (0
2688 #ifdef ANNUL_IFTRUE_SLOTS
2689 || ! thread_if_true
2690 #endif
2691 #ifdef ANNUL_IFFALSE_SLOTS
2692 || thread_if_true
2693 #endif
2694 )
2695 {
2696 old_trial = trial;
2697 trial = try_split (pat, trial, 0);
2698 if (new_thread == old_trial)
2699 new_thread = trial;
2700 if (thread == old_trial)
2701 thread = trial;
2702 pat = PATTERN (trial);
2703 if ((must_annul || delay_list == NULL) && (thread_if_true
2704 ? check_annul_list_true_false (0, delay_list)
2705 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2706 : check_annul_list_true_false (1, delay_list)
2707 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2708 {
2709 rtx temp;
2710
2711 must_annul = 1;
2712 winner:
2713
2714 #ifdef HAVE_cc0
2715 if (reg_mentioned_p (cc0_rtx, pat))
2716 link_cc0_insns (trial);
2717 #endif
2718
2719 /* If we own this thread, delete the insn. If this is the
2720 destination of a branch, show that a basic block status
2721 may have been updated. In any case, mark the new
2722 starting point of this thread. */
2723 if (own_thread)
2724 {
2725 rtx note;
2726
2727 update_block (trial, thread);
2728 if (trial == thread)
2729 {
2730 thread = next_active_insn (thread);
2731 if (new_thread == trial)
2732 new_thread = thread;
2733 }
2734
2735 /* We are moving this insn, not deleting it. We must
2736 temporarily increment the use count on any referenced
2737 label lest it be deleted by delete_related_insns. */
2738 note = find_reg_note (trial, REG_LABEL, 0);
2739 /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
2740 if (note && LABEL_P (XEXP (note, 0)))
2741 LABEL_NUSES (XEXP (note, 0))++;
2742
2743 delete_related_insns (trial);
2744
2745 if (note && LABEL_P (XEXP (note, 0)))
2746 LABEL_NUSES (XEXP (note, 0))--;
2747 }
2748 else
2749 new_thread = next_active_insn (trial);
2750
2751 temp = own_thread ? trial : copy_rtx (trial);
2752 if (thread_if_true)
2753 INSN_FROM_TARGET_P (temp) = 1;
2754
2755 delay_list = add_to_delay_list (temp, delay_list);
2756
2757 if (slots_to_fill == ++(*pslots_filled))
2758 {
2759 /* Even though we have filled all the slots, we
2760 may be branching to a location that has a
2761 redundant insn. Skip any if so. */
2762 while (new_thread && ! own_thread
2763 && ! insn_sets_resource_p (new_thread, &set, 1)
2764 && ! insn_sets_resource_p (new_thread, &needed, 1)
2765 && ! insn_references_resource_p (new_thread,
2766 &set, 1)
2767 && (prior_insn
2768 = redundant_insn (new_thread, insn,
2769 delay_list)))
2770 {
2771 /* We know we do not own the thread, so no need
2772 to call update_block and delete_insn. */
2773 fix_reg_dead_note (prior_insn, insn);
2774 update_reg_unused_notes (prior_insn, new_thread);
2775 new_thread = next_active_insn (new_thread);
2776 }
2777 break;
2778 }
2779
2780 continue;
2781 }
2782 }
2783 }
2784
2785 /* This insn can't go into a delay slot. */
2786 lose = 1;
2787 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2788 mark_referenced_resources (trial, &needed, 1);
2789
2790 /* Ensure we don't put insns between the setting of cc and the comparison
2791 by moving a setting of cc into an earlier delay slot since these insns
2792 could clobber the condition code. */
2793 set.cc = 1;
2794
2795 /* If this insn is a register-register copy and the next insn has
2796 a use of our destination, change it to use our source. That way,
2797 it will become a candidate for our delay slot the next time
2798 through this loop. This case occurs commonly in loops that
2799 scan a list.
2800
2801 We could check for more complex cases than those tested below,
2802 but it doesn't seem worth it. It might also be a good idea to try
2803 to swap the two insns. That might do better.
2804
2805 We can't do this if the next insn modifies our destination, because
2806 that would make the replacement into the insn invalid. We also can't
2807 do this if it modifies our source, because it might be an earlyclobber
2808 operand. This latter test also prevents updating the contents of
2809 a PRE_INC. We also can't do this if there's overlap of source and
2810 destination. Overlap may happen for larger-than-register-size modes. */
2811
2812 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2813 && REG_P (SET_SRC (pat))
2814 && REG_P (SET_DEST (pat))
2815 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2816 {
2817 rtx next = next_nonnote_insn (trial);
2818
2819 if (next && NONJUMP_INSN_P (next)
2820 && GET_CODE (PATTERN (next)) != USE
2821 && ! reg_set_p (SET_DEST (pat), next)
2822 && ! reg_set_p (SET_SRC (pat), next)
2823 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2824 && ! modified_in_p (SET_DEST (pat), next))
2825 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2826 }
2827 }
2828
2829 /* If we stopped on a branch insn that has delay slots, see if we can
2830 steal some of the insns in those slots. */
2831 if (trial && NONJUMP_INSN_P (trial)
2832 && GET_CODE (PATTERN (trial)) == SEQUENCE
2833 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2834 {
2835 /* If this is the `true' thread, we will want to follow the jump,
2836 so we can only do this if we have taken everything up to here. */
2837 if (thread_if_true && trial == new_thread)
2838 {
2839 delay_list
2840 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2841 delay_list, &set, &needed,
2842 &opposite_needed, slots_to_fill,
2843 pslots_filled, &must_annul,
2844 &new_thread);
2845 /* If we owned the thread and are told that it branched
2846 elsewhere, make sure we own the thread at the new location. */
2847 if (own_thread && trial != new_thread)
2848 own_thread = own_thread_p (new_thread, new_thread, 0);
2849 }
2850 else if (! thread_if_true)
2851 delay_list
2852 = steal_delay_list_from_fallthrough (insn, condition,
2853 PATTERN (trial),
2854 delay_list, &set, &needed,
2855 &opposite_needed, slots_to_fill,
2856 pslots_filled, &must_annul);
2857 }
2858
2859 /* If we haven't found anything for this delay slot and it is very
2860 likely that the branch will be taken, see if the insn at our target
2861 increments or decrements a register with an increment that does not
2862 depend on the destination register. If so, try to place the opposite
2863 arithmetic insn after the jump insn and put the arithmetic insn in the
2864 delay slot. If we can't do this, return. */
2865 if (delay_list == 0 && likely && new_thread
2866 && NONJUMP_INSN_P (new_thread)
2867 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2868 && asm_noperands (PATTERN (new_thread)) < 0)
2869 {
2870 rtx pat = PATTERN (new_thread);
2871 rtx dest;
2872 rtx src;
2873
2874 trial = new_thread;
2875 pat = PATTERN (trial);
2876
2877 if (!NONJUMP_INSN_P (trial)
2878 || GET_CODE (pat) != SET
2879 || ! eligible_for_delay (insn, 0, trial, flags)
2880 || can_throw_internal (trial))
2881 return 0;
2882
2883 dest = SET_DEST (pat), src = SET_SRC (pat);
2884 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2885 && rtx_equal_p (XEXP (src, 0), dest)
2886 && (!FLOAT_MODE_P (GET_MODE (src))
2887 || flag_unsafe_math_optimizations)
2888 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2889 && ! side_effects_p (pat))
2890 {
2891 rtx other = XEXP (src, 1);
2892 rtx new_arith;
2893 rtx ninsn;
2894
2895 /* If this is a constant adjustment, use the same code with
2896 the negated constant. Otherwise, reverse the sense of the
2897 arithmetic. */
2898 if (GET_CODE (other) == CONST_INT)
2899 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2900 negate_rtx (GET_MODE (src), other));
2901 else
2902 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2903 GET_MODE (src), dest, other);
2904
2905 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2906 insn);
2907
2908 if (recog_memoized (ninsn) < 0
2909 || (extract_insn (ninsn), ! constrain_operands (1)))
2910 {
2911 delete_related_insns (ninsn);
2912 return 0;
2913 }
2914
2915 if (own_thread)
2916 {
2917 update_block (trial, thread);
2918 if (trial == thread)
2919 {
2920 thread = next_active_insn (thread);
2921 if (new_thread == trial)
2922 new_thread = thread;
2923 }
2924 delete_related_insns (trial);
2925 }
2926 else
2927 new_thread = next_active_insn (trial);
2928
2929 ninsn = own_thread ? trial : copy_rtx (trial);
2930 if (thread_if_true)
2931 INSN_FROM_TARGET_P (ninsn) = 1;
2932
2933 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2934 (*pslots_filled)++;
2935 }
2936 }
2937
2938 if (delay_list && must_annul)
2939 INSN_ANNULLED_BRANCH_P (insn) = 1;
2940
2941 /* If we are to branch into the middle of this thread, find an appropriate
2942 label or make a new one if none, and redirect INSN to it. If we hit the
2943 end of the function, use the end-of-function label. */
2944 if (new_thread != thread)
2945 {
2946 rtx label;
2947
2948 gcc_assert (thread_if_true);
2949
2950 if (new_thread && JUMP_P (new_thread)
2951 && (simplejump_p (new_thread)
2952 || GET_CODE (PATTERN (new_thread)) == RETURN)
2953 && redirect_with_delay_list_safe_p (insn,
2954 JUMP_LABEL (new_thread),
2955 delay_list))
2956 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2957
2958 if (new_thread == 0)
2959 label = find_end_label ();
2960 else if (LABEL_P (new_thread))
2961 label = new_thread;
2962 else
2963 label = get_label_before (new_thread);
2964
2965 if (label)
2966 reorg_redirect_jump (insn, label);
2967 }
2968
2969 return delay_list;
2970 }
2971 \f
2972 /* Make another attempt to find insns to place in delay slots.
2973
2974 We previously looked for insns located in front of the delay insn
2975 and, for non-jump delay insns, located behind the delay insn.
2976
2977 Here we only try to schedule jump insns and to move insns from either
2978 the target or the following insns into the delay slot. If annulling is
2979 supported, we are likely to be able to do this. Otherwise, we can do
2980 this only if it is safe. */
2981
2982 static void
2983 fill_eager_delay_slots (void)
2984 {
2985 rtx insn;
2986 int i;
2987 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2988
2989 for (i = 0; i < num_unfilled_slots; i++)
2990 {
2991 rtx condition;
2992 rtx target_label, insn_at_target, fallthrough_insn;
2993 rtx delay_list = 0;
2994 int own_target;
2995 int own_fallthrough;
2996 int prediction, slots_to_fill, slots_filled;
2997
2998 insn = unfilled_slots_base[i];
2999 if (insn == 0
3000 || INSN_DELETED_P (insn)
3001 || !JUMP_P (insn)
3002 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3003 continue;
3004
3005 slots_to_fill = num_delay_slots (insn);
3006 /* Some machine descriptions have defined instructions to have
3007 delay slots only in certain circumstances which may depend on
3008 nearby insns (which change due to reorg's actions).
3009
3010 For example, the PA port normally has delay slots for unconditional
3011 jumps.
3012
3013 However, the PA port claims such jumps do not have a delay slot
3014 if they are immediate successors of certain CALL_INSNs. This
3015 allows the port to favor filling the delay slot of the call with
3016 the unconditional jump. */
3017 if (slots_to_fill == 0)
3018 continue;
3019
3020 slots_filled = 0;
3021 target_label = JUMP_LABEL (insn);
3022 condition = get_branch_condition (insn, target_label);
3023
3024 if (condition == 0)
3025 continue;
3026
3027 /* Get the next active fallthrough and target insns and see if we own
3028 them. Then see whether the branch is likely true. We don't need
3029 to do a lot of this for unconditional branches. */
3030
3031 insn_at_target = next_active_insn (target_label);
3032 own_target = own_thread_p (target_label, target_label, 0);
3033
3034 if (condition == const_true_rtx)
3035 {
3036 own_fallthrough = 0;
3037 fallthrough_insn = 0;
3038 prediction = 2;
3039 }
3040 else
3041 {
3042 fallthrough_insn = next_active_insn (insn);
3043 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3044 prediction = mostly_true_jump (insn, condition);
3045 }
3046
3047 /* If this insn is expected to branch, first try to get insns from our
3048 target, then our fallthrough insns. If it is not expected to branch,
3049 try the other order. */
3050
3051 if (prediction > 0)
3052 {
3053 delay_list
3054 = fill_slots_from_thread (insn, condition, insn_at_target,
3055 fallthrough_insn, prediction == 2, 1,
3056 own_target,
3057 slots_to_fill, &slots_filled, delay_list);
3058
3059 if (delay_list == 0 && own_fallthrough)
3060 {
3061 /* Even though we didn't find anything for delay slots,
3062 we might have found a redundant insn which we deleted
3063 from the thread that was filled. So we have to recompute
3064 the next insn at the target. */
3065 target_label = JUMP_LABEL (insn);
3066 insn_at_target = next_active_insn (target_label);
3067
3068 delay_list
3069 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3070 insn_at_target, 0, 0,
3071 own_fallthrough,
3072 slots_to_fill, &slots_filled,
3073 delay_list);
3074 }
3075 }
3076 else
3077 {
3078 if (own_fallthrough)
3079 delay_list
3080 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3081 insn_at_target, 0, 0,
3082 own_fallthrough,
3083 slots_to_fill, &slots_filled,
3084 delay_list);
3085
3086 if (delay_list == 0)
3087 delay_list
3088 = fill_slots_from_thread (insn, condition, insn_at_target,
3089 next_active_insn (insn), 0, 1,
3090 own_target,
3091 slots_to_fill, &slots_filled,
3092 delay_list);
3093 }
3094
3095 if (delay_list)
3096 unfilled_slots_base[i]
3097 = emit_delay_sequence (insn, delay_list, slots_filled);
3098
3099 if (slots_to_fill == slots_filled)
3100 unfilled_slots_base[i] = 0;
3101
3102 note_delay_statistics (slots_filled, 1);
3103 }
3104 }
3105 \f
3106 static void delete_computation (rtx insn);
3107
3108 /* Recursively delete prior insns that compute the value (used only by INSN
3109 which the caller is deleting) stored in the register mentioned by NOTE
3110 which is a REG_DEAD note associated with INSN. */
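/* For example (hypothetical RTL): when the caller is deleting

       (insn 10 (set (reg 2) (plus ...)))
       (jump_insn 11 (set (pc) ...))      ; carries REG_DEAD for (reg 2)

   insn 11's REG_DEAD note for (reg 2) leads the backward walk to
   insn 10; since that insn does nothing but set the now-dead register,
   it is deleted as well.  */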
3111
3112 static void
3113 delete_prior_computation (rtx note, rtx insn)
3114 {
3115 rtx our_prev;
3116 rtx reg = XEXP (note, 0);
3117
3118 for (our_prev = prev_nonnote_insn (insn);
3119 our_prev && (NONJUMP_INSN_P (our_prev)
3120 || CALL_P (our_prev));
3121 our_prev = prev_nonnote_insn (our_prev))
3122 {
3123 rtx pat = PATTERN (our_prev);
3124
3125 /* If we reach a CALL which is not calling a const function
3126 or the callee pops the arguments, then give up. */
3127 if (CALL_P (our_prev)
3128 && (! CONST_OR_PURE_CALL_P (our_prev)
3129 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3130 break;
3131
3132 /* If we reach a SEQUENCE, it is too complex to try to
3133 do anything with it, so give up. We can be run during
3134 and after reorg, so SEQUENCE rtl can legitimately show
3135 up here. */
3136 if (GET_CODE (pat) == SEQUENCE)
3137 break;
3138
3139 if (GET_CODE (pat) == USE
3140 && NONJUMP_INSN_P (XEXP (pat, 0)))
3141 /* reorg creates USEs that look like this. We leave them
3142 alone because reorg needs them for its own purposes. */
3143 break;
3144
3145 if (reg_set_p (reg, pat))
3146 {
3147 if (side_effects_p (pat) && !CALL_P (our_prev))
3148 break;
3149
3150 if (GET_CODE (pat) == PARALLEL)
3151 {
3152 /* If we find a SET of something else, we can't
3153 delete the insn. */
3154
3155 int i;
3156
3157 for (i = 0; i < XVECLEN (pat, 0); i++)
3158 {
3159 rtx part = XVECEXP (pat, 0, i);
3160
3161 if (GET_CODE (part) == SET
3162 && SET_DEST (part) != reg)
3163 break;
3164 }
3165
3166 if (i == XVECLEN (pat, 0))
3167 delete_computation (our_prev);
3168 }
3169 else if (GET_CODE (pat) == SET
3170 && REG_P (SET_DEST (pat)))
3171 {
3172 int dest_regno = REGNO (SET_DEST (pat));
3173 int dest_endregno = END_REGNO (SET_DEST (pat));
3174 int regno = REGNO (reg);
3175 int endregno = END_REGNO (reg);
3176
3177 if (dest_regno >= regno
3178 && dest_endregno <= endregno)
3179 delete_computation (our_prev);
3180
3181 /* We may have a multi-word hard register and some, but not
3182 all, of the words of the register are needed in subsequent
3183 insns. Write REG_UNUSED notes for those parts that were not
3184 needed. */
3185 else if (dest_regno <= regno
3186 && dest_endregno >= endregno)
3187 {
3188 int i;
3189
3190 REG_NOTES (our_prev)
3191 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
3192 REG_NOTES (our_prev));
3193
3194 for (i = dest_regno; i < dest_endregno; i++)
3195 if (! find_regno_note (our_prev, REG_UNUSED, i))
3196 break;
3197
3198 if (i == dest_endregno)
3199 delete_computation (our_prev);
3200 }
3201 }
3202
3203 break;
3204 }
3205
3206 /* If PAT references the register that dies here, it is an
3207 additional use. Hence any prior SET isn't dead. However, this
3208 insn becomes the new place for the REG_DEAD note. */
3209 if (reg_overlap_mentioned_p (reg, pat))
3210 {
3211 XEXP (note, 1) = REG_NOTES (our_prev);
3212 REG_NOTES (our_prev) = note;
3213 break;
3214 }
3215 }
3216 }
3217
3218 /* Delete INSN and recursively delete insns that compute values used only
3219 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3220 If we are running before flow.c, we need do nothing since flow.c will
3221 delete dead code. We also can't know if the registers being used are
3222 dead or not at this point.
3223
3224 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3225 nothing other than set a register that dies in this insn, we can delete
3226 that insn as well.
3227
3228 On machines with CC0, if CC0 is used in this insn, we may be able to
3229 delete the insn that set it. */
3230
3231 static void
3232 delete_computation (rtx insn)
3233 {
3234 rtx note, next;
3235
3236 #ifdef HAVE_cc0
3237 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3238 {
3239 rtx prev = prev_nonnote_insn (insn);
3240 /* We assume that at this stage
3241 CC's are always set explicitly
3242 and always immediately before the jump that
3243 will use them. So if the previous insn
3244 exists to set the CC's, delete it
3245 (unless it performs auto-increments, etc.). */
3246 if (prev && NONJUMP_INSN_P (prev)
3247 && sets_cc0_p (PATTERN (prev)))
3248 {
3249 if (sets_cc0_p (PATTERN (prev)) > 0
3250 && ! side_effects_p (PATTERN (prev)))
3251 delete_computation (prev);
3252 else
3253 /* Otherwise, show that cc0 won't be used. */
3254 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
3255 cc0_rtx, REG_NOTES (prev));
3256 }
3257 }
3258 #endif
3259
3260 for (note = REG_NOTES (insn); note; note = next)
3261 {
3262 next = XEXP (note, 1);
3263
3264 if (REG_NOTE_KIND (note) != REG_DEAD
3265 /* Verify that the REG_NOTE is legitimate. */
3266 || !REG_P (XEXP (note, 0)))
3267 continue;
3268
3269 delete_prior_computation (note, insn);
3270 }
3271
3272 delete_related_insns (insn);
3273 }
3274
3275 /* If all INSN does is set the pc, delete it,
3276 and delete the insn that set the condition codes for it
3277 if that is what the previous insn did. */
3278
3279 static void
3280 delete_jump (rtx insn)
3281 {
3282 rtx set = single_set (insn);
3283
3284 if (set && GET_CODE (SET_DEST (set)) == PC)
3285 delete_computation (insn);
3286 }
3287
3288 \f
3289 /* Once we have tried two ways to fill a delay slot, make a pass over the
3290 code to try to improve the results and to do such things as more jump
3291 threading. */
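/* For example (illustrative): a conditional jump around an unconditional
   one,

       beq  L1                     bne  L2
       jmp  L2           ==>
     L1:                         L1:

   is simplified below by inverting the condition and retargeting the
   jump; jumps to jumps are likewise shortened via follow_jumps.  */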
3292
3293 static void
3294 relax_delay_slots (rtx first)
3295 {
3296 rtx insn, next, pat;
3297 rtx trial, delay_insn, target_label;
3298
3299 /* Look at every JUMP_INSN and see if we can improve it. */
3300 for (insn = first; insn; insn = next)
3301 {
3302 rtx other;
3303
3304 next = next_active_insn (insn);
3305
3306 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3307 the next insn, or jumps to a label that is not the last of a
3308 group of consecutive labels. */
3309 if (JUMP_P (insn)
3310 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3311 && (target_label = JUMP_LABEL (insn)) != 0)
3312 {
3313 target_label = skip_consecutive_labels (follow_jumps (target_label));
3314 if (target_label == 0)
3315 target_label = find_end_label ();
3316
3317 if (target_label && next_active_insn (target_label) == next
3318 && ! condjump_in_parallel_p (insn))
3319 {
3320 delete_jump (insn);
3321 continue;
3322 }
3323
3324 if (target_label && target_label != JUMP_LABEL (insn))
3325 reorg_redirect_jump (insn, target_label);
3326
3327 /* See if this jump conditionally branches around an unconditional
3328 jump. If so, invert this jump and point it to the target of the
3329 second jump. */
3330 if (next && JUMP_P (next)
3331 && any_condjump_p (insn)
3332 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3333 && target_label
3334 && next_active_insn (target_label) == next_active_insn (next)
3335 && no_labels_between_p (insn, next))
3336 {
3337 rtx label = JUMP_LABEL (next);
3338
3339 /* Be careful how we do this to avoid deleting code or
3340 labels that are momentarily dead. See similar optimization
3341 in jump.c.
3342
3343 We also need to ensure we properly handle the case when
3344 invert_jump fails. */
3345
3346 ++LABEL_NUSES (target_label);
3347 if (label)
3348 ++LABEL_NUSES (label);
3349
3350 if (invert_jump (insn, label, 1))
3351 {
3352 delete_related_insns (next);
3353 next = insn;
3354 }
3355
3356 if (label)
3357 --LABEL_NUSES (label);
3358
3359 if (--LABEL_NUSES (target_label) == 0)
3360 delete_related_insns (target_label);
3361
3362 continue;
3363 }
3364 }
3365
3366 /* If this is an unconditional jump and the previous insn is a
3367 conditional jump, try reversing the condition of the previous
3368 insn and swapping our targets. The next pass might be able to
3369 fill the slots.
3370
3371 Don't do this if we expect the conditional branch to be true, because
3372 we would then be making the more common case longer. */
3373
3374 if (JUMP_P (insn)
3375 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3376 && (other = prev_active_insn (insn)) != 0
3377 && any_condjump_p (other)
3378 && no_labels_between_p (other, insn)
3379 && 0 > mostly_true_jump (other,
3380 get_branch_condition (other,
3381 JUMP_LABEL (other))))
3382 {
3383 rtx other_target = JUMP_LABEL (other);
3384 target_label = JUMP_LABEL (insn);
3385
3386 if (invert_jump (other, target_label, 0))
3387 reorg_redirect_jump (insn, other_target);
3388 }

      /* Now look only at cases where we have filled a delay slot.  */
      if (!NONJUMP_INSN_P (insn)
          || GET_CODE (PATTERN (insn)) != SEQUENCE)
        continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
         previous insn.  Remove it from the delay slot if so; then set up
         to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
        {
          delete_from_delay_slot (XVECEXP (pat, 0, 1));
          next = prev_active_insn (next);
          continue;
        }

      /* See if we have a RETURN insn with a filled delay slot followed
         by a RETURN insn with an unfilled delay slot.  If so, we can delete
         the first RETURN (but not its delay insn).  This gives the same
         effect in fewer instructions.

         Only do so if optimizing for size since this results in slower, but
         smaller code.  */
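      /* Shape of the rewrite, as a hypothetical RTL sketch:

             (sequence [(return) (insn A)])          A
             (return)                        =>      (return)

         A is re-emitted by itself and the first RETURN deleted; the
         second RETURN then does the work of both.  */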
      if (optimize_size
          && GET_CODE (PATTERN (delay_insn)) == RETURN
          && next
          && JUMP_P (next)
          && GET_CODE (PATTERN (next)) == RETURN)
        {
          rtx after;
          int i;

          /* Delete the RETURN and just execute the delay list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the RETURN.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_related_insns (insn);
          gcc_assert (GET_CODE (pat) == SEQUENCE);
          after = trial;
          for (i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx this_insn = XVECEXP (pat, 0, i);
              add_insn_after (this_insn, after);
              after = this_insn;
            }
          delete_scheduled_jump (delay_insn);
          continue;
        }

      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
          || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
                || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
        continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
        {
          /* If this jump goes to another unconditional jump, thread it, but
             don't convert a jump into a RETURN here.  */
          trial = skip_consecutive_labels (follow_jumps (target_label));
          if (trial == 0)
            trial = find_end_label ();

          if (trial && trial != target_label
              && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
            {
              reorg_redirect_jump (delay_insn, trial);
              target_label = trial;
            }

          /* If the first insn at TARGET_LABEL is redundant with a previous
             insn, redirect the jump to the following insn and process again.  */
          trial = next_active_insn (target_label);
          if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
              && redundant_insn (trial, insn, 0)
              && ! can_throw_internal (trial))
            {
              /* Figure out where to emit the special USE insn so we don't
                 later incorrectly compute register live/death info.  */
              rtx tmp = next_active_insn (trial);
              if (tmp == 0)
                tmp = find_end_label ();

              if (tmp)
                {
                  /* Insert the special USE insn and update dataflow info.  */
                  update_block (trial, tmp);

                  /* Now emit a label before the special USE insn, and
                     redirect our jump to the new label.  */
                  target_label = get_label_before (PREV_INSN (tmp));
                  reorg_redirect_jump (delay_insn, target_label);
                  next = insn;
                  continue;
                }
            }

          /* Similarly, if it is an unconditional jump with one insn in its
             delay list and that insn is redundant, thread the jump.  */
          if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
              && XVECLEN (PATTERN (trial), 0) == 2
              && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
              && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
                  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
              && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
            {
              target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
              if (target_label == 0)
                target_label = find_end_label ();

              if (target_label
                  && redirect_with_delay_slots_safe_p (delay_insn, target_label,
                                                       insn))
                {
                  reorg_redirect_jump (delay_insn, target_label);
                  next = insn;
                  continue;
                }
            }
        }

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && prev_active_insn (target_label) == insn
          && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                              REG_CC_USER, NULL_RTX)
#endif
          )
        {
          rtx after;
          int i;

          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_related_insns (insn);
          gcc_assert (GET_CODE (pat) == SEQUENCE);
          after = trial;
          for (i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx this_insn = XVECEXP (pat, 0, i);
              add_insn_after (this_insn, after);
              after = this_insn;
            }
          delete_scheduled_jump (delay_insn);
          continue;
        }

      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
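      /* Hypothetical instance:

             (sequence [(jump L1) (insn A)])
             insn A
         L1: ...

         The jump and its slot copy of A can go; the remaining A simply
         falls through into L1.  */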
      if (next && NONJUMP_INSN_P (next)
          && prev_label (next_active_insn (next)) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
        {
          delete_related_insns (insn);
          continue;
        }

      /* See if this jump (with its delay slots) conditionally branches
         around an unconditional jump (without delay slots).  If so, invert
         this jump and point it to the target of the second jump.  We cannot
         do this for annulled jumps, though.  Again, don't convert a jump to
         a RETURN here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && any_condjump_p (delay_insn)
          && next && JUMP_P (next)
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_insn);

          if (label == 0)
            label = find_end_label ();

          /* find_end_label can generate a new label.  Check this first.  */
          if (label
              && no_labels_between_p (insn, next)
              && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c.  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_insn, label, 1))
                {
                  int i;

                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_related_insns (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_related_insns (old_label);
              continue;
            }
        }

      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
\f
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
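
/* Illustratively (a hypothetical case): a filled jump to the
   END_OF_FUNCTION_LABEL whose delay insn is also valid in a RETURN's
   delay slot can become a filled RETURN, eliminating the jump.  */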

static void
make_return_insns (rtx first)
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* If a previous pass filled delay slots in the epilogue, things get a
     bit more complicated, as those filler insns would generally (without
     data flow analysis) have to be executed after any existing branch
     delay slot filler insns.  It is also unknown whether such a
     transformation would actually be profitable.  Note that the existing
     code only cares for branches with (some) filled delay slots.  */
  if (current_function_epilogue_delay_list != NULL)
    return;
#endif

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
      {
        real_return_label = get_label_before (insn);
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (!NONJUMP_INSN_P (insn)
          || GET_CODE (PATTERN (insn)) != SEQUENCE
          || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
          || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
        continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to
         the best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn,
                                                real_return_label,
                                                insn))
            reorg_redirect_jump (jump_insn, real_return_label);
          continue;
        }

      /* See if this RETURN can accept the insns currently in its delay
         slot.  It can if it has at least as many slots and the contents
         of each are valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              XVECEXP (pat, 0, i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1,
                                       XVECEXP (pat, 0, i), flags)))
              break;
        }
      else
        i = 0;

      if (i == XVECLEN (pat, 0))
        continue;

      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
        {
          rtx prev = PREV_INSN (insn);

          delete_related_insns (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_related_insns (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
#endif
\f
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (rtx first)
{
  rtx insn, next, epilogue_insn = 0;
  int i;

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
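
  /* Commentary (added): uid_to_ruid maps an insn's UID to its ordinal
     position in the insn stream, so the relative order of two insns can
     later be compared in constant time.  */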

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (JUMP_P (insn)
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (JUMP_P (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target, 1);
    }

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
          && INSN_P (XEXP (PATTERN (insn), 0)))
        next = delete_related_insns (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_related_insns (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);

  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (dump_file, ";; Reorg function #%d\n", i);

              fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (dump_file, ", ");
                    need_comma = 1;
                    fprintf (dump_file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (dump_file, "\n");
            }
        }
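
      /* A hypothetical fragment of the resulting dump, following the
         formats above:

             ;; Reorg pass #1:
             ;; Reorg function #0
             ;; 12 insns needing delay slots
             ;; 9 got 1 delays, 1 got 2 delays  */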
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (! INSN_DELETED_P (insn)
              && NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) != USE
              && GET_CODE (PATTERN (insn)) != CLOBBER)
            {
              if (GET_CODE (PATTERN (insn)) == SEQUENCE)
                {
                  j = XVECLEN (PATTERN (insn), 0) - 1;
                  if (j > MAX_DELAY_HISTOGRAM)
                    j = MAX_DELAY_HISTOGRAM;
                  if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
                    total_annul_slots[j]++;
                  else
                    total_delay_slots[j]++;
                }
              else if (num_delay_slots (insn) > 0)
                total_delay_slots[0]++;
            }
        }
      fprintf (dump_file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_delay_slots[j])
            {
              if (need_comma)
                fprintf (dump_file, ", ");
              need_comma = 1;
              fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
            }
        }
      fprintf (dump_file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (dump_file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_annul_slots[j])
            {
              if (need_comma)
                fprintf (dump_file, ", ");
              need_comma = 1;
              fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
            }
        }
      fprintf (dump_file, "\n");
#endif
      fprintf (dump_file, "\n");
    }

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPs can be moved around and changed, but past this point that
     cannot happen.  */
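  /* Commentary (added): pred_flags is the same flag word that the
     eligible_for_* attribute tests receive elsewhere in this file, here
     preserved in a REG_BR_PRED note for final assembly output.  */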
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == SEQUENCE)
            insn = XVECEXP (pat, 0, 0);
        }
      if (!JUMP_P (insn))
        continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
                                            GEN_INT (pred_flags),
                                            REG_NOTES (insn));
    }
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info
     is output into a delay slot.  */
  {
    rtx link;

    for (link = current_function_epilogue_delay_list;
         link;
         link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif
}
#endif /* DELAY_SLOTS */
\f
static bool
gate_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  return flag_delayed_branch;
#else
  return 0;
#endif
}

/* Run delay slot optimization.  */
static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}

struct tree_opt_pass pass_delay_slots =
{
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DBR_SCHED,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'd'                                   /* letter */
};

/* Machine dependent reorg pass.  */
static bool
gate_handle_machine_reorg (void)
{
  return targetm.machine_dependent_reorg != 0;
}


static unsigned int
rest_of_handle_machine_reorg (void)
{
  targetm.machine_dependent_reorg ();
  return 0;
}

struct tree_opt_pass pass_machine_reorg =
{
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MACH_DEP,                          /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'M'                                   /* letter */
};