1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 1993, 1994, 1995, 1996 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 /* Instruction reorganization pass.
24
25 This pass runs after register allocation and final jump
26 optimization. It should be the last pass to run before peephole.
27 It serves primarily to fill delay slots of insns, typically branch
28 and call insns. Other insns typically involve more complicated
29 interactions of data dependencies and resource constraints, and
30 are better handled by scheduling before register allocation (by the
31 function `schedule_insns').
32
33 The Branch Penalty is the number of extra cycles that are needed to
34 execute a branch insn. On an ideal machine, branches take a single
35 cycle, and the Branch Penalty is 0. Several RISC machines approach
36 branch delays differently:
37
38 The MIPS and AMD 29000 have a single branch delay slot. Most insns
39 (except other branches) can be used to fill this slot. When the
40 slot is filled, two insns execute in two cycles, reducing the
41 branch penalty to zero.
42
43 The Motorola 88000 conditionally exposes its branch delay slot,
44 so code is shorter when it is turned off, but will run faster
45 when useful insns are scheduled there.
46
47 The IBM ROMP has two forms of branch and call insns, both with and
48 without a delay slot. Much like the 88k, insns not using the delay
49    slot can be shortened (2 bytes vs. 4 bytes), but will run slower.
50
51 The SPARC always has a branch delay slot, but its effects can be
52 annulled when the branch is not taken. This means that failing to
53 find other sources of insns, we can hoist an insn from the branch
54 target that would only be safe to execute knowing that the branch
55 is taken.
56
57 The HP-PA always has a branch delay slot. For unconditional branches
58 its effects can be annulled when the branch is taken. The effects
59 of the delay slot in a conditional branch can be nullified for forward
60 taken branches, or for untaken backward branches. This means
61 we can hoist insns from the fall-through path for forward branches or
62 steal insns from the target of backward branches.
63
64 Three techniques for filling delay slots have been implemented so far:
65
66 (1) `fill_simple_delay_slots' is the simplest, most efficient way
67 to fill delay slots. This pass first looks for insns which come
68 from before the branch and which are safe to execute after the
69 branch. Then it searches after the insn requiring delay slots or,
70 in the case of a branch, for insns that are after the point at
71 which the branch merges into the fallthrough code, if such a point
72 exists. When such insns are found, the branch penalty decreases
73 and no code expansion takes place.
74
75 (2) `fill_eager_delay_slots' is more complicated: it is used for
76 scheduling conditional jumps, or for scheduling jumps which cannot
77 be filled using (1). A machine need not have annulled jumps to use
78 this strategy, but it helps (by keeping more options open).
79 `fill_eager_delay_slots' tries to guess the direction the branch
80 will go; if it guesses right 100% of the time, it can reduce the
81 branch penalty as much as `fill_simple_delay_slots' does. If it
82 guesses wrong 100% of the time, it might as well schedule nops (or
83 on the m88k, unexpose the branch slot). When
84 `fill_eager_delay_slots' takes insns from the fall-through path of
85 the jump, usually there is no code expansion; when it takes insns
86 from the branch target, there is code expansion if it is not the
87 only way to reach that target.
88
89 (3) `relax_delay_slots' uses a set of rules to simplify code that
90 has been reorganized by (1) and (2). It finds cases where
91    a conditional test can be eliminated, jumps can be threaded, extra
92 insns can be eliminated, etc. It is the job of (1) and (2) to do a
93 good job of scheduling locally; `relax_delay_slots' takes care of
94 making the various individual schedules work well together. It is
95 especially tuned to handle the control flow interactions of branch
96 insns. It does nothing for insns with delay slots that do not
97 branch.
98
99 On machines that use CC0, we are very conservative. We will not make
100 a copy of an insn involving CC0 since we want to maintain a 1-1
101    correspondence between the insn that sets CC0 and the insn that uses it.  The insns are
102 allowed to be separated by placing an insn that sets CC0 (but not an insn
103 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
104 delay slot. In that case, we point each insn at the other with REG_CC_USER
105 and REG_CC_SETTER notes. Note that these restrictions affect very few
106 machines because most RISC machines with delay slots will not use CC0
107 (the RT is the only known exception at this point).
108
109 Not yet implemented:
110
111 The Acorn Risc Machine can conditionally execute most insns, so
112 it is profitable to move single insns into a position to execute
113 based on the condition code of the previous insn.
114
115 The HP-PA can conditionally nullify insns, providing a similar
116 effect to the ARM, differing mostly in which insn is "in charge". */
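
/* A note on representation: once its slots are filled, the insn needing
   delay slots and its slot insns are packaged into a single SEQUENCE insn
   whose element 0 is (a copy of) the original insn and whose remaining
   elements are the slot insns, in execution order (see emit_delay_sequence
   below).  A minimal sketch of a filled one-slot branch -- register numbers
   and the label are illustrative only:

	(insn (sequence [
	    (jump_insn (set (pc) (label_ref L1)))	; branch needing a slot
	    (insn (set (reg 3) (reg 4)))		; insn moved into the slot
	 ]))

   For annulling branches, INSN_ANNULLED_BRANCH_P is set on element 0 and
   INSN_FROM_TARGET_P is set on slot insns taken from the branch target.  */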
117
118 #include <stdio.h>
119 #include "config.h"
120 #include "rtl.h"
121 #include "insn-config.h"
122 #include "conditions.h"
123 #include "hard-reg-set.h"
124 #include "basic-block.h"
125 #include "regs.h"
126 #include "insn-flags.h"
127 #include "recog.h"
128 #include "flags.h"
129 #include "output.h"
130 #include "obstack.h"
131 #include "insn-attr.h"
132
133 /* Import list of registers used as spill regs from reload. */
134 extern HARD_REG_SET used_spill_regs;
135
136 /* Import highest label used in function at end of reload. */
137 extern int max_label_num_after_reload;
138
139
140 #ifdef DELAY_SLOTS
141
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
144
145 #ifndef ANNUL_IFTRUE_SLOTS
146 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
147 #endif
148 #ifndef ANNUL_IFFALSE_SLOTS
149 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
150 #endif
151
152 /* Insns which have delay slots that have not yet been filled. */
153
154 static struct obstack unfilled_slots_obstack;
155 static rtx *unfilled_firstobj;
156
157 /* Define macros to refer to the first and last slot containing unfilled
158 insns. These are used because the list may move and its address
159 should be recomputed at each use. */
160
161 #define unfilled_slots_base \
162 ((rtx *) obstack_base (&unfilled_slots_obstack))
163
164 #define unfilled_slots_next \
165 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
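
/* Sketch of how these are used (the details live in the routines below):
   an insn that needs (re)filling is queued with

	obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the filling passes then walk the pointers lying between
   unfilled_slots_base and unfilled_slots_next.  */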
166
167 /* This structure is used to indicate which hardware resources are set or
168 needed by insns so far. */
169
170 struct resources
171 {
172 char memory; /* Insn sets or needs a memory location. */
173   char unch_memory;		/* Insn sets or needs an "unchanging" MEM.  */
174 char volatil; /* Insn sets or needs a volatile memory loc. */
175 char cc; /* Insn sets or needs the condition codes. */
176 HARD_REG_SET regs; /* Which registers are set or needed. */
177 };
178
179 /* Macro to clear all resources. */
180 #define CLEAR_RESOURCE(RES) \
181 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
182 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
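
/* The typical idiom in this file (compare insn_references_resource_p below):

	struct resources insn_res;

	CLEAR_RESOURCE (&insn_res);
	mark_referenced_resources (insn, &insn_res, include_delayed_effects);
	if (resource_conflicts_p (&insn_res, res))
	  ...
 */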
183
184 /* Indicates what resources are required at the beginning of the epilogue. */
185 static struct resources start_of_epilogue_needs;
186
187 /* Indicates what resources are required at function end. */
188 static struct resources end_of_function_needs;
189
190 /* Points to the label before the end of the function. */
191 static rtx end_of_function_label;
192
193 /* This structure is used to record liveness information at the targets or
194 fallthrough insns of branches. We will most likely need the information
195 at targets again, so save them in a hash table rather than recomputing them
196 each time. */
197
198 struct target_info
199 {
200 int uid; /* INSN_UID of target. */
201 struct target_info *next; /* Next info for same hash bucket. */
202 HARD_REG_SET live_regs; /* Registers live at target. */
203 int block; /* Basic block number containing target. */
204 int bb_tick; /* Generation count of basic block info. */
205 };
206
207 #define TARGET_HASH_PRIME 257
208
209 /* Define the hash table itself. */
210 static struct target_info **target_hash_table;
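
/* Buckets are chained through `next' and searched by INSN_UID, as in
   add_to_delay_list below:

	for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (insn))
	    break;
 */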
211
212 /* For each basic block, we maintain a generation number of its basic
213 block info, which is updated each time we move an insn from the
214 target of a jump. This is the generation number indexed by block
215 number. */
216
217 static int *bb_ticks;
218
219 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
220 not always monotonically increase. */
221 static int *uid_to_ruid;
222
223 /* Highest valid index in `uid_to_ruid'. */
224 static int max_uid;
225
226 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
227 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
228 static int stop_search_p PROTO((rtx, int));
229 static int resource_conflicts_p PROTO((struct resources *,
230 struct resources *));
231 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
232 static int insn_sets_resource_p	PROTO((rtx, struct resources *, int));
233 static rtx find_end_label PROTO((void));
234 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
235 static rtx add_to_delay_list PROTO((rtx, rtx));
236 static void delete_from_delay_slot PROTO((rtx));
237 static void delete_scheduled_jump PROTO((rtx));
238 static void note_delay_statistics PROTO((int, int));
239 static rtx optimize_skip PROTO((rtx));
240 static int get_jump_flags PROTO((rtx, rtx));
241 static int rare_destination PROTO((rtx));
242 static int mostly_true_jump PROTO((rtx, rtx));
243 static rtx get_branch_condition PROTO((rtx, rtx));
244 static int condition_dominates_p PROTO((rtx, rtx));
245 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
246 struct resources *,
247 struct resources *,
248 struct resources *,
249 int, int *, int *, rtx *));
250 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
251 struct resources *,
252 struct resources *,
253 struct resources *,
254 int, int *, int *));
255 static void try_merge_delay_insns PROTO((rtx, rtx));
256 static rtx redundant_insn PROTO((rtx, rtx, rtx));
257 static int own_thread_p PROTO((rtx, rtx, int));
258 static int find_basic_block PROTO((rtx));
259 static void update_block PROTO((rtx, rtx));
260 static int reorg_redirect_jump PROTO((rtx, rtx));
261 static void update_reg_dead_notes PROTO((rtx, rtx));
262 static void fix_reg_dead_note PROTO((rtx, rtx));
263 static void update_reg_unused_notes PROTO((rtx, rtx));
264 static void update_live_status PROTO((rtx, rtx));
265 static rtx next_insn_no_annul PROTO((rtx));
266 static void mark_target_live_regs PROTO((rtx, struct resources *));
267 static void fill_simple_delay_slots PROTO((rtx, int));
268 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
269 int, int, int, int *));
270 static void fill_eager_delay_slots PROTO((rtx));
271 static void relax_delay_slots PROTO((rtx));
272 static void make_return_insns PROTO((rtx));
273 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
274 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
275 \f
276 /* Given X, some rtl, and RES, a pointer to a `struct resources', mark
277    which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
278 is TRUE, resources used by the called routine will be included for
279 CALL_INSNs. */
280
281 static void
282 mark_referenced_resources (x, res, include_delayed_effects)
283 register rtx x;
284 register struct resources *res;
285 register int include_delayed_effects;
286 {
287 register enum rtx_code code = GET_CODE (x);
288 register int i, j;
289 register char *format_ptr;
290
291 /* Handle leaf items for which we set resource flags. Also, special-case
292 CALL, SET and CLOBBER operators. */
293 switch (code)
294 {
295 case CONST:
296 case CONST_INT:
297 case CONST_DOUBLE:
298 case PC:
299 case SYMBOL_REF:
300 case LABEL_REF:
301 return;
302
303 case SUBREG:
304 if (GET_CODE (SUBREG_REG (x)) != REG)
305 mark_referenced_resources (SUBREG_REG (x), res, 0);
306 else
307 {
308 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
309 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
310 for (i = regno; i < last_regno; i++)
311 SET_HARD_REG_BIT (res->regs, i);
312 }
313 return;
314
315 case REG:
316 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
317 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
318 return;
319
320 case MEM:
321 /* If this memory shouldn't change, it really isn't referencing
322 memory. */
323 if (RTX_UNCHANGING_P (x))
324 res->unch_memory = 1;
325 else
326 res->memory = 1;
327 res->volatil = MEM_VOLATILE_P (x);
328
329 /* Mark registers used to access memory. */
330 mark_referenced_resources (XEXP (x, 0), res, 0);
331 return;
332
333 case CC0:
334 res->cc = 1;
335 return;
336
337 case UNSPEC_VOLATILE:
338 case ASM_INPUT:
339 case TRAP_IF:
340 /* Traditional asm's are always volatile. */
341 res->volatil = 1;
342 return;
343
344 case ASM_OPERANDS:
345 res->volatil = MEM_VOLATILE_P (x);
346
347 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
348 We can not just fall through here since then we would be confused
349 	 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
350 	 usage, does not indicate a traditional asm.  */
351
352 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
353 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
354 return;
355
356 case CALL:
357 /* The first operand will be a (MEM (xxx)) but doesn't really reference
358 memory. The second operand may be referenced, though. */
359 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
360 mark_referenced_resources (XEXP (x, 1), res, 0);
361 return;
362
363 case SET:
364 /* Usually, the first operand of SET is set, not referenced. But
365 registers used to access memory are referenced. SET_DEST is
366 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
367
368 mark_referenced_resources (SET_SRC (x), res, 0);
369
370 x = SET_DEST (x);
371 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
372 mark_referenced_resources (x, res, 0);
373 else if (GET_CODE (x) == SUBREG)
374 x = SUBREG_REG (x);
375 if (GET_CODE (x) == MEM)
376 mark_referenced_resources (XEXP (x, 0), res, 0);
377 return;
378
379 case CLOBBER:
380 return;
381
382 case CALL_INSN:
383 if (include_delayed_effects)
384 {
385 /* A CALL references memory, the frame pointer if it exists, the
386 stack pointer, any global registers and any registers given in
387 USE insns immediately in front of the CALL.
388
389 However, we may have moved some of the parameter loading insns
390 into the delay slot of this CALL. If so, the USE's for them
391 don't count and should be skipped. */
392 rtx insn = PREV_INSN (x);
393 rtx sequence = 0;
394 int seq_size = 0;
395 rtx next = NEXT_INSN (x);
396 int i;
397
398 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
399 if (NEXT_INSN (insn) != x)
400 {
401 next = NEXT_INSN (NEXT_INSN (insn));
402 sequence = PATTERN (NEXT_INSN (insn));
403 seq_size = XVECLEN (sequence, 0);
404 if (GET_CODE (sequence) != SEQUENCE)
405 abort ();
406 }
407
408 res->memory = 1;
409 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
410 if (frame_pointer_needed)
411 {
412 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
414 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
415 #endif
416 }
417
418 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
419 if (global_regs[i])
420 SET_HARD_REG_BIT (res->regs, i);
421
422 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
423 assume that this call can need any register.
424
425 This is done to be more conservative about how we handle setjmp.
426 We assume that they both use and set all registers. Using all
427 registers ensures that a register will not be considered dead
428 just because it crosses a setjmp call. A register should be
429 considered dead only if the setjmp call returns non-zero. */
430 if (next && GET_CODE (next) == NOTE
431 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
432 SET_HARD_REG_SET (res->regs);
433
434 {
435 rtx link;
436
437 for (link = CALL_INSN_FUNCTION_USAGE (x);
438 link;
439 link = XEXP (link, 1))
440 if (GET_CODE (XEXP (link, 0)) == USE)
441 {
442 for (i = 1; i < seq_size; i++)
443 {
444 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
445 if (GET_CODE (slot_pat) == SET
446 && rtx_equal_p (SET_DEST (slot_pat),
447 SET_DEST (XEXP (link, 0))))
448 break;
449 }
450 if (i >= seq_size)
451 mark_referenced_resources (SET_DEST (XEXP (link, 0)),
452 res, 0);
453 }
454 }
455 }
456
457 /* ... fall through to other INSN processing ... */
458
459 case INSN:
460 case JUMP_INSN:
461
462 #ifdef INSN_REFERENCES_ARE_DELAYED
463 if (! include_delayed_effects
464 && INSN_REFERENCES_ARE_DELAYED (x))
465 return;
466 #endif
467
468 /* No special processing, just speed up. */
469 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
470 return;
471 }
472
473 /* Process each sub-expression and flag what it needs. */
474 format_ptr = GET_RTX_FORMAT (code);
475 for (i = 0; i < GET_RTX_LENGTH (code); i++)
476 switch (*format_ptr++)
477 {
478 case 'e':
479 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
480 break;
481
482 case 'E':
483 for (j = 0; j < XVECLEN (x, i); j++)
484 mark_referenced_resources (XVECEXP (x, i, j), res,
485 include_delayed_effects);
486 break;
487 }
488 }
489 \f
490 /* Given X, a part of an insn, and a pointer to a `struct resources', RES,
491    indicate which resources are modified by the insn.  If INCLUDE_DELAYED_EFFECTS
492 is nonzero, also mark resources potentially set by the called routine.
493
494 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
495 objects are being referenced instead of set.
496
497 We never mark the insn as modifying the condition code unless it explicitly
498 SETs CC0 even though this is not totally correct. The reason for this is
499 that we require a SET of CC0 to immediately precede the reference to CC0.
500 So if some other insn sets CC0 as a side-effect, we know it cannot affect
501 our computation and thus may be placed in a delay slot. */
502
503 static void
504 mark_set_resources (x, res, in_dest, include_delayed_effects)
505 register rtx x;
506 register struct resources *res;
507 int in_dest;
508 int include_delayed_effects;
509 {
510 register enum rtx_code code;
511 register int i, j;
512 register char *format_ptr;
513
514 restart:
515
516 code = GET_CODE (x);
517
518 switch (code)
519 {
520 case NOTE:
521 case BARRIER:
522 case CODE_LABEL:
523 case USE:
524 case CONST_INT:
525 case CONST_DOUBLE:
526 case LABEL_REF:
527 case SYMBOL_REF:
528 case CONST:
529 case PC:
530 /* These don't set any resources. */
531 return;
532
533 case CC0:
534 if (in_dest)
535 res->cc = 1;
536 return;
537
538 case CALL_INSN:
539 /* Called routine modifies the condition code, memory, any registers
540 that aren't saved across calls, global registers and anything
541 explicitly CLOBBERed immediately after the CALL_INSN. */
542
543 if (include_delayed_effects)
544 {
545 rtx next = NEXT_INSN (x);
546 rtx prev = PREV_INSN (x);
547 rtx link;
548
549 res->cc = res->memory = 1;
550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
551 if (call_used_regs[i] || global_regs[i])
552 SET_HARD_REG_BIT (res->regs, i);
553
554 /* If X is part of a delay slot sequence, then NEXT should be
555 the first insn after the sequence. */
556 if (NEXT_INSN (prev) != x)
557 next = NEXT_INSN (NEXT_INSN (prev));
558
559 for (link = CALL_INSN_FUNCTION_USAGE (x);
560 link; link = XEXP (link, 1))
561 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
562 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
563
564 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
565 assume that this call can clobber any register. */
566 if (next && GET_CODE (next) == NOTE
567 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
568 SET_HARD_REG_SET (res->regs);
569 }
570
571   /* ... and also what its RTL says it modifies, if anything.  */
572
573 case JUMP_INSN:
574 case INSN:
575
576 /* An insn consisting of just a CLOBBER (or USE) is just for flow
577 and doesn't actually do anything, so we ignore it. */
578
579 #ifdef INSN_SETS_ARE_DELAYED
580 if (! include_delayed_effects
581 && INSN_SETS_ARE_DELAYED (x))
582 return;
583 #endif
584
585 x = PATTERN (x);
586 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
587 goto restart;
588 return;
589
590 case SET:
591 /* If the source of a SET is a CALL, this is actually done by
592 the called routine. So only include it if we are to include the
593 	 effects of the called routine.  */
594
595 mark_set_resources (SET_DEST (x), res,
596 (include_delayed_effects
597 || GET_CODE (SET_SRC (x)) != CALL),
598 0);
599
600 mark_set_resources (SET_SRC (x), res, 0, 0);
601 return;
602
603 case CLOBBER:
604 mark_set_resources (XEXP (x, 0), res, 1, 0);
605 return;
606
607 case SEQUENCE:
608 for (i = 0; i < XVECLEN (x, 0); i++)
609 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
610 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
611 mark_set_resources (XVECEXP (x, 0, i), res, 0,
612 include_delayed_effects);
613 return;
614
615 case POST_INC:
616 case PRE_INC:
617 case POST_DEC:
618 case PRE_DEC:
619 mark_set_resources (XEXP (x, 0), res, 1, 0);
620 return;
621
622 case ZERO_EXTRACT:
623 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
624 mark_set_resources (XEXP (x, 1), res, 0, 0);
625 mark_set_resources (XEXP (x, 2), res, 0, 0);
626 return;
627
628 case MEM:
629 if (in_dest)
630 {
631 res->memory = 1;
632 res->unch_memory = RTX_UNCHANGING_P (x);
633 res->volatil = MEM_VOLATILE_P (x);
634 }
635
636 mark_set_resources (XEXP (x, 0), res, 0, 0);
637 return;
638
639 case SUBREG:
640 if (in_dest)
641 {
642 if (GET_CODE (SUBREG_REG (x)) != REG)
643 mark_set_resources (SUBREG_REG (x), res,
644 in_dest, include_delayed_effects);
645 else
646 {
647 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
648 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
649 for (i = regno; i < last_regno; i++)
650 SET_HARD_REG_BIT (res->regs, i);
651 }
652 }
653 return;
654
655 case REG:
656 if (in_dest)
657 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
658 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
659 return;
660 }
661
662 /* Process each sub-expression and flag what it needs. */
663 format_ptr = GET_RTX_FORMAT (code);
664 for (i = 0; i < GET_RTX_LENGTH (code); i++)
665 switch (*format_ptr++)
666 {
667 case 'e':
668 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
669 break;
670
671 case 'E':
672 for (j = 0; j < XVECLEN (x, i); j++)
673 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
674 include_delayed_effects);
675 break;
676 }
677 }
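
/* To illustrate the difference between the two marking routines, consider
   the store (set (mem (reg 5)) (reg 6)) (register numbers illustrative
   only): mark_referenced_resources marks regs 5 and 6 -- the address and
   the source -- but not memory, since a store does not read memory, while
   mark_set_resources marks memory as set but neither register, since
   neither register is modified.  */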
678 \f
679 /* Return TRUE if this insn should stop the search for insns to fill delay
680 slots. LABELS_P indicates that labels should terminate the search.
681 In all cases, jumps terminate the search. */
682
683 static int
684 stop_search_p (insn, labels_p)
685 rtx insn;
686 int labels_p;
687 {
688 if (insn == 0)
689 return 1;
690
691 switch (GET_CODE (insn))
692 {
693 case NOTE:
694 case CALL_INSN:
695 return 0;
696
697 case CODE_LABEL:
698 return labels_p;
699
700 case JUMP_INSN:
701 case BARRIER:
702 return 1;
703
704 case INSN:
705 /* OK unless it contains a delay slot or is an `asm' insn of some type.
706 We don't know anything about these. */
707 return (GET_CODE (PATTERN (insn)) == SEQUENCE
708 || GET_CODE (PATTERN (insn)) == ASM_INPUT
709 || asm_noperands (PATTERN (insn)) >= 0);
710
711 default:
712 abort ();
713 }
714 }
715 \f
716 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
717 resource set contains a volatile memory reference. Otherwise, return FALSE. */
718
719 static int
720 resource_conflicts_p (res1, res2)
721 struct resources *res1, *res2;
722 {
723 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
724 || (res1->unch_memory && res2->unch_memory)
725 || res1->volatil || res2->volatil)
726 return 1;
727
728 #ifdef HARD_REG_SET
729 return (res1->regs & res2->regs) != HARD_CONST (0);
730 #else
731 {
732 int i;
733
734 for (i = 0; i < HARD_REG_SET_LONGS; i++)
735 if ((res1->regs[i] & res2->regs[i]) != 0)
736 return 1;
737 return 0;
738 }
739 #endif
740 }
741
742 /* Return TRUE if any resource marked in RES, a `struct resources', is
743    referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, resources used by
744    the called routine are also considered.
745
746 We compute this by computing all the resources referenced by INSN and
747 seeing if this conflicts with RES. It might be faster to directly check
748 ourselves, and this is the way it used to work, but it means duplicating
749 a large block of complex code. */
750
751 static int
752 insn_references_resource_p (insn, res, include_delayed_effects)
753 register rtx insn;
754 register struct resources *res;
755 int include_delayed_effects;
756 {
757 struct resources insn_res;
758
759 CLEAR_RESOURCE (&insn_res);
760 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
761 return resource_conflicts_p (&insn_res, res);
762 }
763
764 /* Return TRUE if INSN modifies resources that are marked in RES.
765    INCLUDE_DELAYED_EFFECTS is set if the actions of the called routine should be
766 included. CC0 is only modified if it is explicitly set; see comments
767 in front of mark_set_resources for details. */
768
769 static int
770 insn_sets_resource_p (insn, res, include_delayed_effects)
771 register rtx insn;
772 register struct resources *res;
773 int include_delayed_effects;
774 {
775 struct resources insn_sets;
776
777 CLEAR_RESOURCE (&insn_sets);
778 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
779 return resource_conflicts_p (&insn_sets, res);
780 }
781 \f
782 /* Find a label at the end of the function or before a RETURN. If there is
783 none, make one. */
784
785 static rtx
786 find_end_label ()
787 {
788 rtx insn;
789
790 /* If we found one previously, return it. */
791 if (end_of_function_label)
792 return end_of_function_label;
793
794 /* Otherwise, see if there is a label at the end of the function. If there
795 is, it must be that RETURN insns aren't needed, so that is our return
796 label and we don't have to do anything else. */
797
798 insn = get_last_insn ();
799 while (GET_CODE (insn) == NOTE
800 || (GET_CODE (insn) == INSN
801 && (GET_CODE (PATTERN (insn)) == USE
802 || GET_CODE (PATTERN (insn)) == CLOBBER)))
803 insn = PREV_INSN (insn);
804
805 /* When a target threads its epilogue we might already have a
806 suitable return insn. If so put a label before it for the
807 end_of_function_label. */
808 if (GET_CODE (insn) == BARRIER
809 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
810 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
811 {
812 rtx temp = PREV_INSN (PREV_INSN (insn));
813 end_of_function_label = gen_label_rtx ();
814 LABEL_NUSES (end_of_function_label) = 0;
815
816       /* Put the label before any USE insns that may precede the RETURN insn.  */
817 while (GET_CODE (temp) == USE)
818 temp = PREV_INSN (temp);
819
820 emit_label_after (end_of_function_label, temp);
821 }
822
823 else if (GET_CODE (insn) == CODE_LABEL)
824 end_of_function_label = insn;
825 else
826 {
827 /* Otherwise, make a new label and emit a RETURN and BARRIER,
828 if needed. */
829 end_of_function_label = gen_label_rtx ();
830 LABEL_NUSES (end_of_function_label) = 0;
831 emit_label (end_of_function_label);
832 #ifdef HAVE_return
833 if (HAVE_return)
834 {
835 /* The return we make may have delay slots too. */
836 rtx insn = gen_return ();
837 insn = emit_jump_insn (insn);
838 emit_barrier ();
839 if (num_delay_slots (insn) > 0)
840 obstack_ptr_grow (&unfilled_slots_obstack, insn);
841 }
842 #endif
843 }
844
845 /* Show one additional use for this label so it won't go away until
846 we are done. */
847 ++LABEL_NUSES (end_of_function_label);
848
849 return end_of_function_label;
850 }
851 \f
852 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
853 the pattern of INSN with the SEQUENCE.
854
855 Chain the insns so that NEXT_INSN of each insn in the sequence points to
856 the next and NEXT_INSN of the last insn in the sequence points to
857 the first insn after the sequence. Similarly for PREV_INSN. This makes
858 it easier to scan all insns.
859
860 Returns the SEQUENCE that replaces INSN. */
861
862 static rtx
863 emit_delay_sequence (insn, list, length, avail)
864 rtx insn;
865 rtx list;
866 int length;
867 int avail;
868 {
869 register int i = 1;
870 register rtx li;
871 int had_barrier = 0;
872
873   /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
874 rtvec seqv = rtvec_alloc (length + 1);
875 rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
876 rtx seq_insn = make_insn_raw (seq);
877 rtx first = get_insns ();
878 rtx last = get_last_insn ();
879
880 /* Make a copy of the insn having delay slots. */
881 rtx delay_insn = copy_rtx (insn);
882
883 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
884 confuse further processing. Update LAST in case it was the last insn.
885 We will put the BARRIER back in later. */
886 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
887 {
888 delete_insn (NEXT_INSN (insn));
889 last = get_last_insn ();
890 had_barrier = 1;
891 }
892
893 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
894 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
895 PREV_INSN (seq_insn) = PREV_INSN (insn);
896
897 if (insn == last)
898 set_new_first_and_last_insn (first, seq_insn);
899 else
900 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
901
902 if (insn == first)
903 set_new_first_and_last_insn (seq_insn, last);
904 else
905 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
906
907 /* Build our SEQUENCE and rebuild the insn chain. */
908 XVECEXP (seq, 0, 0) = delay_insn;
909 INSN_DELETED_P (delay_insn) = 0;
910 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
911
912 for (li = list; li; li = XEXP (li, 1), i++)
913 {
914 rtx tem = XEXP (li, 0);
915 rtx note;
916
917 /* Show that this copy of the insn isn't deleted. */
918 INSN_DELETED_P (tem) = 0;
919
920 XVECEXP (seq, 0, i) = tem;
921 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
922 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
923
924 /* Remove any REG_DEAD notes because we can't rely on them now
925 that the insn has been moved. */
926 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
927 if (REG_NOTE_KIND (note) == REG_DEAD)
928 XEXP (note, 0) = const0_rtx;
929 }
930
931 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
932
933 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
934 last insn in that SEQUENCE to point to us. Similarly for the first
935 insn in the following insn if it is a SEQUENCE. */
936
937 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
938 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
939 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
940 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
941 = seq_insn;
942
943 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
944 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
945 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
946
947 /* If there used to be a BARRIER, put it back. */
948 if (had_barrier)
949 emit_barrier_after (seq_insn);
950
951 if (i != length + 1)
952 abort ();
953
954 return seq_insn;
955 }
956
957 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
958 be in the order in which the insns are to be executed. */
959
960 static rtx
961 add_to_delay_list (insn, delay_list)
962 rtx insn;
963 rtx delay_list;
964 {
965 /* If we have an empty list, just make a new list element. If
966      INSN has its block number recorded, clear it since we may
967 be moving the insn to a new block. */
968
969 if (delay_list == 0)
970 {
971 struct target_info *tinfo;
972
973 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
974 tinfo; tinfo = tinfo->next)
975 if (tinfo->uid == INSN_UID (insn))
976 break;
977
978 if (tinfo)
979 tinfo->block = -1;
980
981 return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
982 }
983
984 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
985 list. */
986 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
987
988 return delay_list;
989 }
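
/* A minimal sketch of how a delay list is built and then consumed elsewhere
   in this file (NEW_INSN and N_FILLED are placeholders, not real
   identifiers):

	rtx delay_list = 0;

	delay_list = add_to_delay_list (new_insn, delay_list);
	...
	seq_insn = emit_delay_sequence (insn, delay_list, n_filled, 0);

   as done, for instance, in delete_from_delay_slot and the
   fill_*_delay_slots routines below.  */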
990 \f
991 /* Delete INSN from the delay slot of the insn that it is in.  This may
992 produce an insn without anything in its delay slots. */
993
994 static void
995 delete_from_delay_slot (insn)
996 rtx insn;
997 {
998 rtx trial, seq_insn, seq, prev;
999 rtx delay_list = 0;
1000 int i;
1001
1002 /* We first must find the insn containing the SEQUENCE with INSN in its
1003 delay slot. Do this by finding an insn, TRIAL, where
1004 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
1005
1006 for (trial = insn;
1007 PREV_INSN (NEXT_INSN (trial)) == trial;
1008 trial = NEXT_INSN (trial))
1009 ;
1010
1011 seq_insn = PREV_INSN (NEXT_INSN (trial));
1012 seq = PATTERN (seq_insn);
1013
1014 /* Create a delay list consisting of all the insns other than the one
1015 we are deleting (unless we were the only one). */
1016 if (XVECLEN (seq, 0) > 2)
1017 for (i = 1; i < XVECLEN (seq, 0); i++)
1018 if (XVECEXP (seq, 0, i) != insn)
1019 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
1020
1021 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
1022 list, and rebuild the delay list if non-empty. */
1023 prev = PREV_INSN (seq_insn);
1024 trial = XVECEXP (seq, 0, 0);
1025 delete_insn (seq_insn);
1026 add_insn_after (trial, prev);
1027
1028 if (GET_CODE (trial) == JUMP_INSN
1029 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
1030 emit_barrier_after (trial);
1031
1032   /* If there are any delay insns, re-emit them.  Otherwise, clear the
1033 annul flag. */
1034 if (delay_list)
1035 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
1036 else
1037 INSN_ANNULLED_BRANCH_P (trial) = 0;
1038
1039 INSN_FROM_TARGET_P (insn) = 0;
1040
1041 /* Show we need to fill this insn again. */
1042 obstack_ptr_grow (&unfilled_slots_obstack, trial);
1043 }
1044 \f
1045 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1046 the insn that sets CC0 for it and delete it too. */
1047
1048 static void
1049 delete_scheduled_jump (insn)
1050 rtx insn;
1051 {
1052 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1053 delete the insn that sets the condition code, but it is hard to find it.
1054 Since this case is rare anyway, don't bother trying; there would likely
1055 be other insns that became dead anyway, which we wouldn't know to
1056 delete. */
1057
1058 #ifdef HAVE_cc0
1059 if (reg_mentioned_p (cc0_rtx, insn))
1060 {
1061 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1062
1063 /* If a reg-note was found, it points to an insn to set CC0. This
1064 insn is in the delay list of some other insn. So delete it from
1065 the delay list it was in. */
1066 if (note)
1067 {
1068 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1069 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1070 delete_from_delay_slot (XEXP (note, 0));
1071 }
1072 else
1073 {
1074 /* The insn setting CC0 is our previous insn, but it may be in
1075 a delay slot. It will be the last insn in the delay slot, if
1076 it is. */
1077 rtx trial = previous_insn (insn);
1078 if (GET_CODE (trial) == NOTE)
1079 trial = prev_nonnote_insn (trial);
1080 if (sets_cc0_p (PATTERN (trial)) != 1
1081 || FIND_REG_INC_NOTE (trial, 0))
1082 return;
1083 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1084 delete_insn (trial);
1085 else
1086 delete_from_delay_slot (trial);
1087 }
1088 }
1089 #endif
1090
1091 delete_insn (insn);
1092 }
1093 \f
1094 /* Counters for delay-slot filling. */
1095
1096 #define NUM_REORG_FUNCTIONS 2
1097 #define MAX_DELAY_HISTOGRAM 3
1098 #define MAX_REORG_PASSES 2
1099
1100 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1101
1102 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1103
1104 static int reorg_pass_number;
1105
1106 static void
1107 note_delay_statistics (slots_filled, index)
1108 int slots_filled, index;
1109 {
1110 num_insns_needing_delays[index][reorg_pass_number]++;
1111 if (slots_filled > MAX_DELAY_HISTOGRAM)
1112 slots_filled = MAX_DELAY_HISTOGRAM;
1113 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1114 }
1115 \f
1116 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1117
1118 /* Optimize the following cases:
1119
1120 1. When a conditional branch skips over only one instruction,
1121 use an annulling branch and put that insn in the delay slot.
1122      Use either a branch that annuls when the condition is true or
1123 invert the test with a branch that annuls when the condition is
1124 false. This saves insns, since otherwise we must copy an insn
1125 from the L1 target.
1126
1127 (orig) (skip) (otherwise)
1128 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1129 insn insn insn2
1130 L1: L1: L1:
1131 insn2 insn2 insn2
1132 insn3 insn3 L1':
1133 insn3
1134
1135 2. When a conditional branch skips over only one instruction,
1136 and after that, it unconditionally branches somewhere else,
1137      perform a similar optimization.  This saves executing the
1138 second branch in the case where the inverted condition is true.
1139
1140 Bcc.n L1 Bcc',a L2
1141 insn insn
1142 L1: L1:
1143 Bra L2 Bra L2
1144
1145 INSN is a JUMP_INSN.
1146
1147 This should be expanded to skip over N insns, where N is the number
1148 of delay slots required. */
1149
1150 static rtx
1151 optimize_skip (insn)
1152 register rtx insn;
1153 {
1154 register rtx trial = next_nonnote_insn (insn);
1155 rtx next_trial = next_active_insn (trial);
1156 rtx delay_list = 0;
1157 rtx target_label;
1158 int flags;
1159
1160 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1161
1162 if (trial == 0
1163 || GET_CODE (trial) != INSN
1164 || GET_CODE (PATTERN (trial)) == SEQUENCE
1165 || recog_memoized (trial) < 0
1166 || (! eligible_for_annul_false (insn, 0, trial, flags)
1167 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1168 return 0;
1169
1170 /* There are two cases where we are just executing one insn (we assume
1171 here that a branch requires only one insn; this should be generalized
1172 at some point): Where the branch goes around a single insn or where
1173 we have one insn followed by a branch to the same label we branch to.
1174 In both of these cases, inverting the jump and annulling the delay
1175 slot give the same effect in fewer insns. */
1176 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1177 || (next_trial != 0
1178 && GET_CODE (next_trial) == JUMP_INSN
1179 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1180 && (simplejump_p (next_trial)
1181 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1182 {
1183 if (eligible_for_annul_false (insn, 0, trial, flags))
1184 {
1185 if (invert_jump (insn, JUMP_LABEL (insn)))
1186 INSN_FROM_TARGET_P (trial) = 1;
1187 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1188 return 0;
1189 }
1190
1191 delay_list = add_to_delay_list (trial, NULL_RTX);
1192 next_trial = next_active_insn (trial);
1193 update_block (trial, trial);
1194 delete_insn (trial);
1195
1196 /* Also, if we are targeting an unconditional
1197 branch, thread our jump to the target of that branch. Don't
1198 change this into a RETURN here, because it may not accept what
1199 we have in the delay slot. We'll fix this up later. */
1200 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1201 && (simplejump_p (next_trial)
1202 || GET_CODE (PATTERN (next_trial)) == RETURN))
1203 {
1204 target_label = JUMP_LABEL (next_trial);
1205 if (target_label == 0)
1206 target_label = find_end_label ();
1207
1208 /* Recompute the flags based on TARGET_LABEL since threading
1209 the jump to TARGET_LABEL may change the direction of the
1210 jump (which may change the circumstances in which the
1211 delay slot is nullified). */
1212 flags = get_jump_flags (insn, target_label);
1213 if (eligible_for_annul_true (insn, 0, trial, flags))
1214 reorg_redirect_jump (insn, target_label);
1215 }
1216
1217 INSN_ANNULLED_BRANCH_P (insn) = 1;
1218 }
1219
1220 return delay_list;
1221 }
1222 #endif
1223 \f
1224
1225 /* Encode and return branch direction and prediction information for
1226 INSN assuming it will jump to LABEL.
1227
1228    Unconditional branches return no direction information and
1229 are predicted as very likely taken. */
1230 static int
1231 get_jump_flags (insn, label)
1232 rtx insn, label;
1233 {
1234 int flags;
1235
1236   /* get_jump_flags can be passed any insn with delay slots; these may
1237 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1238 direction information, and only if they are conditional jumps.
1239
1240 If LABEL is zero, then there is no way to determine the branch
1241 direction. */
1242 if (GET_CODE (insn) == JUMP_INSN
1243 && (condjump_p (insn) || condjump_in_parallel_p (insn))
1244 && INSN_UID (insn) <= max_uid
1245 && label != 0
1246 && INSN_UID (label) <= max_uid)
1247 flags
1248 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1249 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1250 /* No valid direction information. */
1251 else
1252 flags = 0;
1253
1254   /* If INSN is a conditional branch, call mostly_true_jump to determine
1255      the branch prediction.
1256 
1257      Unconditional branches are predicted as very likely taken.  */
1258 if (GET_CODE (insn) == JUMP_INSN
1259 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
1260 {
1261 int prediction;
1262
1263 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1264 switch (prediction)
1265 {
1266 case 2:
1267 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1268 break;
1269 case 1:
1270 flags |= ATTR_FLAG_likely;
1271 break;
1272 case 0:
1273 flags |= ATTR_FLAG_unlikely;
1274 break;
1275 case -1:
1276 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1277 break;
1278
1279 default:
1280 abort();
1281 }
1282 }
1283 else
1284 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1285
1286 return flags;
1287 }
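
/* For example, a conditional branch to a label that lies later in the insn
   stream and that mostly_true_jump rates as 1 comes back as
   (ATTR_FLAG_forward | ATTR_FLAG_likely), while an unconditional jump gets
   no direction bit and (ATTR_FLAG_very_likely | ATTR_FLAG_likely).  */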
1288
1289 /* Return 1 if INSN is a destination that will be branched to rarely (the
1290    return point of a function); return 2 if it will be branched to very
1291 rarely (a call to a function that doesn't return). Otherwise,
1292 return 0. */
1293
1294 static int
1295 rare_destination (insn)
1296 rtx insn;
1297 {
1298 int jump_count = 0;
1299 rtx next;
1300
1301 for (; insn; insn = next)
1302 {
1303 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1304 insn = XVECEXP (PATTERN (insn), 0, 0);
1305
1306 next = NEXT_INSN (insn);
1307
1308 switch (GET_CODE (insn))
1309 {
1310 case CODE_LABEL:
1311 return 0;
1312 case BARRIER:
1313 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1314 don't scan past JUMP_INSNs, so any barrier we find here must
1315 have been after a CALL_INSN and hence mean the call doesn't
1316 return. */
1317 return 2;
1318 case JUMP_INSN:
1319 if (GET_CODE (PATTERN (insn)) == RETURN)
1320 return 1;
1321 else if (simplejump_p (insn)
1322 && jump_count++ < 10)
1323 next = JUMP_LABEL (insn);
1324 else
1325 return 0;
1326 }
1327 }
1328
1329 /* If we got here it means we hit the end of the function. So this
1330 is an unlikely destination. */
1331
1332 return 1;
1333 }
1334
1335 /* Return truth value of the statement that this branch
1336 is mostly taken. If we think that the branch is extremely likely
1337 to be taken, we return 2. If the branch is slightly more likely to be
1338 taken, return 1. If the branch is slightly less likely to be taken,
1339 return 0 and if the branch is highly unlikely to be taken, return -1.
1340
1341 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1342
1343 static int
1344 mostly_true_jump (jump_insn, condition)
1345 rtx jump_insn, condition;
1346 {
1347 rtx target_label = JUMP_LABEL (jump_insn);
1348 rtx insn;
1349 int rare_dest = rare_destination (target_label);
1350 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1351
1352 /* If this is a branch outside a loop, it is highly unlikely. */
1353 if (GET_CODE (PATTERN (jump_insn)) == SET
1354 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1355 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1356 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1357 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1358 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1359 return -1;
1360
1361 if (target_label)
1362 {
1363 /* If this is the test of a loop, it is very likely true. We scan
1364 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1365 before the next real insn, we assume the branch is to the top of
1366 the loop. */
1367 for (insn = PREV_INSN (target_label);
1368 insn && GET_CODE (insn) == NOTE;
1369 insn = PREV_INSN (insn))
1370 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1371 return 2;
1372
1373 /* If this is a jump to the test of a loop, it is likely true. We scan
1374 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1375 before the next real insn, we assume the branch is to the loop branch
1376 test. */
1377 for (insn = NEXT_INSN (target_label);
1378 insn && GET_CODE (insn) == NOTE;
1379 	   insn = NEXT_INSN (insn))
1380 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1381 return 1;
1382 }
1383
1384 /* Look at the relative rarities of the fallthrough and destination. If
1385 they differ, we can predict the branch that way. */
1386
1387 switch (rare_fallthrough - rare_dest)
1388 {
1389 case -2:
1390 return -1;
1391 case -1:
1392 return 0;
1393 case 0:
1394 break;
1395 case 1:
1396 return 1;
1397 case 2:
1398 return 2;
1399 }
1400
1401 /* If we couldn't figure out what this jump was, assume it won't be
1402 taken. This should be rare. */
1403 if (condition == 0)
1404 return 0;
1405
1406 /* EQ tests are usually false and NE tests are usually true. Also,
1407 most quantities are positive, so we can make the appropriate guesses
1408 about signed comparisons against zero. */
1409 switch (GET_CODE (condition))
1410 {
1411 case CONST_INT:
1412 /* Unconditional branch. */
1413 return 1;
1414 case EQ:
1415 return 0;
1416 case NE:
1417 return 1;
1418 case LE:
1419 case LT:
1420 if (XEXP (condition, 1) == const0_rtx)
1421 return 0;
1422 break;
1423 case GE:
1424 case GT:
1425 if (XEXP (condition, 1) == const0_rtx)
1426 return 1;
1427 break;
1428 }
1429
1430   /* Predict that backward branches will usually be taken and forward
1431      branches usually will not.  If we don't know whether this is forward
1432      or backward, assume the branch will be taken, since most are.  */
1433 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1434 || INSN_UID (target_label) > max_uid
1435 || (uid_to_ruid[INSN_UID (jump_insn)]
1436 	      > uid_to_ruid[INSN_UID (target_label)]));
1437 }
1438
1439 /* Return the condition under which INSN will branch to TARGET. If TARGET
1440 is zero, return the condition under which INSN will return. If INSN is
1441 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1442 type of jump, or it doesn't go to TARGET, return 0. */
1443
1444 static rtx
1445 get_branch_condition (insn, target)
1446 rtx insn;
1447 rtx target;
1448 {
1449 rtx pat = PATTERN (insn);
1450 rtx src;
1451
1452 if (condjump_in_parallel_p (insn))
1453 pat = XVECEXP (pat, 0, 0);
1454
1455 if (GET_CODE (pat) == RETURN)
1456 return target == 0 ? const_true_rtx : 0;
1457
1458 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1459 return 0;
1460
1461 src = SET_SRC (pat);
1462 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1463 return const_true_rtx;
1464
1465 else if (GET_CODE (src) == IF_THEN_ELSE
1466 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1467 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1468 && XEXP (XEXP (src, 1), 0) == target))
1469 && XEXP (src, 2) == pc_rtx)
1470 return XEXP (src, 0);
1471
1472 else if (GET_CODE (src) == IF_THEN_ELSE
1473 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1474 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1475 && XEXP (XEXP (src, 2), 0) == target))
1476 && XEXP (src, 1) == pc_rtx)
1477 return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
1478 GET_MODE (XEXP (src, 0)),
1479 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1480
1481 return 0;
1482 }
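
/* For instance, for a jump whose pattern is

	(set (pc) (if_then_else (eq (reg 2) (const_int 0))
				(label_ref L1)
				(pc)))

   get_branch_condition returns (eq (reg 2) (const_int 0)) when TARGET is
   L1, while the same test with the arms swapped yields the reversed
   condition, (ne (reg 2) (const_int 0)).  (Register number and label are
   illustrative only.)  */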
1483
1484 /* Return non-zero if CONDITION is more strict than the condition of
1485 INSN, i.e., if INSN will always branch if CONDITION is true. */
1486
1487 static int
1488 condition_dominates_p (condition, insn)
1489 rtx condition;
1490 rtx insn;
1491 {
1492 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1493 enum rtx_code code = GET_CODE (condition);
1494 enum rtx_code other_code;
1495
1496 if (rtx_equal_p (condition, other_condition)
1497 || other_condition == const_true_rtx)
1498 return 1;
1499
1500 else if (condition == const_true_rtx || other_condition == 0)
1501 return 0;
1502
1503 other_code = GET_CODE (other_condition);
1504 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1505 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1506 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1507 return 0;
1508
1509 return comparison_dominates_p (code, other_code);
1510 }
1511
1512 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1513 any insns already in the delay slot of JUMP. */
1514
1515 static int
1516 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1517 rtx jump, newlabel, seq;
1518 {
1519 int flags, slots, i;
1520 rtx pat = PATTERN (seq);
1521
1522 /* Make sure all the delay slots of this jump would still
1523 be valid after threading the jump. If they are still
1524 valid, then return non-zero. */
1525
1526 flags = get_jump_flags (jump, newlabel);
1527 for (i = 1; i < XVECLEN (pat, 0); i++)
1528 if (! (
1529 #ifdef ANNUL_IFFALSE_SLOTS
1530 (INSN_ANNULLED_BRANCH_P (jump)
1531 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1532 ? eligible_for_annul_false (jump, i - 1,
1533 XVECEXP (pat, 0, i), flags) :
1534 #endif
1535 #ifdef ANNUL_IFTRUE_SLOTS
1536 (INSN_ANNULLED_BRANCH_P (jump)
1537 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1538 ? eligible_for_annul_true (jump, i - 1,
1539 XVECEXP (pat, 0, i), flags) :
1540 #endif
1541 	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1542 break;
1543
1544 return (i == XVECLEN (pat, 0));
1545 }
1546
1547 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1548 any insns we wish to place in the delay slot of JUMP. */
1549
1550 static int
1551 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1552 rtx jump, newlabel, delay_list;
1553 {
1554 int flags, i;
1555 rtx li;
1556
1557 /* Make sure all the insns in DELAY_LIST would still be
1558 valid after threading the jump. If they are still
1559 valid, then return non-zero. */
1560
1561 flags = get_jump_flags (jump, newlabel);
1562 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1563 if (! (
1564 #ifdef ANNUL_IFFALSE_SLOTS
1565 (INSN_ANNULLED_BRANCH_P (jump)
1566 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1567 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1568 #endif
1569 #ifdef ANNUL_IFTRUE_SLOTS
1570 (INSN_ANNULLED_BRANCH_P (jump)
1571 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1572 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1573 #endif
1574 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1575 break;
1576
1577 return (li == NULL);
1578 }
1579
1580 \f
1581 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1582 the condition tested by INSN is CONDITION and the resources shown in
1583 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1584 from SEQ's delay list, in addition to whatever insns it may execute
1585    (in DELAY_LIST).  SETS and NEEDED denote resources already set and
1586 needed while searching for delay slot insns. Return the concatenated
1587 delay list if possible, otherwise, return 0.
1588
1589 SLOTS_TO_FILL is the total number of slots required by INSN, and
1590 PSLOTS_FILLED points to the number filled so far (also the number of
1591 insns in DELAY_LIST). It is updated with the number that have been
1592 filled from the SEQUENCE, if any.
1593
1594 PANNUL_P points to a non-zero value if we already know that we need
1595 to annul INSN. If this routine determines that annulling is needed,
1596 it may set that value non-zero.
1597
1598 PNEW_THREAD points to a location that is to receive the place at which
1599 execution should continue. */
1600
1601 static rtx
1602 steal_delay_list_from_target (insn, condition, seq, delay_list,
1603 sets, needed, other_needed,
1604 slots_to_fill, pslots_filled, pannul_p,
1605 pnew_thread)
1606 rtx insn, condition;
1607 rtx seq;
1608 rtx delay_list;
1609 struct resources *sets, *needed, *other_needed;
1610 int slots_to_fill;
1611 int *pslots_filled;
1612 int *pannul_p;
1613 rtx *pnew_thread;
1614 {
1615 rtx temp;
1616 int slots_remaining = slots_to_fill - *pslots_filled;
1617 int total_slots_filled = *pslots_filled;
1618 rtx new_delay_list = 0;
1619 int must_annul = *pannul_p;
1620 int i;
1621
1622 /* We can't do anything if there are more delay slots in SEQ than we
1623 can handle, or if we don't know that it will be a taken branch.
1624 We know that it will be a taken branch if it is either an unconditional
1625 branch or a conditional branch with a stricter branch condition.
1626
1627 Also, exit if the branch has more than one set, since then it is computing
1628 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1629 ??? It may be possible to move other sets into INSN in addition to
1630 moving the instructions in the delay slots. */
1631
1632 if (XVECLEN (seq, 0) - 1 > slots_remaining
1633 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1634 || ! single_set (XVECEXP (seq, 0, 0)))
1635 return delay_list;
1636
1637 for (i = 1; i < XVECLEN (seq, 0); i++)
1638 {
1639 rtx trial = XVECEXP (seq, 0, i);
1640 int flags;
1641
1642 if (insn_references_resource_p (trial, sets, 0)
1643 || insn_sets_resource_p (trial, needed, 0)
1644 || insn_sets_resource_p (trial, sets, 0)
1645 #ifdef HAVE_cc0
1646 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1647 delay list. */
1648 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1649 #endif
1650 /* If TRIAL is from the fallthrough code of an annulled branch insn
1651 in SEQ, we cannot use it. */
1652 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1653 && ! INSN_FROM_TARGET_P (trial)))
1654 return delay_list;
1655
1656 /* If this insn was already done (usually in a previous delay slot),
1657 pretend we put it in our delay slot. */
1658 if (redundant_insn (trial, insn, new_delay_list))
1659 continue;
1660
1661 /* We will end up re-vectoring this branch, so compute flags
1662 based on jumping to the new label. */
1663 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1664
1665 if (! must_annul
1666 && ((condition == const_true_rtx
1667 || (! insn_sets_resource_p (trial, other_needed, 0)
1668 && ! may_trap_p (PATTERN (trial)))))
1669 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1670 : (must_annul = 1,
1671 eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
1672 {
1673 temp = copy_rtx (trial);
1674 INSN_FROM_TARGET_P (temp) = 1;
1675 new_delay_list = add_to_delay_list (temp, new_delay_list);
1676 total_slots_filled++;
1677
1678 if (--slots_remaining == 0)
1679 break;
1680 }
1681 else
1682 return delay_list;
1683 }
1684
1685 /* Show the place to which we will be branching. */
1686 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1687
1688 /* Add any new insns to the delay list and update the count of the
1689 number of slots filled. */
1690 *pslots_filled = total_slots_filled;
1691 *pannul_p = must_annul;
1692
1693 if (delay_list == 0)
1694 return new_delay_list;
1695
1696 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1697 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1698
1699 return delay_list;
1700 }
1701 \f
1702 /* Similar to steal_delay_list_from_target except that SEQ is on the
1703 fallthrough path of INSN. Here we only do something if the delay insn
1704 of SEQ is an unconditional branch. In that case we steal its delay slot
1705 for INSN since unconditional branches are much easier to fill. */
1706
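/* Hypothetical illustration (no particular target assumed): if the code
   falling through from INSN begins with a filled unconditional branch

	beq	...,L1		; INSN, still needing a delay slot
	b	L3		; SEQ's delay insn
	add	r4,r5,r6	; SEQ's delay slot

   the `add' can be pulled out of SEQ's slot and placed in INSN's delay
   slot instead; the unconditional `b' is much easier to refill later.  */
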
1707 static rtx
1708 steal_delay_list_from_fallthrough (insn, condition, seq,
1709 delay_list, sets, needed, other_needed,
1710 slots_to_fill, pslots_filled, pannul_p)
1711 rtx insn, condition;
1712 rtx seq;
1713 rtx delay_list;
1714 struct resources *sets, *needed, *other_needed;
1715 int slots_to_fill;
1716 int *pslots_filled;
1717 int *pannul_p;
1718 {
1719 int i;
1720 int flags;
1721
1722 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1723
1724 /* We can't do anything if SEQ's delay insn isn't an
1725 unconditional branch. */
1726
1727 if (! simplejump_p (XVECEXP (seq, 0, 0))
1728 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1729 return delay_list;
1730
1731 for (i = 1; i < XVECLEN (seq, 0); i++)
1732 {
1733 rtx trial = XVECEXP (seq, 0, i);
1734
1735 /* If TRIAL sets CC0, stealing it will move it too far from the use
1736 of CC0. */
1737 if (insn_references_resource_p (trial, sets, 0)
1738 || insn_sets_resource_p (trial, needed, 0)
1739 || insn_sets_resource_p (trial, sets, 0)
1740 #ifdef HAVE_cc0
1741 || sets_cc0_p (PATTERN (trial))
1742 #endif
1743 )
1745 break;
1746
1747 /* If this insn was already done, we don't need it. */
1748 if (redundant_insn (trial, insn, delay_list))
1749 {
1750 delete_from_delay_slot (trial);
1751 continue;
1752 }
1753
1754 if (! *pannul_p
1755 && ((condition == const_true_rtx
1756 || (! insn_sets_resource_p (trial, other_needed, 0)
1757 && ! may_trap_p (PATTERN (trial)))))
1758 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1759 : (*pannul_p = 1,
1760 eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1761 {
1762 delete_from_delay_slot (trial);
1763 delay_list = add_to_delay_list (trial, delay_list);
1764
1765 if (++(*pslots_filled) == slots_to_fill)
1766 break;
1767 }
1768 else
1769 break;
1770 }
1771
1772 return delay_list;
1773 }
1774 \f
1775 /* Try merging insns starting at THREAD which match exactly the insns in
1776 INSN's delay list.
1777
1778 If all insns were matched and the insn was previously annulling, the
1779 annul bit will be cleared.
1780
1781 For each insn that is merged, if the branch is or will be non-annulling,
1782 we delete the merged insn. */
1783
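/* A hypothetical sketch (no particular target assumed): INSN is an
   annulling branch whose single delay slot holds `add r4,r5,r6', and
   THREAD -- the code run when the branch is not taken -- begins with an
   identical `add r4,r5,r6'.  Once every slot has been matched this way,
   the copy in THREAD is deleted and the annul bit on INSN is cleared,
   since the slot insn is now correct on both paths.  */
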
1784 static void
1785 try_merge_delay_insns (insn, thread)
1786 rtx insn, thread;
1787 {
1788 rtx trial, next_trial;
1789 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1790 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1791 int slot_number = 1;
1792 int num_slots = XVECLEN (PATTERN (insn), 0);
1793 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1794 struct resources set, needed;
1795 rtx merged_insns = 0;
1796 int i;
1797 int flags;
1798
1799 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1800
1801 CLEAR_RESOURCE (&needed);
1802 CLEAR_RESOURCE (&set);
1803
1804 /* If this is not an annulling branch, take into account anything needed in
1805 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1806 folded into one. If we are annulling, this would be the correct
1807 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH,
1808 will essentially disable this optimization. This method is somewhat of
1809 a kludge, but I don't see a better way.) */
1810 if (! annul_p)
1811 mark_referenced_resources (next_to_match, &needed, 1);
1812
1813 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1814 {
1815 rtx pat = PATTERN (trial);
1816 rtx oldtrial = trial;
1817
1818 next_trial = next_nonnote_insn (trial);
1819
1820 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1821 if (GET_CODE (trial) == INSN
1822 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1823 continue;
1824
1825 if (GET_CODE (next_to_match) == GET_CODE (trial)
1826 #ifdef HAVE_cc0
1827 /* We can't share an insn that sets cc0. */
1828 && ! sets_cc0_p (pat)
1829 #endif
1830 && ! insn_references_resource_p (trial, &set, 1)
1831 && ! insn_sets_resource_p (trial, &set, 1)
1832 && ! insn_sets_resource_p (trial, &needed, 1)
1833 && (trial = try_split (pat, trial, 0)) != 0
1834 /* Update next_trial, in case try_split succeeded. */
1835 && (next_trial = next_nonnote_insn (trial))
1836 /* Likewise THREAD. */
1837 && (thread = oldtrial == thread ? trial : thread)
1838 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1839 /* Have to test this condition if annul condition is different
1840 from (and less restrictive than) non-annulling one. */
1841 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1842 {
1843
1844 if (! annul_p)
1845 {
1846 update_block (trial, thread);
1847 if (trial == thread)
1848 thread = next_active_insn (thread);
1849
1850 delete_insn (trial);
1851 INSN_FROM_TARGET_P (next_to_match) = 0;
1852 }
1853 else
1854 merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
1855
1856 if (++slot_number == num_slots)
1857 break;
1858
1859 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1860 if (! annul_p)
1861 mark_referenced_resources (next_to_match, &needed, 1);
1862 }
1863
1864 mark_set_resources (trial, &set, 0, 1);
1865 mark_referenced_resources (trial, &needed, 1);
1866 }
1867
1868 /* See if we stopped on a filled insn. If we did, try to see if its
1869 delay slots match. */
1870 if (slot_number != num_slots
1871 && trial && GET_CODE (trial) == INSN
1872 && GET_CODE (PATTERN (trial)) == SEQUENCE
1873 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1874 {
1875 rtx pat = PATTERN (trial);
1876 rtx filled_insn = XVECEXP (pat, 0, 0);
1877
1878 /* Account for resources set/needed by the filled insn. */
1879 mark_set_resources (filled_insn, &set, 0, 1);
1880 mark_referenced_resources (filled_insn, &needed, 1);
1881
1882 for (i = 1; i < XVECLEN (pat, 0); i++)
1883 {
1884 rtx dtrial = XVECEXP (pat, 0, i);
1885
1886 if (! insn_references_resource_p (dtrial, &set, 1)
1887 && ! insn_sets_resource_p (dtrial, &set, 1)
1888 && ! insn_sets_resource_p (dtrial, &needed, 1)
1889 #ifdef HAVE_cc0
1890 && ! sets_cc0_p (PATTERN (dtrial))
1891 #endif
1892 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1893 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1894 {
1895 if (! annul_p)
1896 {
1897 update_block (dtrial, thread);
1898 delete_from_delay_slot (dtrial);
1899 INSN_FROM_TARGET_P (next_to_match) = 0;
1900 }
1901 else
1902 merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
1903 merged_insns);
1904
1905 if (++slot_number == num_slots)
1906 break;
1907
1908 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1909 }
1910 }
1911 }
1912
1913 /* If all insns in the delay slot have been matched and we were previously
1914 annulling the branch, we need not do so any more. In that case delete all the
1915 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1916 the delay list so that we know that it isn't only being used at the
1917 target. */
1918 if (slot_number == num_slots && annul_p)
1919 {
1920 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1921 {
1922 if (GET_MODE (merged_insns) == SImode)
1923 {
1924 update_block (XEXP (merged_insns, 0), thread);
1925 delete_from_delay_slot (XEXP (merged_insns, 0));
1926 }
1927 else
1928 {
1929 update_block (XEXP (merged_insns, 0), thread);
1930 delete_insn (XEXP (merged_insns, 0));
1931 }
1932 }
1933
1934 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1935
1936 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1937 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1938 }
1939 }
1940 \f
1941 /* See if INSN is redundant with an insn in front of TARGET. Often this
1942 is called when INSN is a candidate for a delay slot of TARGET.
1943 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1944 of INSN. Often INSN will be redundant with an insn in a delay slot of
1945 some previous insn. This happens when we have a series of branches to the
1946 same label; in that case the first insn at the target might want to go
1947 into each of the delay slots.
1948
1949 If we are not careful, this routine can take up a significant fraction
1950 of the total compilation time (4%), but only wins rarely. Hence we
1951 speed this routine up by making two passes. The first pass goes back
1952 until it hits a label and sees if it finds an insn with an identical
1953 pattern. Only in this (relatively rare) event does it check for
1954 data conflicts.
1955
1956 We do not split insns we encounter. This could cause us not to find a
1957 redundant insn, but the cost of splitting seems greater than the possible
1958 gain in rare cases. */
1959
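/* Hypothetical picture of the "series of branches" case:

	beq	...,L1		; delay slot already holds the add
	add	r4,r5,r6	;   copied earlier from L1
	...
	bne	...,L1		; TARGET, now being filled
	...
   L1:	add	r4,r5,r6	; INSN, the candidate

   The backward scan from TARGET finds the earlier copy inside the filled
   branch's SEQUENCE, so INSN can be treated as redundant provided nothing
   between that copy and TARGET sets anything INSN uses or sets.  */
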
1960 static rtx
1961 redundant_insn (insn, target, delay_list)
1962 rtx insn;
1963 rtx target;
1964 rtx delay_list;
1965 {
1966 rtx target_main = target;
1967 rtx ipat = PATTERN (insn);
1968 rtx trial, pat;
1969 struct resources needed, set;
1970 int i;
1971
1972 /* Scan backwards looking for a match. */
1973 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1974 {
1975 if (GET_CODE (trial) == CODE_LABEL)
1976 return 0;
1977
1978 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1979 continue;
1980
1981 pat = PATTERN (trial);
1982 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1983 continue;
1984
1985 if (GET_CODE (pat) == SEQUENCE)
1986 {
1987 /* Stop for a CALL and its delay slots because it is difficult to
1988 track its resource needs correctly. */
1989 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1990 return 0;
1991
1992 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1993 slots because it is difficult to track its resource needs
1994 correctly. */
1995
1996 #ifdef INSN_SETS_ARE_DELAYED
1997 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1998 return 0;
1999 #endif
2000
2001 #ifdef INSN_REFERENCES_ARE_DELAYED
2002 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2003 return 0;
2004 #endif
2005
2006 /* See if any of the insns in the delay slot match, updating
2007 resource requirements as we go. */
2008 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2009 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2010 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
2011 break;
2012
2013 /* If found a match, exit this loop early. */
2014 if (i > 0)
2015 break;
2016 }
2017
2018 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
2019 break;
2020 }
2021
2022 /* If we didn't find an insn that matches, return 0. */
2023 if (trial == 0)
2024 return 0;
2025
2026 /* See what resources this insn sets and needs. If they overlap, or
2027 if this insn references CC0, it can't be redundant. */
2028
2029 CLEAR_RESOURCE (&needed);
2030 CLEAR_RESOURCE (&set);
2031 mark_set_resources (insn, &set, 0, 1);
2032 mark_referenced_resources (insn, &needed, 1);
2033
2034 /* If TARGET is a SEQUENCE, get the main insn. */
2035 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2036 target_main = XVECEXP (PATTERN (target), 0, 0);
2037
2038 if (resource_conflicts_p (&needed, &set)
2039 #ifdef HAVE_cc0
2040 || reg_mentioned_p (cc0_rtx, ipat)
2041 #endif
2042 /* The insn requiring the delay may not set anything needed or set by
2043 INSN. */
2044 || insn_sets_resource_p (target_main, &needed, 1)
2045 || insn_sets_resource_p (target_main, &set, 1))
2046 return 0;
2047
2048 /* Insns we pass may not set either NEEDED or SET, so merge them for
2049 simpler tests. */
2050 needed.memory |= set.memory;
2051 needed.unch_memory |= set.unch_memory;
2052 IOR_HARD_REG_SET (needed.regs, set.regs);
2053
2054 /* This insn isn't redundant if it conflicts with an insn that either is
2055 or will be in a delay slot of TARGET. */
2056
2057 while (delay_list)
2058 {
2059 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2060 return 0;
2061 delay_list = XEXP (delay_list, 1);
2062 }
2063
2064 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2065 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2066 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2067 return 0;
2068
2069 /* Scan backwards until we reach a label or an insn that uses something
2070 INSN sets or sets something INSN uses or sets.
2071
2072 for (trial = PREV_INSN (target);
2073 trial && GET_CODE (trial) != CODE_LABEL;
2074 trial = PREV_INSN (trial))
2075 {
2076 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2077 && GET_CODE (trial) != JUMP_INSN)
2078 continue;
2079
2080 pat = PATTERN (trial);
2081 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2082 continue;
2083
2084 if (GET_CODE (pat) == SEQUENCE)
2085 {
2086 /* If this is a CALL_INSN and its delay slots, it is hard to track
2087 the resource needs properly, so give up. */
2088 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2089 return 0;
2090
2091 /* If this is an INSN or JUMP_INSN with delayed effects, it
2092 is hard to track the resource needs properly, so give up. */
2093
2094 #ifdef INSN_SETS_ARE_DELAYED
2095 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2096 return 0;
2097 #endif
2098
2099 #ifdef INSN_REFERENCES_ARE_DELAYED
2100 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2101 return 0;
2102 #endif
2103
2104 /* See if any of the insns in the delay slot match, updating
2105 resource requirements as we go. */
2106 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2107 {
2108 rtx candidate = XVECEXP (pat, 0, i);
2109
2110 /* If an insn will be annulled if the branch is false, it isn't
2111 considered as a possible duplicate insn. */
2112 if (rtx_equal_p (PATTERN (candidate), ipat)
2113 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2114 && INSN_FROM_TARGET_P (candidate)))
2115 {
2116 /* Show that this insn will be used in the sequel. */
2117 INSN_FROM_TARGET_P (candidate) = 0;
2118 return candidate;
2119 }
2120
2121 /* Unless this is an annulled insn from the target of a branch,
2122 we must stop if it sets anything needed or set by INSN. */
2123 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2124 || ! INSN_FROM_TARGET_P (candidate))
2125 && insn_sets_resource_p (candidate, &needed, 1))
2126 return 0;
2127 }
2128
2129
2130 /* If the insn requiring the delay slot conflicts with INSN, we
2131 must stop. */
2132 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2133 return 0;
2134 }
2135 else
2136 {
2137 /* See if TRIAL is the same as INSN. */
2138 pat = PATTERN (trial);
2139 if (rtx_equal_p (pat, ipat))
2140 return trial;
2141
2142 /* Can't go any further if TRIAL conflicts with INSN. */
2143 if (insn_sets_resource_p (trial, &needed, 1))
2144 return 0;
2145 }
2146 }
2147
2148 return 0;
2149 }
2150 \f
2151 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2152 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2153 is non-zero, we are allowed to fall into this thread; otherwise, we are
2154 not.
2155
2156 If LABEL is used more than once or we pass a label other than LABEL before
2157 finding an active insn, we do not own this thread. */
2158
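/* Sketch of the intent (hedged; see the callers for the full story): for a
   branch to L1 we "own" the thread at L1 only if L1 is reached solely from
   that branch -- its label has exactly one use and, when ALLOW_FALLTHROUGH
   is zero, a BARRIER prevents falling into it.  Owning a thread lets
   callers delete insns from it instead of merely copying them.  */
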
2159 static int
2160 own_thread_p (thread, label, allow_fallthrough)
2161 rtx thread;
2162 rtx label;
2163 int allow_fallthrough;
2164 {
2165 rtx active_insn;
2166 rtx insn;
2167
2168 /* We don't own the function end. */
2169 if (thread == 0)
2170 return 0;
2171
2172 /* Get the first active insn, or THREAD, if it is an active insn. */
2173 active_insn = next_active_insn (PREV_INSN (thread));
2174
2175 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2176 if (GET_CODE (insn) == CODE_LABEL
2177 && (insn != label || LABEL_NUSES (insn) != 1))
2178 return 0;
2179
2180 if (allow_fallthrough)
2181 return 1;
2182
2183 /* Ensure that we reach a BARRIER before any insn or label. */
2184 for (insn = prev_nonnote_insn (thread);
2185 insn == 0 || GET_CODE (insn) != BARRIER;
2186 insn = prev_nonnote_insn (insn))
2187 if (insn == 0
2188 || GET_CODE (insn) == CODE_LABEL
2189 || (GET_CODE (insn) == INSN
2190 && GET_CODE (PATTERN (insn)) != USE
2191 && GET_CODE (PATTERN (insn)) != CLOBBER))
2192 return 0;
2193
2194 return 1;
2195 }
2196 \f
2197 /* Find the number of the basic block that starts closest to INSN. Return -1
2198 if we couldn't find such a basic block. */
2199
2200 static int
2201 find_basic_block (insn)
2202 rtx insn;
2203 {
2204 int i;
2205
2206 /* Scan backwards to the previous BARRIER. Then see if we can find a
2207 label that starts a basic block. Return the basic block number. */
2208
2209 for (insn = prev_nonnote_insn (insn);
2210 insn && GET_CODE (insn) != BARRIER;
2211 insn = prev_nonnote_insn (insn))
2212 ;
2213
2214 /* The start of the function is basic block zero. */
2215 if (insn == 0)
2216 return 0;
2217
2218 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2219 anything other than a CODE_LABEL or note, we can't find this code. */
2220 for (insn = next_nonnote_insn (insn);
2221 insn && GET_CODE (insn) == CODE_LABEL;
2222 insn = next_nonnote_insn (insn))
2223 {
2224 for (i = 0; i < n_basic_blocks; i++)
2225 if (insn == basic_block_head[i])
2226 return i;
2227 }
2228
2229 return -1;
2230 }
2231 \f
2232 /* Called when INSN is being moved from a location near the target of a jump.
2233 We leave a marker of the form (use (INSN)) immediately in front
2234 of WHERE for mark_target_live_regs. These markers will be deleted when
2235 reorg finishes.
2236
2237 We used to try to update the live status of registers if WHERE is at
2238 the start of a basic block, but that can't work since we may remove a
2239 BARRIER in relax_delay_slots. */
2240
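/* A USE whose operand is an entire insn is not ordinary rtl; it only
   serves as a marker.  mark_target_live_regs and
   find_dead_or_set_registers look through it at the underlying insn and
   keep the registers that insn sets live here, since the insn is now
   being executed somewhere else.  */
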
2241 static void
2242 update_block (insn, where)
2243 rtx insn;
2244 rtx where;
2245 {
2246 int b;
2247
2248 /* Ignore if this was in a delay slot and it came from the target of
2249 a branch. */
2250 if (INSN_FROM_TARGET_P (insn))
2251 return;
2252
2253 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
2254
2255 /* INSN might be making a value live in a block where it didn't use to
2256 be. So recompute liveness information for this block. */
2257
2258 b = find_basic_block (insn);
2259 if (b != -1)
2260 bb_ticks[b]++;
2261 }
2262
2263 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2264 the basic block containing the jump. */
2265
2266 static int
2267 reorg_redirect_jump (jump, nlabel)
2268 rtx jump;
2269 rtx nlabel;
2270 {
2271 int b = find_basic_block (jump);
2272
2273 if (b != -1)
2274 bb_ticks[b]++;
2275
2276 return redirect_jump (jump, nlabel);
2277 }
2278
2279 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2280 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2281 that reference values used in INSN. If we find one, then we move the
2282 REG_DEAD note to INSN.
2283
2284 This is needed to handle the case where a later insn (after INSN) has a
2285 REG_DEAD note for a register used by INSN, and this later insn subsequently
2286 gets moved before a CODE_LABEL because it is a redundant insn. In this
2287 case, mark_target_live_regs may be confused into thinking the register
2288 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
2289
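/* Hypothetical example: INSN uses r4 and some insn P between INSN and
   DELAYED_INSN carries a (REG_DEAD r4) note.  The note is moved onto INSN,
   so that if P is later moved in front of a CODE_LABEL (as a redundant
   insn) the stale note cannot make mark_target_live_regs believe r4 is
   dead there.  */
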
2290 static void
2291 update_reg_dead_notes (insn, delayed_insn)
2292 rtx insn, delayed_insn;
2293 {
2294 rtx p, link, next;
2295
2296 for (p = next_nonnote_insn (insn); p != delayed_insn;
2297 p = next_nonnote_insn (p))
2298 for (link = REG_NOTES (p); link; link = next)
2299 {
2300 next = XEXP (link, 1);
2301
2302 if (REG_NOTE_KIND (link) != REG_DEAD
2303 || GET_CODE (XEXP (link, 0)) != REG)
2304 continue;
2305
2306 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2307 {
2308 /* Move the REG_DEAD note from P to INSN. */
2309 remove_note (p, link);
2310 XEXP (link, 1) = REG_NOTES (insn);
2311 REG_NOTES (insn) = link;
2312 }
2313 }
2314 }
2315
2316 /* Called when an insn redundant with start_insn is deleted. If there
2317 is a REG_DEAD note for the target of start_insn between start_insn
2318 and stop_insn, then the REG_DEAD note needs to be deleted since the
2319 value no longer dies there.
2320
2321 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
2322 confused into thinking the register is dead. */
2323
2324 static void
2325 fix_reg_dead_note (start_insn, stop_insn)
2326 rtx start_insn, stop_insn;
2327 {
2328 rtx p, link, next;
2329
2330 for (p = next_nonnote_insn (start_insn); p != stop_insn;
2331 p = next_nonnote_insn (p))
2332 for (link = REG_NOTES (p); link; link = next)
2333 {
2334 next = XEXP (link, 1);
2335
2336 if (REG_NOTE_KIND (link) != REG_DEAD
2337 || GET_CODE (XEXP (link, 0)) != REG)
2338 continue;
2339
2340 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2341 {
2342 remove_note (p, link);
2343 return;
2344 }
2345 }
2346 }
2347
2348 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2349
2350 This handles the case of udivmodXi4 instructions which optimize their
2351 output depending on whether any REG_UNUSED notes are present.
2352 We must make sure that INSN calculates as many results as REDUNDANT_INSN
2353 does. */
2354
2355 static void
2356 update_reg_unused_notes (insn, redundant_insn)
2357 rtx insn, redundant_insn;
2358 {
2359 rtx p, link, next;
2360
2361 for (link = REG_NOTES (insn); link; link = next)
2362 {
2363 next = XEXP (link, 1);
2364
2365 if (REG_NOTE_KIND (link) != REG_UNUSED
2366 || GET_CODE (XEXP (link, 0)) != REG)
2367 continue;
2368
2369 if (! find_regno_note (redundant_insn, REG_UNUSED,
2370 REGNO (XEXP (link, 0))))
2371 remove_note (insn, link);
2372 }
2373 }
2374 \f
2375 /* Marks registers possibly live at the current place being scanned by
2376 mark_target_live_regs. Used only by the next two functions. */
2377
2378 static HARD_REG_SET current_live_regs;
2379
2380 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2381 Also only used by the next two functions. */
2382
2383 static HARD_REG_SET pending_dead_regs;
2384
2385 /* Utility function called from mark_target_live_regs via note_stores.
2386 It deadens any CLOBBERed registers and livens any SET registers. */
2387
2388 static void
2389 update_live_status (dest, x)
2390 rtx dest;
2391 rtx x;
2392 {
2393 int first_regno, last_regno;
2394 int i;
2395
2396 if (GET_CODE (dest) != REG
2397 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2398 return;
2399
2400 if (GET_CODE (dest) == SUBREG)
2401 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2402 else
2403 first_regno = REGNO (dest);
2404
2405 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2406
2407 if (GET_CODE (x) == CLOBBER)
2408 for (i = first_regno; i < last_regno; i++)
2409 CLEAR_HARD_REG_BIT (current_live_regs, i);
2410 else
2411 for (i = first_regno; i < last_regno; i++)
2412 {
2413 SET_HARD_REG_BIT (current_live_regs, i);
2414 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2415 }
2416 }
2417
2418 /* Similar to next_insn, but ignores insns in the delay slots of
2419 an annulled branch. */
2420
2421 static rtx
2422 next_insn_no_annul (insn)
2423 rtx insn;
2424 {
2425 if (insn)
2426 {
2427 /* If INSN is an annulled branch, skip any insns from the target
2428 of the branch. */
2429 if (INSN_ANNULLED_BRANCH_P (insn)
2430 && NEXT_INSN (PREV_INSN (insn)) != insn)
2431 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2432 insn = NEXT_INSN (insn);
2433
2434 insn = NEXT_INSN (insn);
2435 if (insn && GET_CODE (insn) == INSN
2436 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2437 insn = XVECEXP (PATTERN (insn), 0, 0);
2438 }
2439
2440 return insn;
2441 }
2442 \f
2443 /* A subroutine of mark_target_live_regs. Search forward from TARGET
2444 looking for registers that are set before they are used. These are dead.
2445 Stop after passing a few conditional jumps, and/or a small
2446 number of unconditional branches. */
2447
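/* Hypothetical example (register names are illustrative only): if the
   code at TARGET begins

	mov	0,r5
	add	r5,r2,r3

   then r5 and r3 are written before anything reads them, so they are
   removed from RES (dead at TARGET), while r2 stays live because it is
   used before any set of it is seen.  */
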
2448 static rtx
2449 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
2450 rtx target;
2451 struct resources *res;
2452 rtx *jump_target;
2453 int jump_count;
2454 struct resources set, needed;
2455 {
2456 HARD_REG_SET scratch;
2457 rtx insn, next;
2458 rtx jump_insn = 0;
2459 int i;
2460
2461 for (insn = target; insn; insn = next)
2462 {
2463 rtx this_jump_insn = insn;
2464
2465 next = NEXT_INSN (insn);
2466 switch (GET_CODE (insn))
2467 {
2468 case CODE_LABEL:
2469 /* After a label, any pending dead registers that weren't yet
2470 used can be made dead. */
2471 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2472 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2473 CLEAR_HARD_REG_SET (pending_dead_regs);
2474
2475 if (CODE_LABEL_NUMBER (insn) < max_label_num_after_reload)
2476 {
2477 /* All spill registers are dead at a label, so kill all of the
2478 ones that aren't needed also. */
2479 COPY_HARD_REG_SET (scratch, used_spill_regs);
2480 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2481 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2482 }
2483 continue;
2484
2485 case BARRIER:
2486 case NOTE:
2487 continue;
2488
2489 case INSN:
2490 if (GET_CODE (PATTERN (insn)) == USE)
2491 {
2492 /* If INSN is a USE made by update_block, we care about the
2493 underlying insn. Any registers set by the underlying insn
2494 are live since the insn is being done somewhere else. */
2495 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2496 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2497
2498 /* All other USE insns are to be ignored. */
2499 continue;
2500 }
2501 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2502 continue;
2503 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2504 {
2505 /* An unconditional jump can be used to fill the delay slot
2506 of a call, so search for a JUMP_INSN in any position. */
2507 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2508 {
2509 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2510 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2511 break;
2512 }
2513 }
2514 }
2515
2516 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2517 {
2518 if (jump_count++ < 10)
2519 {
2520 if (simplejump_p (this_jump_insn)
2521 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
2522 {
2523 next = JUMP_LABEL (this_jump_insn);
2524 if (jump_insn == 0)
2525 {
2526 jump_insn = insn;
2527 if (jump_target)
2528 *jump_target = JUMP_LABEL (this_jump_insn);
2529 }
2530 }
2531 else if (condjump_p (this_jump_insn)
2532 || condjump_in_parallel_p (this_jump_insn))
2533 {
2534 struct resources target_set, target_res;
2535 struct resources fallthrough_res;
2536
2537 /* We can handle conditional branches here by following
2538 both paths, and then IOR the results of the two paths
2539 together, which will give us registers that are dead
2540 on both paths. Since this is expensive, we give it
2541 a much higher cost than unconditional branches. The
2542 cost was chosen so that we will follow at most 1
2543 conditional branch. */
2544
2545 jump_count += 4;
2546 if (jump_count >= 10)
2547 break;
2548
2549 mark_referenced_resources (insn, &needed, 1);
2550
2551 /* For an annulled branch, mark_set_resources ignores slots
2552 filled by instructions from the target. This is correct
2553 if the branch is not taken. Since we are following both
2554 paths from the branch, we must also compute correct info
2555 if the branch is taken. We do this by inverting all of
2556 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2557 and then inverting the INSN_FROM_TARGET_P bits again. */
2558
2559 if (GET_CODE (PATTERN (insn)) == SEQUENCE
2560 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
2561 {
2562 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2563 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2564 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2565
2566 target_set = set;
2567 mark_set_resources (insn, &target_set, 0, 1);
2568
2569 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2570 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2571 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2572
2573 mark_set_resources (insn, &set, 0, 1);
2574 }
2575 else
2576 {
2577 mark_set_resources (insn, &set, 0, 1);
2578 target_set = set;
2579 }
2580
2581 target_res = *res;
2582 COPY_HARD_REG_SET (scratch, target_set.regs);
2583 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2584 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
2585
2586 fallthrough_res = *res;
2587 COPY_HARD_REG_SET (scratch, set.regs);
2588 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2589 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
2590
2591 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
2592 &target_res, 0, jump_count,
2593 target_set, needed);
2594 find_dead_or_set_registers (next,
2595 &fallthrough_res, 0, jump_count,
2596 set, needed);
2597 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
2598 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
2599 break;
2600 }
2601 else
2602 break;
2603 }
2604 else
2605 {
2606 /* Don't try this optimization if we expired our jump count
2607 above, since that would mean there may be an infinite loop
2608 in the function being compiled. */
2609 jump_insn = 0;
2610 break;
2611 }
2612 }
2613
2614 mark_referenced_resources (insn, &needed, 1);
2615 mark_set_resources (insn, &set, 0, 1);
2616
2617 COPY_HARD_REG_SET (scratch, set.regs);
2618 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2619 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2620 }
2621
2622 return jump_insn;
2623 }
2624
2625 /* Set the resources that are live at TARGET.
2626
2627 If TARGET is zero, we refer to the end of the current function and can
2628 return our precomputed value.
2629
2630 Otherwise, we try to find out what is live by consulting the basic block
2631 information. This is tricky, because we must consider the actions of
2632 reload and jump optimization, which occur after the basic block information
2633 has been computed.
2634
2635 Accordingly, we proceed as follows:
2636
2637 We find the previous BARRIER and look at all immediately following labels
2638 (with no intervening active insns) to see if any of them start a basic
2639 block. If we hit the start of the function first, we use block 0.
2640
2641 Once we have found a basic block and a corresponding first insn, we can
2642 accurately compute the live status from basic_block_live_regs and
2643 reg_renumber. (By starting at a label following a BARRIER, we are immune
2644 to actions taken by reload and jump.) Then we scan all insns between
2645 that point and our target. For each CLOBBER (or for call-clobbered regs
2646 when we pass a CALL_INSN), mark the appropriate registers as dead. For
2647 a SET, mark them as live.
2648
2649 We have to be careful when using REG_DEAD notes because they are not
2650 updated by such things as find_equiv_reg. So keep track of registers
2651 marked as dead that haven't been assigned to, and mark them dead at the
2652 next CODE_LABEL since reload and jump won't propagate values across labels.
2653
2654 If we cannot find the start of a basic block (should be a very rare
2655 case, if it can happen at all), mark everything as potentially live.
2656
2657 Next, scan forward from TARGET looking for things set or clobbered
2658 before they are used. These are not live.
2659
2660 Because we can be called many times on the same target, save our results
2661 in a hash table indexed by INSN_UID. */
2662
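/* Results are cached in target_hash_table, keyed by INSN_UID (target)
   modulo TARGET_HASH_PRIME.  A cached entry is trusted only while the
   bb_ticks counter of its basic block is unchanged; update_block and
   reorg_redirect_jump bump that counter whenever they may have
   invalidated the block's liveness information.  */
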
2663 static void
2664 mark_target_live_regs (target, res)
2665 rtx target;
2666 struct resources *res;
2667 {
2668 int b = -1;
2669 int i;
2670 struct target_info *tinfo;
2671 rtx insn, next;
2672 rtx jump_insn = 0;
2673 rtx jump_target;
2674 HARD_REG_SET scratch;
2675 struct resources set, needed;
2676 int jump_count = 0;
2677
2678 /* Handle end of function. */
2679 if (target == 0)
2680 {
2681 *res = end_of_function_needs;
2682 return;
2683 }
2684
2685 /* We have to assume memory is needed, but the CC isn't. */
2686 res->memory = 1;
2687 res->volatil = res->unch_memory = 0;
2688 res->cc = 0;
2689
2690 /* See if we have computed this value already. */
2691 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2692 tinfo; tinfo = tinfo->next)
2693 if (tinfo->uid == INSN_UID (target))
2694 break;
2695
2696 /* Start by getting the basic block number. If we have saved information,
2697 we can get it from there unless the insn at the start of the basic block
2698 has been deleted. */
2699 if (tinfo && tinfo->block != -1
2700 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2701 b = tinfo->block;
2702
2703 if (b == -1)
2704 b = find_basic_block (target);
2705
2706 if (tinfo)
2707 {
2708 /* If the information is up-to-date, use it. Otherwise, we will
2709 update it below. */
2710 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2711 {
2712 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2713 return;
2714 }
2715 }
2716 else
2717 {
2718 /* Allocate a place to put our results and chain it into the
2719 hash table. */
2720 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2721 tinfo->uid = INSN_UID (target);
2722 tinfo->block = b;
2723 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2724 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2725 }
2726
2727 CLEAR_HARD_REG_SET (pending_dead_regs);
2728
2729 /* If we found a basic block, get the live registers from it and update
2730 them with anything set or killed between its start and the insn before
2731 TARGET. Otherwise, we must assume everything is live. */
2732 if (b != -1)
2733 {
2734 regset regs_live = basic_block_live_at_start[b];
2735 int offset, j;
2736 REGSET_ELT_TYPE bit;
2737 int regno;
2738 rtx start_insn, stop_insn;
2739
2740 /* Compute hard regs live at start of block -- this is the real hard regs
2741 marked live, plus live pseudo regs that have been renumbered to
2742 hard regs. */
2743
2744 #ifdef HARD_REG_SET
2745 current_live_regs = *regs_live;
2746 #else
2747 COPY_HARD_REG_SET (current_live_regs, regs_live);
2748 #endif
2749
2750 for (offset = 0, i = 0; offset < regset_size; offset++)
2751 {
2752 if (regs_live[offset] == 0)
2753 i += REGSET_ELT_BITS;
2754 else
2755 for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
2756 if ((regs_live[offset] & bit)
2757 && (regno = reg_renumber[i]) >= 0)
2758 for (j = regno;
2759 j < regno + HARD_REGNO_NREGS (regno,
2760 PSEUDO_REGNO_MODE (i));
2761 j++)
2762 SET_HARD_REG_BIT (current_live_regs, j);
2763 }
2764
2765 /* Get starting and ending insn, handling the case where each might
2766 be a SEQUENCE. */
2767 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2768 stop_insn = target;
2769
2770 if (GET_CODE (start_insn) == INSN
2771 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2772 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2773
2774 if (GET_CODE (stop_insn) == INSN
2775 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2776 stop_insn = next_insn (PREV_INSN (stop_insn));
2777
2778 for (insn = start_insn; insn != stop_insn;
2779 insn = next_insn_no_annul (insn))
2780 {
2781 rtx link;
2782 rtx real_insn = insn;
2783
2784 /* If this insn is from the target of a branch, it isn't going to
2785 be used in the sequel. If it is used in both cases, this
2786 test will not be true. */
2787 if (INSN_FROM_TARGET_P (insn))
2788 continue;
2789
2790 /* If this insn is a USE made by update_block, we care about the
2791 underlying insn. */
2792 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2793 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2794 real_insn = XEXP (PATTERN (insn), 0);
2795
2796 if (GET_CODE (real_insn) == CALL_INSN)
2797 {
2798 /* CALL clobbers all call-used regs that aren't fixed except
2799 sp, ap, and fp. Do this before setting the result of the
2800 call live. */
2801 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2802 if (call_used_regs[i]
2803 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2804 && i != ARG_POINTER_REGNUM
2805 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2806 && i != HARD_FRAME_POINTER_REGNUM
2807 #endif
2808 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2809 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2810 #endif
2811 #ifdef PIC_OFFSET_TABLE_REGNUM
2812 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2813 #endif
2814 )
2815 CLEAR_HARD_REG_BIT (current_live_regs, i);
2816
2817 /* A CALL_INSN sets any global register live, since it may
2818 have been modified by the call. */
2819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2820 if (global_regs[i])
2821 SET_HARD_REG_BIT (current_live_regs, i);
2822 }
2823
2824 /* Mark anything killed in an insn to be deadened at the next
2825 label. Ignore USE insns; the only REG_DEAD notes will be for
2826 parameters. But they might be early. A CALL_INSN will usually
2827 clobber registers used for parameters. It isn't worth bothering
2828 with the unlikely case when it won't. */
2829 if ((GET_CODE (real_insn) == INSN
2830 && GET_CODE (PATTERN (real_insn)) != USE
2831 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2832 || GET_CODE (real_insn) == JUMP_INSN
2833 || GET_CODE (real_insn) == CALL_INSN)
2834 {
2835 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2836 if (REG_NOTE_KIND (link) == REG_DEAD
2837 && GET_CODE (XEXP (link, 0)) == REG
2838 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2839 {
2840 int first_regno = REGNO (XEXP (link, 0));
2841 int last_regno
2842 = (first_regno
2843 + HARD_REGNO_NREGS (first_regno,
2844 GET_MODE (XEXP (link, 0))));
2845
2846 for (i = first_regno; i < last_regno; i++)
2847 SET_HARD_REG_BIT (pending_dead_regs, i);
2848 }
2849
2850 note_stores (PATTERN (real_insn), update_live_status);
2851
2852 /* If any registers were unused after this insn, kill them.
2853 These notes will always be accurate. */
2854 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2855 if (REG_NOTE_KIND (link) == REG_UNUSED
2856 && GET_CODE (XEXP (link, 0)) == REG
2857 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2858 {
2859 int first_regno = REGNO (XEXP (link, 0));
2860 int last_regno
2861 = (first_regno
2862 + HARD_REGNO_NREGS (first_regno,
2863 GET_MODE (XEXP (link, 0))));
2864
2865 for (i = first_regno; i < last_regno; i++)
2866 CLEAR_HARD_REG_BIT (current_live_regs, i);
2867 }
2868 }
2869
2870 else if (GET_CODE (real_insn) == CODE_LABEL)
2871 {
2872 /* A label clobbers the pending dead registers since neither
2873 reload nor jump will propagate a value across a label. */
2874 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2875 CLEAR_HARD_REG_SET (pending_dead_regs);
2876 }
2877
2878 /* The beginning of the epilogue corresponds to the end of the
2879 RTL chain when there are no epilogue insns. Certain resources
2880 are implicitly required at that point. */
2881 else if (GET_CODE (real_insn) == NOTE
2882 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2883 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2884 }
2885
2886 COPY_HARD_REG_SET (res->regs, current_live_regs);
2887 tinfo->block = b;
2888 tinfo->bb_tick = bb_ticks[b];
2889 }
2890 else
2891 /* We didn't find the start of a basic block. Assume everything
2892 in use. This should happen only extremely rarely. */
2893 SET_HARD_REG_SET (res->regs);
2894
2895 CLEAR_RESOURCE (&set);
2896 CLEAR_RESOURCE (&needed);
2897
2898 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
2899 set, needed);
2900
2901 /* If we hit an unconditional branch, we have another way of finding out
2902 what is live: we can see what is live at the branch target and include
2903 anything used but not set before the branch. The only things that are
2904 live are those that are live using the above test and the test below. */
2905
2906 if (jump_insn)
2907 {
2908 struct resources new_resources;
2909 rtx stop_insn = next_active_insn (jump_insn);
2910
2911 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2912 CLEAR_RESOURCE (&set);
2913 CLEAR_RESOURCE (&needed);
2914
2915 /* Include JUMP_INSN in the needed registers. */
2916 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2917 {
2918 mark_referenced_resources (insn, &needed, 1);
2919
2920 COPY_HARD_REG_SET (scratch, needed.regs);
2921 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2922 IOR_HARD_REG_SET (new_resources.regs, scratch);
2923
2924 mark_set_resources (insn, &set, 0, 1);
2925 }
2926
2927 AND_HARD_REG_SET (res->regs, new_resources.regs);
2928 }
2929
2930 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2931 }
2932 \f
2933 /* Scan a function looking for insns that need a delay slot and find insns to
2934 put into the delay slot.
2935
2936 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2937 as calls). We do these first since we don't want jump insns (that are
2938 easier to fill) to get the only insns that could be used for non-jump insns.
2939 When it is zero, only try to fill JUMP_INSNs.
2940
2941 When slots are filled in this manner, the insns (including the
2942 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2943 it is possible to tell whether a delay slot has really been filled
2944 or not. `final' knows how to deal with this, by communicating
2945 through FINAL_SEQUENCE. */
2946
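/* A hypothetical example of the backward scan below (no particular target
   assumed):

	add	r4,r5,r6	-->	b	L1
	b	L1			add	r4,r5,r6	; delay slot
	nop

   The `add' neither sets nor uses anything the branch needs, so it is
   moved forward into the branch's delay slot and deleted from its old
   position.  */
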
2947 static void
2948 fill_simple_delay_slots (first, non_jumps_p)
2949 rtx first;
2950 int non_jumps_p;
2951 {
2952 register rtx insn, pat, trial, next_trial;
2953 register int i, j;
2954 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2955 struct resources needed, set;
2956 int slots_to_fill, slots_filled;
2957 rtx delay_list;
2958
2959 for (i = 0; i < num_unfilled_slots; i++)
2960 {
2961 int flags;
2962 /* Get the next insn to fill. If it has already had any slots assigned,
2963 we can't do anything with it. Maybe we'll improve this later. */
2964
2965 insn = unfilled_slots_base[i];
2966 if (insn == 0
2967 || INSN_DELETED_P (insn)
2968 || (GET_CODE (insn) == INSN
2969 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2970 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2971 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2972 continue;
2973
2974 if (GET_CODE (insn) == JUMP_INSN)
2975 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2976 else
2977 flags = get_jump_flags (insn, NULL_RTX);
2978 slots_to_fill = num_delay_slots (insn);
2979 if (slots_to_fill == 0)
2980 abort ();
2981
2982 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2983 says how many. After initialization, first try optimizing
2984
2985 call _foo call _foo
2986 nop add %o7,.-L1,%o7
2987 b,a L1
2988 nop
2989
2990 If this case applies, the delay slot of the call is filled with
2991 the unconditional jump. This is done first to avoid having the
2992 delay slot of the call filled in the backward scan. Also, since
2993 the unconditional jump is likely to also have a delay slot, that
2994 insn must exist when it is subsequently scanned.
2995
2996 This is tried on each insn with delay slots as some machines
2997 have insns which perform calls, but are not represented as
2998 CALL_INSNs. */
2999
3000 slots_filled = 0;
3001 delay_list = 0;
3002
3003 if ((trial = next_active_insn (insn))
3004 && GET_CODE (trial) == JUMP_INSN
3005 && simplejump_p (trial)
3006 && eligible_for_delay (insn, slots_filled, trial, flags)
3007 && no_labels_between_p (insn, trial))
3008 {
3009 rtx *tmp;
3010 slots_filled++;
3011 delay_list = add_to_delay_list (trial, delay_list);
3012
3013 /* TRIAL may have had its delay slot filled, then unfilled. When
3014 the delay slot is unfilled, TRIAL is placed back on the unfilled
3015 slots obstack. Unfortunately, it is placed on the end of the
3016 obstack, not in its original location. Therefore, we must search
3017 from entry i + 1 to the end of the unfilled slots obstack to
3018 try and find TRIAL. */
3019 tmp = &unfilled_slots_base[i + 1];
3020 while (*tmp != trial && tmp != unfilled_slots_next)
3021 tmp++;
3022
3023 /* Remove the unconditional jump from consideration for delay slot
3024 filling and unthread it. */
3025 if (*tmp == trial)
3026 *tmp = 0;
3027 {
3028 rtx next = NEXT_INSN (trial);
3029 rtx prev = PREV_INSN (trial);
3030 if (prev)
3031 NEXT_INSN (prev) = next;
3032 if (next)
3033 PREV_INSN (next) = prev;
3034 }
3035 }
3036
3037 /* Now, scan backwards from the insn to search for a potential
3038 delay-slot candidate. Stop searching when a label or jump is hit.
3039
3040 For each candidate, if it is to go into the delay slot (moved
3041 forward in execution sequence), it must not need or set any resources
3042 that were set by later insns and must not set any resources that
3043 are needed for those insns.
3044
3045 The delay slot insn itself sets resources unless it is a call
3046 (in which case the called routine, not the insn itself, is doing
3047 the setting). */
3048
3049 if (slots_filled < slots_to_fill)
3050 {
3051 CLEAR_RESOURCE (&needed);
3052 CLEAR_RESOURCE (&set);
3053 mark_set_resources (insn, &set, 0, 0);
3054 mark_referenced_resources (insn, &needed, 0);
3055
3056 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
3057 trial = next_trial)
3058 {
3059 next_trial = prev_nonnote_insn (trial);
3060
3061 /* This must be an INSN or CALL_INSN. */
3062 pat = PATTERN (trial);
3063
3064 /* USE and CLOBBER at this level are just for flow; ignore them. */
3065 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3066 continue;
3067
3068 /* Check for resource conflict first, to avoid unnecessary
3069 splitting. */
3070 if (! insn_references_resource_p (trial, &set, 1)
3071 && ! insn_sets_resource_p (trial, &set, 1)
3072 && ! insn_sets_resource_p (trial, &needed, 1)
3073 #ifdef HAVE_cc0
3074 /* Can't separate set of cc0 from its use. */
3075 && ! (reg_mentioned_p (cc0_rtx, pat)
3076 && ! sets_cc0_p (pat))
3077 #endif
3078 )
3079 {
3080 trial = try_split (pat, trial, 1);
3081 next_trial = prev_nonnote_insn (trial);
3082 if (eligible_for_delay (insn, slots_filled, trial, flags))
3083 {
3084 /* In this case, we are searching backward, so if we
3085 find insns to put on the delay list, we want
3086 to put them at the head, rather than the
3087 tail, of the list. */
3088
3089 update_reg_dead_notes (trial, insn);
3090 delay_list = gen_rtx (INSN_LIST, VOIDmode,
3091 trial, delay_list);
3092 update_block (trial, trial);
3093 delete_insn (trial);
3094 if (slots_to_fill == ++slots_filled)
3095 break;
3096 continue;
3097 }
3098 }
3099
3100 mark_set_resources (trial, &set, 0, 1);
3101 mark_referenced_resources (trial, &needed, 1);
3102 }
3103 }
3104
3105 /* If all needed slots haven't been filled, we come here. */
3106
3107 /* Try to optimize case of jumping around a single insn. */
3108 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3109 if (slots_filled != slots_to_fill
3110 && delay_list == 0
3111 && GET_CODE (insn) == JUMP_INSN
3112 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
3113 {
3114 delay_list = optimize_skip (insn);
3115 if (delay_list)
3116 slots_filled += 1;
3117 }
3118 #endif
3119
3120 /* Try to get insns from beyond the insn needing the delay slot.
3121 These insns can neither set nor reference resources set in insns being
3122 skipped, cannot set resources in the insn being skipped, and, if this
3123 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
3124 call might not return).
3125
3126 There used to be code which continued past the target label if
3127 we saw all uses of the target label. This code did not work,
3128 because it failed to account for some instructions which were
3129 both annulled and marked as from the target. This can happen as a
3130 result of optimize_skip. Since this code was redundant with
3131 fill_eager_delay_slots anyway, it was just deleted. */
3132
3133 if (slots_filled != slots_to_fill
3134 && (GET_CODE (insn) != JUMP_INSN
3135 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
3136 && ! simplejump_p (insn)
3137 && JUMP_LABEL (insn) != 0)))
3138 {
3139 rtx target = 0;
3140 int maybe_never = 0;
3141 struct resources needed_at_jump;
3142
3143 CLEAR_RESOURCE (&needed);
3144 CLEAR_RESOURCE (&set);
3145
3146 if (GET_CODE (insn) == CALL_INSN)
3147 {
3148 mark_set_resources (insn, &set, 0, 1);
3149 mark_referenced_resources (insn, &needed, 1);
3150 maybe_never = 1;
3151 }
3152 else
3153 {
3154 mark_set_resources (insn, &set, 0, 1);
3155 mark_referenced_resources (insn, &needed, 1);
3156 if (GET_CODE (insn) == JUMP_INSN)
3157 target = JUMP_LABEL (insn);
3158 }
3159
3160 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3161 {
3162 rtx pat, trial_delay;
3163
3164 next_trial = next_nonnote_insn (trial);
3165
3166 if (GET_CODE (trial) == CODE_LABEL
3167 || GET_CODE (trial) == BARRIER)
3168 break;
3169
3170 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3171 pat = PATTERN (trial);
3172
3173 /* Stand-alone USE and CLOBBER are just for flow. */
3174 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3175 continue;
3176
3177 /* If this already has filled delay slots, get the insn needing
3178 the delay slots. */
3179 if (GET_CODE (pat) == SEQUENCE)
3180 trial_delay = XVECEXP (pat, 0, 0);
3181 else
3182 trial_delay = trial;
3183
3184 /* If this is a jump insn to our target, indicate that we have
3185 seen another jump to it. If we aren't handling a conditional
3186 jump, stop our search. Otherwise, compute the needs at its
3187 target and add them to NEEDED. */
3188 if (GET_CODE (trial_delay) == JUMP_INSN)
3189 {
3190 if (target == 0)
3191 break;
3192 else if (JUMP_LABEL (trial_delay) != target)
3193 {
3194 mark_target_live_regs
3195 (next_active_insn (JUMP_LABEL (trial_delay)),
3196 &needed_at_jump);
3197 needed.memory |= needed_at_jump.memory;
3198 needed.unch_memory |= needed_at_jump.unch_memory;
3199 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3200 }
3201 }
3202
3203 /* See if we have a resource problem before we try to
3204 split. */
3205 if (target == 0
3206 && GET_CODE (pat) != SEQUENCE
3207 && ! insn_references_resource_p (trial, &set, 1)
3208 && ! insn_sets_resource_p (trial, &set, 1)
3209 && ! insn_sets_resource_p (trial, &needed, 1)
3210 #ifdef HAVE_cc0
3211 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3212 #endif
3213 && ! (maybe_never && may_trap_p (pat))
3214 && (trial = try_split (pat, trial, 0))
3215 && eligible_for_delay (insn, slots_filled, trial, flags))
3216 {
3217 next_trial = next_nonnote_insn (trial);
3218 delay_list = add_to_delay_list (trial, delay_list);
3219
3220 #ifdef HAVE_cc0
3221 if (reg_mentioned_p (cc0_rtx, pat))
3222 link_cc0_insns (trial);
3223 #endif
3224
3225 delete_insn (trial);
3226 if (slots_to_fill == ++slots_filled)
3227 break;
3228 continue;
3229 }
3230
3231 mark_set_resources (trial, &set, 0, 1);
3232 mark_referenced_resources (trial, &needed, 1);
3233
3234 /* Ensure we don't put insns between the setting of cc and the
3235 comparison by moving a setting of cc into an earlier delay
3236 slot since these insns could clobber the condition code. */
3237 set.cc = 1;
3238
3239 /* If this is a call or jump, we might not get here. */
3240 if (GET_CODE (trial_delay) == CALL_INSN
3241 || GET_CODE (trial_delay) == JUMP_INSN)
3242 maybe_never = 1;
3243 }
3244
3245 /* If there are slots left to fill and our search was stopped by an
3246 unconditional branch, try the insn at the branch target. We can
3247 redirect the branch if it works.
3248
3249 Don't do this if the insn at the branch target is a branch. */
3250 if (slots_to_fill != slots_filled
3251 && trial
3252 && GET_CODE (trial) == JUMP_INSN
3253 && simplejump_p (trial)
3254 && (target == 0 || JUMP_LABEL (trial) == target)
3255 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3256 && ! (GET_CODE (next_trial) == INSN
3257 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3258 && GET_CODE (next_trial) != JUMP_INSN
3259 && ! insn_references_resource_p (next_trial, &set, 1)
3260 && ! insn_sets_resource_p (next_trial, &set, 1)
3261 && ! insn_sets_resource_p (next_trial, &needed, 1)
3262 #ifdef HAVE_cc0
3263 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3264 #endif
3265 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3266 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3267 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3268 {
3269 rtx new_label = next_active_insn (next_trial);
3270
3271 if (new_label != 0)
3272 new_label = get_label_before (new_label);
3273 else
3274 new_label = find_end_label ();
3275
3276 delay_list
3277 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3278 slots_filled++;
3279 reorg_redirect_jump (trial, new_label);
3280
3281 /* If we merged because we both jumped to the same place,
3282 redirect the original insn also. */
3283 if (target)
3284 reorg_redirect_jump (insn, new_label);
3285 }
3286 }
3287
3288 /* If this is an unconditional jump, then try to get insns from the
3289 target of the jump. */
3290 if (GET_CODE (insn) == JUMP_INSN
3291 && simplejump_p (insn)
3292 && slots_filled != slots_to_fill)
3293 delay_list
3294 = fill_slots_from_thread (insn, const_true_rtx,
3295 next_active_insn (JUMP_LABEL (insn)),
3296 NULL, 1, 1,
3297 own_thread_p (JUMP_LABEL (insn),
3298 JUMP_LABEL (insn), 0),
3299 0, slots_to_fill, &slots_filled);
3300
3301 if (delay_list)
3302 unfilled_slots_base[i]
3303 = emit_delay_sequence (insn, delay_list,
3304 slots_filled, slots_to_fill);
3305
3306 if (slots_to_fill == slots_filled)
3307 unfilled_slots_base[i] = 0;
3308
3309 note_delay_statistics (slots_filled, 0);
3310 }
3311
3312 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3313 /* See if the epilogue needs any delay slots. Try to fill them if so.
3314 The only thing we can do is scan backwards from the end of the
3315 function. If we did this in a previous pass, it is incorrect to do it
3316 again. */
3317 if (current_function_epilogue_delay_list)
3318 return;
3319
3320 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3321 if (slots_to_fill == 0)
3322 return;
3323
3324 slots_filled = 0;
3325 CLEAR_RESOURCE (&set);
3326
3327 /* The frame pointer and stack pointer are needed at the beginning of
3328 the epilogue, so instructions setting them can not be put in the
3329 epilogue delay slot. However, everything else needed at function
3330 end is safe, so we don't want to use end_of_function_needs here. */
3331 CLEAR_RESOURCE (&needed);
3332 if (frame_pointer_needed)
3333 {
3334 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3335 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3336 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3337 #endif
3338 #ifdef EXIT_IGNORE_STACK
3339 if (! EXIT_IGNORE_STACK)
3340 #endif
3341 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3342 }
3343 else
3344 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3345
3346 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3347 trial = PREV_INSN (trial))
3348 {
3349 if (GET_CODE (trial) == NOTE)
3350 continue;
3351 pat = PATTERN (trial);
3352 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3353 continue;
3354
3355 if (! insn_references_resource_p (trial, &set, 1)
3356 && ! insn_sets_resource_p (trial, &needed, 1)
3357 && ! insn_sets_resource_p (trial, &set, 1)
3358 #ifdef HAVE_cc0
3359 /* Don't want to mess with cc0 here. */
3360 && ! reg_mentioned_p (cc0_rtx, pat)
3361 #endif
3362 )
3363 {
3364 trial = try_split (pat, trial, 1);
3365 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3366 {
3367 /* Here as well we are searching backward, so put the
3368 insns we find on the head of the list. */
3369
3370 current_function_epilogue_delay_list
3371 = gen_rtx (INSN_LIST, VOIDmode, trial,
3372 current_function_epilogue_delay_list);
3373 mark_referenced_resources (trial, &end_of_function_needs, 1);
3374 update_block (trial, trial);
3375 delete_insn (trial);
3376
3377 /* Clear deleted bit so final.c will output the insn. */
3378 INSN_DELETED_P (trial) = 0;
3379
3380 if (slots_to_fill == ++slots_filled)
3381 break;
3382 continue;
3383 }
3384 }
3385
3386 mark_set_resources (trial, &set, 0, 1);
3387 mark_referenced_resources (trial, &needed, 1);
3388 }
3389
3390 note_delay_statistics (slots_filled, 0);
3391 #endif
3392 }
3393 \f
3394 /* Try to find insns to place in delay slots.
3395
3396 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3397 or is an unconditional branch if CONDITION is const_true_rtx.
3398 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3399
3400 THREAD is a flow-of-control, either the insns to be executed if the
3401 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3402
3403 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3404 to see if any potential delay slot insns set things needed there.
3405
3406 LIKELY is non-zero if it is extremely likely that the branch will be
3407 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3408 end of a loop back up to the top.
3409
3410 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3411 thread. I.e., it is the fallthrough code of our jump or the target of the
3412 jump when we are the only jump going there.
3413
3414 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3415 case, we can only take insns from the head of the thread for our delay
3416 slot. We then adjust the jump to point after the insns we have taken. */
3417
3418 static rtx
3419 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3420 thread_if_true, own_thread, own_opposite_thread,
3421 slots_to_fill, pslots_filled)
3422 rtx insn;
3423 rtx condition;
3424 rtx thread, opposite_thread;
3425 int likely;
3426 int thread_if_true;
3427 int own_thread, own_opposite_thread;
3428 int slots_to_fill, *pslots_filled;
3429 {
3430 rtx new_thread;
3431 rtx delay_list = 0;
3432 struct resources opposite_needed, set, needed;
3433 rtx trial;
3434 int lose = 0;
3435 int must_annul = 0;
3436 int flags;
3437
3438 /* Validate our arguments. */
3439 if ((condition == const_true_rtx && ! thread_if_true)
3440 || (! own_thread && ! thread_if_true))
3441 abort ();
3442
3443 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3444
3445 /* If our thread is the end of the subroutine, we can't get any delay
3446 insns from that. */
3447 if (thread == 0)
3448 return 0;
3449
3450 /* If this is an unconditional branch, nothing is needed at the
3451 opposite thread. Otherwise, compute what is needed there. */
3452 if (condition == const_true_rtx)
3453 CLEAR_RESOURCE (&opposite_needed);
3454 else
3455 mark_target_live_regs (opposite_thread, &opposite_needed);
3456
3457 /* If the insn at THREAD can be split, do it here to avoid having to
3458 update THREAD and NEW_THREAD if it is done in the loop below. Also
3459 initialize NEW_THREAD. */
3460
3461 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3462
3463 /* Scan insns at THREAD. We are looking for an insn that can be removed
3464 from THREAD (it neither sets nor references resources that were set
3465 ahead of it and it doesn't set anything needed by the insns ahead of
3466 it) and that either can be placed in an annulling insn or isn't
3467 needed at OPPOSITE_THREAD. */
3468
3469 CLEAR_RESOURCE (&needed);
3470 CLEAR_RESOURCE (&set);
3471
3472 /* If we do not own this thread, we must stop as soon as we find
3473 something that we can't put in a delay slot, since all we can do
3474 is branch into THREAD at a later point. Therefore, labels stop
3475 the search if this is not the `true' thread. */
3476
3477 for (trial = thread;
3478 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3479 trial = next_nonnote_insn (trial))
3480 {
3481 rtx pat, old_trial;
3482
3483 /* If we have passed a label, we no longer own this thread. */
3484 if (GET_CODE (trial) == CODE_LABEL)
3485 {
3486 own_thread = 0;
3487 continue;
3488 }
3489
3490 pat = PATTERN (trial);
3491 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3492 continue;
3493
3494 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3495 don't separate or copy insns that set and use CC0. */
3496 if (! insn_references_resource_p (trial, &set, 1)
3497 && ! insn_sets_resource_p (trial, &set, 1)
3498 && ! insn_sets_resource_p (trial, &needed, 1)
3499 #ifdef HAVE_cc0
3500 && ! (reg_mentioned_p (cc0_rtx, pat)
3501 && (! own_thread || ! sets_cc0_p (pat)))
3502 #endif
3503 )
3504 {
3505 rtx prior_insn;
3506
3507 /* If TRIAL is redundant with some insn before INSN, we don't
3508 actually need to add it to the delay list; we can merely pretend
3509 we did. */
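/* When we own the thread the duplicate can simply be deleted;
   otherwise we only advance NEW_THREAD past it, so that if we later
   redirect the branch it will skip the redundant insn.  */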
3510 if ((prior_insn = redundant_insn (trial, insn, delay_list)) != 0)
3511 {
3512 fix_reg_dead_note (prior_insn, insn);
3513 if (own_thread)
3514 {
3515 update_block (trial, thread);
3516 if (trial == thread)
3517 {
3518 thread = next_active_insn (thread);
3519 if (new_thread == trial)
3520 new_thread = thread;
3521 }
3522
3523 delete_insn (trial);
3524 }
3525 else
3526 {
3527 update_reg_unused_notes (prior_insn, trial);
3528 new_thread = next_active_insn (trial);
3529 }
3530
3531 continue;
3532 }
3533
3534 /* There are two ways we can win: If TRIAL doesn't set anything
3535 needed at the opposite thread and can't trap, or if it can
3536 go into an annulled delay slot. */
3537 if (condition == const_true_rtx
3538 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3539 && ! may_trap_p (pat)))
3540 {
3541 old_trial = trial;
3542 trial = try_split (pat, trial, 0);
3543 if (new_thread == old_trial)
3544 new_thread = trial;
3545 if (thread == old_trial)
3546 thread = trial;
3547 pat = PATTERN (trial);
3548 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3549 goto winner;
3550 }
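/* Otherwise TRIAL conflicts with the opposite thread (or may trap),
   so it is only usable if the target can annul this kind of slot:
   ANNUL_IFTRUE_SLOTS covers insns taken from the fall-through thread,
   ANNUL_IFFALSE_SLOTS insns taken from the branch target.  The
   `0 ||' form makes the test vanish if neither macro is defined.  */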
3551 else if (0
3552 #ifdef ANNUL_IFTRUE_SLOTS
3553 || ! thread_if_true
3554 #endif
3555 #ifdef ANNUL_IFFALSE_SLOTS
3556 || thread_if_true
3557 #endif
3558 )
3559 {
3560 old_trial = trial;
3561 trial = try_split (pat, trial, 0);
3562 if (new_thread == old_trial)
3563 new_thread = trial;
3564 if (thread == old_trial)
3565 thread = trial;
3566 pat = PATTERN (trial);
3567 if ((thread_if_true
3568 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3569 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3570 {
3571 rtx temp;
3572
3573 must_annul = 1;
3574 winner:
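/* The non-annulling case above jumps straight here; both paths arrive
   with TRIAL validated for the next delay slot of INSN.  */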
3575
3576 #ifdef HAVE_cc0
3577 if (reg_mentioned_p (cc0_rtx, pat))
3578 link_cc0_insns (trial);
3579 #endif
3580
3581 /* If we own this thread, delete the insn. If this is the
3582 destination of a branch, show that a basic block's status
3583 may have been updated. In any case, mark the new
3584 starting point of this thread. */
3585 if (own_thread)
3586 {
3587 update_block (trial, thread);
3588 delete_insn (trial);
3589 }
3590 else
3591 new_thread = next_active_insn (trial);
3592
3593 temp = own_thread ? trial : copy_rtx (trial);
3594 if (thread_if_true)
3595 INSN_FROM_TARGET_P (temp) = 1;
3596
3597 delay_list = add_to_delay_list (temp, delay_list);
3598
3599 if (slots_to_fill == ++(*pslots_filled))
3600 {
3601 /* Even though we have filled all the slots, we
3602 may be branching to a location that has a
3603 redundant insn. Skip any if so. */
3604 while (new_thread && ! own_thread
3605 && ! insn_sets_resource_p (new_thread, &set, 1)
3606 && ! insn_sets_resource_p (new_thread, &needed, 1)
3607 && ! insn_references_resource_p (new_thread,
3608 &set, 1)
3609 && redundant_insn (new_thread, insn, delay_list))
3610 new_thread = next_active_insn (new_thread);
3611 break;
3612 }
3613
3614 continue;
3615 }
3616 }
3617 }
3618
3619 /* This insn can't go into a delay slot. */
3620 lose = 1;
3621 mark_set_resources (trial, &set, 0, 1);
3622 mark_referenced_resources (trial, &needed, 1);
3623
3624 /* Ensure we don't put insns between the setting of cc and the comparison
3625 by moving a setting of cc into an earlier delay slot since these insns
3626 could clobber the condition code. */
3627 set.cc = 1;
3628
3629 /* If this insn is a register-register copy and the next insn has
3630 a use of our destination, change it to use our source. That way,
3631 it will become a candidate for our delay slot the next time
3632 through this loop. This case occurs commonly in loops that
3633 scan a list.
3634
3635 We could check for more complex cases than those tested below,
3636 but it doesn't seem worth it. It might also be a good idea to try
3637 to swap the two insns. That might do better.
3638
3639 We can't do this if the next insn modifies our destination, because
3640 that would make the replacement into the insn invalid. We also can't
3641 do this if it modifies our source, because it might be an earlyclobber
3642 operand. This latter test also prevents updating the contents of
3643 a PRE_INC. */
3644
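/* For example (registers purely illustrative): if TRIAL is "r3 = r4"
   and the next insn uses r3, rewriting that use to r4 may leave the
   copy with no conflict, letting it drop into the delay slot on the
   next iteration.  */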
3645 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3646 && GET_CODE (SET_SRC (pat)) == REG
3647 && GET_CODE (SET_DEST (pat)) == REG)
3648 {
3649 rtx next = next_nonnote_insn (trial);
3650
3651 if (next && GET_CODE (next) == INSN
3652 && GET_CODE (PATTERN (next)) != USE
3653 && ! reg_set_p (SET_DEST (pat), next)
3654 && ! reg_set_p (SET_SRC (pat), next)
3655 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3656 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3657 }
3658 }
3659
3660 /* If we stopped on a branch insn that has delay slots, see if we can
3661 steal some of the insns in those slots. */
3662 if (trial && GET_CODE (trial) == INSN
3663 && GET_CODE (PATTERN (trial)) == SEQUENCE
3664 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3665 {
3666 /* If this is the `true' thread, we will want to follow the jump,
3667 so we can only do this if we have taken everything up to here. */
3668 if (thread_if_true && trial == new_thread)
3669 delay_list
3670 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3671 delay_list, &set, &needed,
3672 &opposite_needed, slots_to_fill,
3673 pslots_filled, &must_annul,
3674 &new_thread);
3675 else if (! thread_if_true)
3676 delay_list
3677 = steal_delay_list_from_fallthrough (insn, condition,
3678 PATTERN (trial),
3679 delay_list, &set, &needed,
3680 &opposite_needed, slots_to_fill,
3681 pslots_filled, &must_annul);
3682 }
3683
3684 /* If we haven't found anything for this delay slot and it is very
3685 likely that the branch will be taken, see if the insn at our target
3686 increments or decrements a register with an increment that does not
3687 depend on the destination register. If so, try to place the opposite
3688 arithmetic insn after the jump insn and put the arithmetic insn in the
3689 delay slot. If we can't do this, return. */
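/* For example (registers purely illustrative): if the likely-taken
   branch targets "r4 = r4 + 4", that insn can fill the delay slot while
   a compensating "r4 = r4 - 4" is emitted just after the branch so the
   fall-through path still sees the original value.  */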
3690 if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
3691 {
3692 rtx pat = PATTERN (new_thread);
3693 rtx dest;
3694 rtx src;
3695
3696 trial = new_thread;
3697 pat = PATTERN (trial);
3698
3699 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3700 || ! eligible_for_delay (insn, 0, trial, flags))
3701 return 0;
3702
3703 dest = SET_DEST (pat), src = SET_SRC (pat);
3704 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3705 && rtx_equal_p (XEXP (src, 0), dest)
3706 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3707 {
3708 rtx other = XEXP (src, 1);
3709 rtx new_arith;
3710 rtx ninsn;
3711
3712 /* If this is a constant adjustment, use the same code with
3713 the negated constant. Otherwise, reverse the sense of the
3714 arithmetic. */
3715 if (GET_CODE (other) == CONST_INT)
3716 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3717 negate_rtx (GET_MODE (src), other));
3718 else
3719 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3720 GET_MODE (src), dest, other);
3721
3722 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3723 insn);
3724
3725 if (recog_memoized (ninsn) < 0
3726 || (insn_extract (ninsn),
3727 ! constrain_operands (INSN_CODE (ninsn), 1)))
3728 {
3729 delete_insn (ninsn);
3730 return 0;
3731 }
3732
3733 if (own_thread)
3734 {
3735 update_block (trial, thread);
3736 delete_insn (trial);
3737 }
3738 else
3739 new_thread = next_active_insn (trial);
3740
3741 ninsn = own_thread ? trial : copy_rtx (trial);
3742 if (thread_if_true)
3743 INSN_FROM_TARGET_P (ninsn) = 1;
3744
3745 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3746 (*pslots_filled)++;
3747 }
3748 }
3749
3750 if (delay_list && must_annul)
3751 INSN_ANNULLED_BRANCH_P (insn) = 1;
3752
3753 /* If we are to branch into the middle of this thread, find an appropriate
3754 label or make a new one if none, and redirect INSN to it. If we hit the
3755 end of the function, use the end-of-function label. */
3756 if (new_thread != thread)
3757 {
3758 rtx label;
3759
3760 if (! thread_if_true)
3761 abort ();
3762
3763 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3764 && (simplejump_p (new_thread)
3765 || GET_CODE (PATTERN (new_thread)) == RETURN)
3766 && redirect_with_delay_list_safe_p (insn,
3767 JUMP_LABEL (new_thread),
3768 delay_list))
3769 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3770
3771 if (new_thread == 0)
3772 label = find_end_label ();
3773 else if (GET_CODE (new_thread) == CODE_LABEL)
3774 label = new_thread;
3775 else
3776 label = get_label_before (new_thread);
3777
3778 reorg_redirect_jump (insn, label);
3779 }
3780
3781 return delay_list;
3782 }
3783 \f
3784 /* Make another attempt to find insns to place in delay slots.
3785
3786 We previously looked for insns located in front of the delay insn
3787 and, for non-jump delay insns, located behind the delay insn.
3788
3789 Here we only try to schedule jump insns, moving insns into the delay slot
3790 from either the branch target or the following insns. If annulling is
3791 supported, we are likely to succeed. Otherwise, we can do this only
3792 if it is safe. */
3793
3794 static void
3795 fill_eager_delay_slots (first)
3796 rtx first;
3797 {
3798 register rtx insn;
3799 register int i;
3800 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3801
3802 for (i = 0; i < num_unfilled_slots; i++)
3803 {
3804 rtx condition;
3805 rtx target_label, insn_at_target, fallthrough_insn;
3806 rtx delay_list = 0;
3807 int own_target;
3808 int own_fallthrough;
3809 int prediction, slots_to_fill, slots_filled;
3810
3811 insn = unfilled_slots_base[i];
3812 if (insn == 0
3813 || INSN_DELETED_P (insn)
3814 || GET_CODE (insn) != JUMP_INSN
3815 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3816 continue;
3817
3818 slots_to_fill = num_delay_slots (insn);
3819 if (slots_to_fill == 0)
3820 abort ();
3821
3822 slots_filled = 0;
3823 target_label = JUMP_LABEL (insn);
3824 condition = get_branch_condition (insn, target_label);
3825
3826 if (condition == 0)
3827 continue;
3828
3829 /* Get the next active fallthrough and target insns and see if we own
3830 them. Then see whether the branch is likely true. We don't need
3831 to do a lot of this for unconditional branches. */
3832
3833 insn_at_target = next_active_insn (target_label);
3834 own_target = own_thread_p (target_label, target_label, 0);
3835
3836 if (condition == const_true_rtx)
3837 {
3838 own_fallthrough = 0;
3839 fallthrough_insn = 0;
3840 prediction = 2;
3841 }
3842 else
3843 {
3844 fallthrough_insn = next_active_insn (insn);
3845 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3846 prediction = mostly_true_jump (insn, condition);
3847 }
3848
3849 /* If this insn is expected to branch, first try to get insns from our
3850 target, then our fallthrough insns. If it is not expected to branch,
3851 try the other order. */
3852
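/* PREDICTION is 2 for an unconditional branch; for a conditional
   branch it is positive when mostly_true_jump expects it to be taken.  */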
3853 if (prediction > 0)
3854 {
3855 delay_list
3856 = fill_slots_from_thread (insn, condition, insn_at_target,
3857 fallthrough_insn, prediction == 2, 1,
3858 own_target, own_fallthrough,
3859 slots_to_fill, &slots_filled);
3860
3861 if (delay_list == 0 && own_fallthrough)
3862 {
3863 /* Even though we didn't find anything for delay slots,
3864 we might have found a redundant insn which we deleted
3865 from the thread that was filled. So we have to recompute
3866 the next insn at the target. */
3867 target_label = JUMP_LABEL (insn);
3868 insn_at_target = next_active_insn (target_label);
3869
3870 delay_list
3871 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3872 insn_at_target, 0, 0,
3873 own_fallthrough, own_target,
3874 slots_to_fill, &slots_filled);
3875 }
3876 }
3877 else
3878 {
3879 if (own_fallthrough)
3880 delay_list
3881 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3882 insn_at_target, 0, 0,
3883 own_fallthrough, own_target,
3884 slots_to_fill, &slots_filled);
3885
3886 if (delay_list == 0)
3887 delay_list
3888 = fill_slots_from_thread (insn, condition, insn_at_target,
3889 next_active_insn (insn), 0, 1,
3890 own_target, own_fallthrough,
3891 slots_to_fill, &slots_filled);
3892 }
3893
3894 if (delay_list)
3895 unfilled_slots_base[i]
3896 = emit_delay_sequence (insn, delay_list,
3897 slots_filled, slots_to_fill);
3898
3899 if (slots_to_fill == slots_filled)
3900 unfilled_slots_base[i] = 0;
3901
3902 note_delay_statistics (slots_filled, 1);
3903 }
3904 }
3905 \f
3906 /* Once we have tried two ways to fill a delay slot, make a pass over the
3907 code to try to improve the results and to do such things as more jump
3908 threading. */
3909
3910 static void
3911 relax_delay_slots (first)
3912 rtx first;
3913 {
3914 register rtx insn, next, pat;
3915 register rtx trial, delay_insn, target_label;
3916
3917 /* Look at every JUMP_INSN and see if we can improve it. */
3918 for (insn = first; insn; insn = next)
3919 {
3920 rtx other;
3921
3922 next = next_active_insn (insn);
3923
3924 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3925 the next insn, or jumps to a label that is not the last of a
3926 group of consecutive labels. */
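/* In the first case we thread the jump to its final destination
   (follow_jumps); in the second we delete the jump entirely; in the
   third we simply retarget it at the last label of the group.  */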
3927 if (GET_CODE (insn) == JUMP_INSN
3928 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3929 && (target_label = JUMP_LABEL (insn)) != 0)
3930 {
3931 target_label = follow_jumps (target_label);
3932 target_label = prev_label (next_active_insn (target_label));
3933
3934 if (target_label == 0)
3935 target_label = find_end_label ();
3936
3937 if (next_active_insn (target_label) == next
3938 && ! condjump_in_parallel_p (insn))
3939 {
3940 delete_jump (insn);
3941 continue;
3942 }
3943
3944 if (target_label != JUMP_LABEL (insn))
3945 reorg_redirect_jump (insn, target_label);
3946
3947 /* See if this jump branches around an unconditional jump.
3948 If so, invert this jump and point it to the target of the
3949 second jump. */
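/* E.g., rewrite "if (cc) goto L1; goto L2; L1:" as
   "if (! cc) goto L2;" when no label intervenes.  */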
3950 if (next && GET_CODE (next) == JUMP_INSN
3951 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3952 && next_active_insn (target_label) == next_active_insn (next)
3953 && no_labels_between_p (insn, next))
3954 {
3955 rtx label = JUMP_LABEL (next);
3956
3957 /* Be careful how we do this to avoid deleting code or
3958 labels that are momentarily dead. See similar optimization
3959 in jump.c.
3960
3961 We also need to ensure we properly handle the case when
3962 invert_jump fails. */
3963
3964 ++LABEL_NUSES (target_label);
3965 if (label)
3966 ++LABEL_NUSES (label);
3967
3968 if (invert_jump (insn, label))
3969 {
3970 delete_insn (next);
3971 next = insn;
3972 }
3973
3974 if (label)
3975 --LABEL_NUSES (label);
3976
3977 if (--LABEL_NUSES (target_label) == 0)
3978 delete_insn (target_label);
3979
3980 continue;
3981 }
3982 }
3983
3984 /* If this is an unconditional jump and the previous insn is a
3985 conditional jump, try reversing the condition of the previous
3986 insn and swapping our targets. The next pass might be able to
3987 fill the slots.
3988
3989 Don't do this if we expect the conditional branch to be true, because
3990 we would then be making the more common case longer. */
3991
3992 if (GET_CODE (insn) == JUMP_INSN
3993 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3994 && (other = prev_active_insn (insn)) != 0
3995 && (condjump_p (other) || condjump_in_parallel_p (other))
3996 && no_labels_between_p (other, insn)
3997 && 0 < mostly_true_jump (other,
3998 get_branch_condition (other,
3999 JUMP_LABEL (other))))
4000 {
4001 rtx other_target = JUMP_LABEL (other);
4002 target_label = JUMP_LABEL (insn);
4003
4004 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
4005 as we move the label. */
4006 if (other_target)
4007 ++LABEL_NUSES (other_target);
4008
4009 if (invert_jump (other, target_label))
4010 reorg_redirect_jump (insn, other_target);
4011
4012 if (other_target)
4013 --LABEL_NUSES (other_target);
4014 }
4015
4016 /* Now look only at cases where we have filled a delay slot. */
4017 if (GET_CODE (insn) != INSN
4018 || GET_CODE (PATTERN (insn)) != SEQUENCE)
4019 continue;
4020
4021 pat = PATTERN (insn);
4022 delay_insn = XVECEXP (pat, 0, 0);
4023
4024 /* See if the first insn in the delay slot is redundant with some
4025 previous insn. Remove it from the delay slot if so; then set up
4026 to reprocess this insn. */
4027 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
4028 {
4029 delete_from_delay_slot (XVECEXP (pat, 0, 1));
4030 next = prev_active_insn (next);
4031 continue;
4032 }
4033
4034 /* Now look only at the cases where we have a filled JUMP_INSN. */
4035 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4036 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
4037 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
4038 continue;
4039
4040 target_label = JUMP_LABEL (delay_insn);
4041
4042 if (target_label)
4043 {
4044 /* If this jump goes to another unconditional jump, thread it, but
4045 don't convert a jump into a RETURN here. */
4046 trial = follow_jumps (target_label);
4047 /* We use next_real_insn instead of next_active_insn, so that
4048 the special USE insns emitted by reorg won't be ignored.
4049 If they are ignored, then they will get deleted if target_label
4050 is now unreachable, and that would cause mark_target_live_regs
4051 to fail. */
4052 trial = prev_label (next_real_insn (trial));
4053 if (trial == 0 && target_label != 0)
4054 trial = find_end_label ();
4055
4056 if (trial != target_label
4057 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
4058 {
4059 reorg_redirect_jump (delay_insn, trial);
4060 target_label = trial;
4061 }
4062
4063 /* If the first insn at TARGET_LABEL is redundant with a previous
4064 insn, redirect the jump to the following insn and process it again. */
4065 trial = next_active_insn (target_label);
4066 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
4067 && redundant_insn (trial, insn, 0))
4068 {
4069 rtx tmp;
4070
4071 /* Figure out where to emit the special USE insn so we don't
4072 later incorrectly compute register live/death info. */
4073 tmp = next_active_insn (trial);
4074 if (tmp == 0)
4075 tmp = find_end_label ();
4076
4077 /* Insert the special USE insn and update dataflow info. */
4078 update_block (trial, tmp);
4079
4080 /* Now emit a label before the special USE insn, and
4081 redirect our jump to the new label. */
4082 target_label = get_label_before (PREV_INSN (tmp));
4083 reorg_redirect_jump (delay_insn, target_label);
4084 next = insn;
4085 continue;
4086 }
4087
4088 /* Similarly, if it is an unconditional jump with one insn in its
4089 delay list and that insn is redundant, thread the jump. */
4090 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
4091 && XVECLEN (PATTERN (trial), 0) == 2
4092 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
4093 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
4094 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
4095 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
4096 {
4097 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
4098 if (target_label == 0)
4099 target_label = find_end_label ();
4100
4101 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
4102 insn))
4103 {
4104 reorg_redirect_jump (delay_insn, target_label);
4105 next = insn;
4106 continue;
4107 }
4108 }
4109 }
4110
4111 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4112 && prev_active_insn (target_label) == insn
4113 && ! condjump_in_parallel_p (delay_insn)
4114 #ifdef HAVE_cc0
4115 /* If the last insn in the delay slot sets CC0 for some insn,
4116 various code assumes that it is in a delay slot. We could
4117 put it back where it belonged and delete the register notes,
4118 but it doesn't seem worthwhile in this uncommon case. */
4119 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
4120 REG_CC_USER, NULL_RTX)
4121 #endif
4122 )
4123 {
4124 int i;
4125
4126 /* All this insn does is execute its delay list and jump to the
4127 following insn. So delete the jump and just execute the delay
4128 list insns.
4129
4130 We do this by deleting the INSN containing the SEQUENCE, then
4131 re-emitting the insns separately, and then deleting the jump.
4132 This allows the count of the jump target to be properly
4133 decremented. */
4134
4135 /* Clear the from target bit, since these insns are no longer
4136 in delay slots. */
4137 for (i = 0; i < XVECLEN (pat, 0); i++)
4138 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
4139
4140 trial = PREV_INSN (insn);
4141 delete_insn (insn);
4142 emit_insn_after (pat, trial);
4143 delete_scheduled_jump (delay_insn);
4144 continue;
4145 }
4146
4147 /* See if this is an unconditional jump around a single insn which is
4148 identical to the one in its delay slot. In this case, we can just
4149 delete the branch and the insn in its delay slot. */
4150 if (next && GET_CODE (next) == INSN
4151 && prev_label (next_active_insn (next)) == target_label
4152 && simplejump_p (insn)
4153 && XVECLEN (pat, 0) == 2
4154 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
4155 {
4156 delete_insn (insn);
4157 continue;
4158 }
4159
4160 /* See if this jump (with its delay slots) branches around another
4161 jump (without delay slots). If so, invert this jump and point
4162 it to the target of the second jump. We cannot do this for
4163 annulled jumps, though. Again, don't convert a jump to a RETURN
4164 here. */
4165 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4166 && next && GET_CODE (next) == JUMP_INSN
4167 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
4168 && next_active_insn (target_label) == next_active_insn (next)
4169 && no_labels_between_p (insn, next))
4170 {
4171 rtx label = JUMP_LABEL (next);
4172 rtx old_label = JUMP_LABEL (delay_insn);
4173
4174 if (label == 0)
4175 label = find_end_label ();
4176
4177 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
4178 {
4179 /* Be careful how we do this to avoid deleting code or labels
4180 that are momentarily dead. See similar optimization in
4181 jump.c */
4182 if (old_label)
4183 ++LABEL_NUSES (old_label);
4184
4185 if (invert_jump (delay_insn, label))
4186 {
4187 int i;
4188
4189 /* Must update the INSN_FROM_TARGET_P bits now that
4190 the branch is reversed, so that mark_target_live_regs
4191 will handle the delay slot insn correctly. */
4192 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
4193 {
4194 rtx slot = XVECEXP (PATTERN (insn), 0, i);
4195 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
4196 }
4197
4198 delete_insn (next);
4199 next = insn;
4200 }
4201
4202 if (old_label && --LABEL_NUSES (old_label) == 0)
4203 delete_insn (old_label);
4204 continue;
4205 }
4206 }
4207
4208 /* If we own the thread opposite the way this insn branches, see if we
4209 can merge its delay slots with following insns. */
4210 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4211 && own_thread_p (NEXT_INSN (insn), 0, 1))
4212 try_merge_delay_insns (insn, next);
4213 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4214 && own_thread_p (target_label, target_label, 0))
4215 try_merge_delay_insns (insn, next_active_insn (target_label));
4216
4217 /* If we get here, we haven't deleted INSN. But we may have deleted
4218 NEXT, so recompute it. */
4219 next = next_active_insn (insn);
4220 }
4221 }
4222 \f
4223 #ifdef HAVE_return
4224
4225 /* Look for filled jumps to the end of function label. We can try to convert
4226 them into RETURN insns if the insns in the delay slot are valid for the
4227 RETURN as well. */
4228
4229 static void
4230 make_return_insns (first)
4231 rtx first;
4232 {
4233 rtx insn, jump_insn, pat;
4234 rtx real_return_label = end_of_function_label;
4235 int slots, i;
4236
4237 /* See if there is a RETURN insn in the function other than the one we
4238 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
4239 into a RETURN to jump to it. */
4240 for (insn = first; insn; insn = NEXT_INSN (insn))
4241 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
4242 {
4243 real_return_label = get_label_before (insn);
4244 break;
4245 }
4246
4247 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
4248 was equal to END_OF_FUNCTION_LABEL. */
4249 LABEL_NUSES (real_return_label)++;
4250
4251 /* Clear the list of insns to fill so we can use it. */
4252 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4253
4254 for (insn = first; insn; insn = NEXT_INSN (insn))
4255 {
4256 int flags;
4257
4258 /* Only look at filled JUMP_INSNs that go to the end of function
4259 label. */
4260 if (GET_CODE (insn) != INSN
4261 || GET_CODE (PATTERN (insn)) != SEQUENCE
4262 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4263 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
4264 continue;
4265
4266 pat = PATTERN (insn);
4267 jump_insn = XVECEXP (pat, 0, 0);
4268
4269 /* If we can't make the jump into a RETURN, try to redirect it to the best
4270 RETURN and go on to the next insn. */
4271 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
4272 {
4273 /* Make sure redirecting the jump will not invalidate the delay
4274 slot insns. */
4275 if (redirect_with_delay_slots_safe_p (jump_insn,
4276 real_return_label,
4277 insn))
4278 reorg_redirect_jump (jump_insn, real_return_label);
4279 continue;
4280 }
4281
4282 /* See if this RETURN can accept the insns currently in its delay slot.
4283 It can if it has at least as many slots and the contents
4284 of each is valid. */
4285
4286 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
4287 slots = num_delay_slots (jump_insn);
4288 if (slots >= XVECLEN (pat, 0) - 1)
4289 {
4290 for (i = 1; i < XVECLEN (pat, 0); i++)
4291 if (! (
4292 #ifdef ANNUL_IFFALSE_SLOTS
4293 (INSN_ANNULLED_BRANCH_P (jump_insn)
4294 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4295 ? eligible_for_annul_false (jump_insn, i - 1,
4296 XVECEXP (pat, 0, i), flags) :
4297 #endif
4298 #ifdef ANNUL_IFTRUE_SLOTS
4299 (INSN_ANNULLED_BRANCH_P (jump_insn)
4300 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4301 ? eligible_for_annul_true (jump_insn, i - 1,
4302 XVECEXP (pat, 0, i), flags) :
4303 #endif
4304 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
4305 break;
4306 }
4307 else
4308 i = 0;
4309
4310 if (i == XVECLEN (pat, 0))
4311 continue;
4312
4313 /* We have to do something with this insn. If it is an unconditional
4314 RETURN, delete the SEQUENCE and output the individual insns,
4315 followed by the RETURN. Then set things up so we try to find
4316 insns for its delay slots, if it needs some. */
4317 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
4318 {
4319 rtx prev = PREV_INSN (insn);
4320
4321 delete_insn (insn);
4322 for (i = 1; i < XVECLEN (pat, 0); i++)
4323 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
4324
4325 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
4326 emit_barrier_after (insn);
4327
4328 if (slots)
4329 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4330 }
4331 else
4332 /* It is probably more efficient to keep this with its current
4333 delay slot as a branch to a RETURN. */
4334 reorg_redirect_jump (jump_insn, real_return_label);
4335 }
4336
4337 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
4338 new delay slots we have created. */
4339 if (--LABEL_NUSES (real_return_label) == 0)
4340 delete_insn (real_return_label);
4341
4342 fill_simple_delay_slots (first, 1);
4343 fill_simple_delay_slots (first, 0);
4344 }
4345 #endif
4346 \f
4347 /* Try to find insns to place in delay slots. */
4348
4349 void
4350 dbr_schedule (first, file)
4351 rtx first;
4352 FILE *file;
4353 {
4354 rtx insn, next, epilogue_insn = 0;
4355 int i;
4356 #if 0
4357 int old_flag_no_peephole = flag_no_peephole;
4358
4359 /* Execute `final' once in prescan mode to delete any insns that won't be
4360 used. Don't let final try to do any peephole optimization--it will
4361 ruin dataflow information for this pass. */
4362
4363 flag_no_peephole = 1;
4364 final (first, 0, NO_DEBUG, 1, 1);
4365 flag_no_peephole = old_flag_no_peephole;
4366 #endif
4367
4368 /* If the current function has no insns other than the prologue and
4369 epilogue, then do not try to fill any delay slots. */
4370 if (n_basic_blocks == 0)
4371 return;
4372
4373 /* Find the highest INSN_UID and allocate and initialize our map from
4374 INSN_UID's to position in code. */
4375 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
4376 {
4377 if (INSN_UID (insn) > max_uid)
4378 max_uid = INSN_UID (insn);
4379 if (GET_CODE (insn) == NOTE
4380 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4381 epilogue_insn = insn;
4382 }
4383
4384 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
4385 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
4386 uid_to_ruid[INSN_UID (insn)] = i;
4387
4388 /* Initialize the list of insns that need filling. */
4389 if (unfilled_firstobj == 0)
4390 {
4391 gcc_obstack_init (&unfilled_slots_obstack);
4392 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4393 }
4394
4395 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
4396 {
4397 rtx target;
4398
4399 INSN_ANNULLED_BRANCH_P (insn) = 0;
4400 INSN_FROM_TARGET_P (insn) = 0;
4401
4402 /* Skip vector tables. We can't get attributes for them. */
4403 if (GET_CODE (insn) == JUMP_INSN
4404 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4405 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4406 continue;
4407
4408 if (num_delay_slots (insn) > 0)
4409 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4410
4411 /* Ensure all jumps go to the last of a set of consecutive labels. */
4412 if (GET_CODE (insn) == JUMP_INSN
4413 && (condjump_p (insn) || condjump_in_parallel_p (insn))
4414 && JUMP_LABEL (insn) != 0
4415 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
4416 != JUMP_LABEL (insn)))
4417 redirect_jump (insn, target);
4418 }
4419
4420 /* Indicate what resources are required to be valid at the end of the current
4421 function. The condition code never is and memory always is. If the
4422 frame pointer is needed, it is and so is the stack pointer unless
4423 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
4424 stack pointer is. Registers used to return the function value are
4425 needed. Registers holding global variables are needed. */
4426
4427 end_of_function_needs.cc = 0;
4428 end_of_function_needs.memory = 1;
4429 end_of_function_needs.unch_memory = 0;
4430 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
4431
4432 if (frame_pointer_needed)
4433 {
4434 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
4435 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4436 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
4437 #endif
4438 #ifdef EXIT_IGNORE_STACK
4439 if (! EXIT_IGNORE_STACK)
4440 #endif
4441 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4442 }
4443 else
4444 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4445
4446 if (current_function_return_rtx != 0
4447 && GET_CODE (current_function_return_rtx) == REG)
4448 mark_referenced_resources (current_function_return_rtx,
4449 &end_of_function_needs, 1);
4450
4451 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4452 if (global_regs[i])
4453 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
4454
4455 /* The registers required to be live at the end of the function are
4456 represented in the flow information as being dead just prior to
4457 reaching the end of the function. For example, the return of a value
4458 might be represented by a USE of the return register immediately
4459 followed by an unconditional jump to the return label where the
4460 return label is the end of the RTL chain. The end of the RTL chain
4461 is then taken to mean that the return register is live.
4462
4463 This sequence is no longer maintained when epilogue instructions are
4464 added to the RTL chain. To reconstruct the original meaning, the
4465 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
4466 point where these registers become live (start_of_epilogue_needs).
4467 If epilogue instructions are present, the registers set by those
4468 instructions won't have been processed by flow. Thus, those
4469 registers are additionally required at the end of the RTL chain
4470 (end_of_function_needs). */
4471
4472 start_of_epilogue_needs = end_of_function_needs;
4473
4474 while ((epilogue_insn = next_nonnote_insn (epilogue_insn)) != 0)
4475 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
4476
4477 /* Show we haven't computed an end-of-function label yet. */
4478 end_of_function_label = 0;
4479
4480 /* Allocate and initialize the tables used by mark_target_live_regs. */
4481 target_hash_table
4482 = (struct target_info **) alloca ((TARGET_HASH_PRIME
4483 * sizeof (struct target_info *)));
4484 bzero ((char *) target_hash_table,
4485 TARGET_HASH_PRIME * sizeof (struct target_info *));
4486
4487 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
4488 bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
4489
4490 /* Initialize the statistics for this function. */
4491 bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
4492 bzero ((char *) num_filled_delays, sizeof num_filled_delays);
4493
4494 /* Now do the delay slot filling. Try everything twice in case earlier
4495 changes make more slots fillable. */
4496
4497 for (reorg_pass_number = 0;
4498 reorg_pass_number < MAX_REORG_PASSES;
4499 reorg_pass_number++)
4500 {
4501 fill_simple_delay_slots (first, 1);
4502 fill_simple_delay_slots (first, 0);
4503 fill_eager_delay_slots (first);
4504 relax_delay_slots (first);
4505 }
4506
4507 /* Delete any USE insns made by update_block; subsequent passes don't need
4508 them or know how to deal with them. */
4509 for (insn = first; insn; insn = next)
4510 {
4511 next = NEXT_INSN (insn);
4512
4513 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
4514 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
4515 next = delete_insn (insn);
4516 }
4517
4518 /* If we made an end of function label, indicate that it is now
4519 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
4520 If it is now unused, delete it. */
4521 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
4522 delete_insn (end_of_function_label);
4523
4524 #ifdef HAVE_return
4525 if (HAVE_return && end_of_function_label != 0)
4526 make_return_insns (first);
4527 #endif
4528
4529 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4530
4531 /* It is not clear why the line below is needed, but it does seem to be. */
4532 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4533
4534 /* Reposition the prologue and epilogue notes in case we moved the
4535 prologue/epilogue insns. */
4536 reposition_prologue_and_epilogue_notes (first);
4537
4538 if (file)
4539 {
4540 register int i, j, need_comma;
4541
4542 for (reorg_pass_number = 0;
4543 reorg_pass_number < MAX_REORG_PASSES;
4544 reorg_pass_number++)
4545 {
4546 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4547 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4548 {
4549 need_comma = 0;
4550 fprintf (file, ";; Reorg function #%d\n", i);
4551
4552 fprintf (file, ";; %d insns needing delay slots\n;; ",
4553 num_insns_needing_delays[i][reorg_pass_number]);
4554
4555 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
4556 if (num_filled_delays[i][j][reorg_pass_number])
4557 {
4558 if (need_comma)
4559 fprintf (file, ", ");
4560 need_comma = 1;
4561 fprintf (file, "%d got %d delays",
4562 num_filled_delays[i][j][reorg_pass_number], j);
4563 }
4564 fprintf (file, "\n");
4565 }
4566 }
4567 }
4568 }
4569 #endif /* DELAY_SLOTS */