/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "params.h"
#include "function-abi.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

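/* An illustrative sketch (not itself part of the pass) of how the table
   is consulted: entries are chained per bucket and keyed by INSN_UID, so
   a lookup for a target insn follows the pattern

     struct target_info *tinfo;
     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
	 break;

   which is exactly what mark_target_live_regs and
   clear_hashed_info_for_insn do below.  */
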
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources *,
					     rtx *, int, struct resources,
					     struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else if (GET_CODE (x) == CLOBBER_HIGH)
    /* No current target supports both branch delay slots and CLOBBER_HIGH.
       We'd need more elaborate liveness tracking to handle that
       combination.  */
    gcc_unreachable ();
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

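/* Usage sketch (illustrative only): update_live_status is only ever
   reached through note_stores, which walks the SETs and CLOBBERs of a
   single insn and invokes the callback on each destination.  Assuming
   REAL_INSN is the insn being scanned, as in mark_target_live_regs
   below:

     note_stores (real_insn, update_live_status, NULL);

   Afterwards current_live_regs and pending_dead_regs reflect that
   insn's stores.  */
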
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (!insn)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

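/* A hypothetical caller, mirroring incr_ticks_for_insn below; a return
   value of -1 must be treated as "no reliable block found":

     int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
     if (b != -1)
       ...  use DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b)) etc.  ...  */
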
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
    case CLOBBER_HIGH:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  rtx link;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link;
	       link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == USE)
	      {
		for (i = 1; i < seq_size; i++)
		  {
		    rtx slot_pat = PATTERN (sequence->element (i));
		    if (GET_CODE (slot_pat) == SET
			&& rtx_equal_p (SET_DEST (slot_pat),
					XEXP (XEXP (link, 0), 0)))
		      break;
		  }
		if (i >= seq_size)
		  mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					     res, false);
	      }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

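/* Usage sketch (illustrative only): a typical caller clears a resource
   set and then accumulates the references of one insn into it:

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);

   After this, needed.regs holds every hard register the insn (and, with
   INCLUDE_DELAYED_EFFECTS, any routine it calls) may read.  */
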
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn, *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  pending_dead_regs &= ~needed.regs;
	  res->regs &= ~pending_dead_regs;
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  scratch = target_set.regs & ~needed.regs;
		  target_res.regs &= ~scratch;

		  fallthrough_res = *res;
		  scratch = set.regs & ~needed.regs;
		  fallthrough_res.regs &= ~scratch;

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  fallthrough_res.regs |= target_res.regs;
		  res->regs &= fallthrough_res.regs;
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}

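/* The conditional-branch case above computes, in set notation (a sketch
   of the intent, not additional code):

     dead_on_target      = target_set.regs & ~needed.regs
     dead_on_fallthrough = set.regs        & ~needed.regs
     res->regs          &= live_after_target | live_after_fallthrough

   i.e. a register is removed from RES only if both paths agree that it
   is set before any use.  */
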
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;

	  res->cc = res->memory = 1;

	  res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    {
	      /* We could support CLOBBER_HIGH and treat it in the same way as
		 HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that
		 yet.  */
	      gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
	      if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				    MARK_SRC_DEST);
	    }

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case CLOBBER_HIGH:
      /* No current target supports both branch delay slots and CLOBBER_HIGH.
	 We'd need more elaborate liveness tracking to handle that
	 combination.  */
      gcc_unreachable ();

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	mark_set_resources (control, res, 0, mark_type);
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

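/* Usage sketch (illustrative only), complementing the example after
   mark_referenced_resources: callers usually track "set" and "needed"
   resources side by side, e.g.

     struct resources set, needed;
     CLEAR_RESOURCE (&set);
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);
     mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

   Registers in set.regs but not in needed.regs are dead after INSN;
   find_dead_or_set_registers above relies on exactly this.  */
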
/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
		       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      res->regs = tinfo->live_regs;
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
	if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call
		    = insn_callee_abi (real_insn).full_reg_clobbers ();
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  current_live_regs &= ~regs_invalidated_by_this_call;
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (real_insn, update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      current_live_regs &= ~pending_dead_regs;
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  current_live_regs |= extra_live;
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    current_live_regs |= start_of_epilogue_needs.regs;
	}

      res->regs = current_live_regs;
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a <rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  scratch = needed.regs & ~set.regs;
	  new_resources.regs |= scratch;

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      res->regs |= new_resources.regs;
    }

  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}

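/* Usage sketch for the delay slot filler (a hypothetical caller; the
   real ones live in reorg.c): given the label TARGET of a branch whose
   delay slot we want to fill,

     struct resources live;
     mark_target_live_regs (get_insns (), target, &live);
     if (! TEST_HARD_REG_BIT (live.regs, REGNO (candidate_reg)))
       ...  candidate_reg is free for use in the delay slot  ...

   where candidate_reg names a hypothetical register being considered.  */
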
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}

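/* Lifecycle sketch (illustrative): a pass using this machinery brackets
   its work with the init/free pair, e.g.

     init_resource_info (epilogue_insn);
     ...  calls to mark_target_live_regs, incr_ticks_for_insn, ...
     free_resource_info ();

   init_resource_info must run first so that target_hash_table and
   bb_ticks exist; mark_target_live_regs only caches its results when
   they do.  */
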
/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  /* Reset BLOCK_FOR_INSN for the labels we set in init_resource_info.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}