/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "insn-attr.h"
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
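
/* For example, the liveness record for a branch target is found by hashing
   the target's INSN_UID into one of TARGET_HASH_PRIME buckets and walking
   that bucket's chain, as mark_target_live_regs does below:

     struct target_info *tinfo
       = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
     for (; tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
	 break;  */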
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources *,
					     rtx *, int, struct resources,
					     struct resources);
/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
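
/* update_live_status is not called directly; mark_target_live_regs below
   hands it to note_stores, which invokes it once for each SET or CLOBBER
   in an insn's pattern:

     note_stores (PATTERN (real_insn), update_live_status, NULL);  */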
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
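
/* For example, the callers in this file bound the backward scan with
   MAX_DELAY_SLOT_LIVE_SEARCH and must be prepared for failure:

     int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
     if (b != -1)
       ... the DF_LR_IN info of block B can be trusted here ...  */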
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
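
/* A sketch of the annulled case (schematic, not real RTL): if INSN is an
   annulled branch at the head of a SEQUENCE whose slots were filled from
   the branch target, e.g.

     (sequence [jump_insn-with-annul  slot-insn-from-target])

   then the slot insns execute only when the branch is taken, so stepping
   from the jump_insn skips over them before returning the next insn on
   the fall-through path.  */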
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  rtx link;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link;
	       link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == USE)
	      {
		for (i = 1; i < seq_size; i++)
		  {
		    rtx slot_pat = PATTERN (sequence->element (i));
		    if (GET_CODE (slot_pat) == SET
			&& rtx_equal_p (SET_DEST (slot_pat),
					XEXP (XEXP (link, 0), 0)))
		      break;
		  }
		if (i >= seq_size)
		  mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					     res, false);
	      }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
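
/* A typical use, as in find_dead_or_set_registers below: clear a resource
   set and accumulate everything one insn uses, including what a callee
   would use:

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);  */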
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn, *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
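
/* The set algebra used just above, in pseudo-ops over HARD_REG_SETs: a
   register leaves RES->regs (i.e. is proven dead) once it has been set
   without first being needed:

     scratch = set.regs & ~needed.regs;     COPY / AND_COMPL
     res->regs &= ~scratch;                 AND_COMPL  */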
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	mark_set_resources (control, res, 0, mark_type);
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
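
/* A typical use, mirroring mark_target_live_regs below: collect everything
   an insn (and, with MARK_SRC_DEST_CALL, its callee) may modify:

     struct resources set;
     CLEAR_RESOURCE (&set);
     mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);  */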
/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */
void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  rtx_insn *jump_insn;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
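
/* A sketch of the intended use from the delay slot filler (variable names
   here are hypothetical): decide whether a candidate insn's result is dead
   at the target of the branch being filled:

     struct resources live;
     mark_target_live_regs (get_insns (), target_label, &live);
     if (!TEST_HARD_REG_BIT (live.regs,
			     REGNO (SET_DEST (PATTERN (candidate)))))
       ... CANDIDATE does not clobber anything live at TARGET_LABEL ...  */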
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
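
/* The expected lifecycle, as the comments above and below describe it:

     init_resource_info (epilogue_insn);     once per function
     ... mark_target_live_regs / incr_ticks_for_insn /
	 clear_hashed_info_for_insn as insns move ...
     free_resource_info ();                  after the last query  */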
/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}
/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}
/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}
/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}