Add a function for getting the ABI of a call insn target
[gcc.git] / gcc / resource.c
/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "resource.h"
#include "insn-attr.h"
#include "params.h"
#include "function-abi.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
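
/* For illustration, a lookup of the cached liveness information for an
   insn with a given UID walks one hash bucket, along these lines (a
   minimal sketch of the pattern used by mark_target_live_regs and
   clear_hashed_info_for_insn below; `find_target_info' itself is a
   hypothetical helper, not part of this file):

     static struct target_info *
     find_target_info (int uid)
     {
       struct target_info *tinfo;
       for (tinfo = target_hash_table[uid % TARGET_HASH_PRIME];
	    tinfo; tinfo = tinfo->next)
	 if (tinfo->uid == uid)
	   return tinfo;
       return NULL;
     }
*/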
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;
\f
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);
\f
/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else if (GET_CODE (x) == CLOBBER_HIGH)
    /* No current target supports both branch delay slots and CLOBBER_HIGH.
       We'd need more elaborate liveness tracking to handle that
       combination.  */
    gcc_unreachable ();
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
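
/* For illustration, mark_target_live_regs below applies this callback to
   every store in an insn via note_stores, roughly as follows (a minimal
   sketch; INSN stands for some rtx_insn * in hand):

     CLEAR_HARD_REG_SET (pending_dead_regs);
     note_stores (INSN, update_live_status, NULL);

   after which current_live_regs reflects the SETs and CLOBBERs of INSN.  */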
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
\f
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
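
/* For illustration, a filled delay slot appears in the insn stream as a
   SEQUENCE whose element 0 is the branch and whose remaining elements are
   the slot insns, schematically (a sketch, not real RTL dump output):

     (insn (sequence [
	      (jump_insn ... (set (pc) (label_ref L1)))   ;; element 0
	      (insn ...)                                  ;; delay slot insn
	    ]))

   INSN_FROM_TARGET_P is set on a slot insn that was copied from the branch
   target; for an annulled branch such insns execute only when the branch
   is taken, which is why next_insn_no_annul skips them.  */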
\f
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
    case CLOBBER_HIGH:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we get a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
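
/* For illustration, a typical caller (e.g. the delay slot filler in
   reorg.c) computes the registers an insn reads along these lines (a
   minimal sketch; INSN stands for some rtx_insn * in hand and REGNUM
   for a hard register number of interest):

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (INSN, &needed, true);
     if (TEST_HARD_REG_BIT (needed.regs, REGNUM))
       ...  INSN may read hard register REGNUM ...

   CLEAR_RESOURCE and struct resources are defined in resource.h.  */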
\f
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  pending_dead_regs &= ~needed.regs;
	  res->regs &= ~pending_dead_regs;
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  scratch = target_set.regs & ~needed.regs;
		  target_res.regs &= ~scratch;

		  fallthrough_res = *res;
		  scratch = set.regs & ~needed.regs;
		  fallthrough_res.regs &= ~scratch;

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  fallthrough_res.regs |= target_res.regs;
		  res->regs &= fallthrough_res.regs;
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}
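
/* For illustration of the budget used above: visiting a jump increments
   JUMP_COUNT by 1, and a conditional jump adds 4 more, with the walk
   abandoned once the count reaches 10.  An unconditional chain can thus
   pass through up to ten branches, while the 1 + 4 = 5 cost of a
   conditional jump means at most one conditional branch is followed,
   as the comment inside the function notes.  */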
\f
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;

	  res->cc = res->memory = 1;

	  res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    {
	      /* We could support CLOBBER_HIGH and treat it in the same way as
		 HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that
		 yet.  */
	      gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
	      if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				    MARK_SRC_DEST);
	    }

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case CLOBBER_HIGH:
      /* No current target supports both branch delay slots and CLOBBER_HIGH.
	 We'd need more elaborate liveness tracking to handle that
	 combination.  */
      gcc_unreachable ();

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	mark_set_resources (control, res, 0, mark_type);
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
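
/* For illustration, the set-side counterpart to the sketch after
   mark_referenced_resources: a caller typically pairs the two to decide
   whether one insn clobbers anything another needs (a minimal sketch;
   INSN1 and INSN2 stand for rtx_insn * values in hand):

     struct resources set, needed;
     CLEAR_RESOURCE (&set);
     CLEAR_RESOURCE (&needed);
     mark_set_resources (INSN1, &set, 0, MARK_SRC_DEST_CALL);
     mark_referenced_resources (INSN2, &needed, true);
     if (hard_reg_set_intersect_p (set.regs, needed.regs))
       ...  INSN1 writes a register INSN2 reads ...

   hard_reg_set_intersect_p is from hard-reg-set.h.  */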
\f
/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
		       struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      res->regs = tinfo->live_regs;
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
	if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index
		    ? insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use
		 because they may be used by a complementarily (or more
		 strictly) predicated instruction, or if the CALL is
		 NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call
		    = insn_callee_abi (real_insn).full_reg_clobbers ();
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  current_live_regs &= ~regs_invalidated_by_this_call;
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (real_insn, update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      current_live_regs &= ~pending_dead_regs;
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  current_live_regs |= extra_live;
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    current_live_regs |= start_of_epilogue_needs.regs;
	}

      res->regs = current_live_regs;
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  scratch = needed.regs & ~set.regs;
	  new_resources.regs |= scratch;

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      res->regs |= new_resources.regs;
    }

  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}
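
/* For illustration, the delay slot filler in reorg.c drives this function
   roughly as follows (a minimal sketch; TARGET stands for the label or
   return rtx whose liveness is wanted and REGNUM for a hard register
   number of interest):

     struct resources live;
     mark_target_live_regs (get_insns (), TARGET, &live);
     if (!TEST_HARD_REG_BIT (live.regs, REGNUM))
       ...  hard register REGNUM is free to use at TARGET ...

   get_insns () is the usual way to obtain the head of the RTL chain for
   the INSNS argument.  */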
\f
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
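
/* For illustration, a pass that wants this liveness machinery brackets its
   work with the init/free pair, along these lines (a minimal sketch; the
   middle is whatever per-insn analysis the pass performs):

     init_resource_info (epilogue_insn);
     ...  calls to mark_target_live_regs, incr_ticks_for_insn and
	  clear_hashed_info_for_insn as insns are moved ...
     free_resource_info ();

   dbr_schedule in reorg.c drives these routines this way.  */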
\f
/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}
\f
/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}
\f
/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}
\f
/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}