/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
#include "df.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save it in a hash table rather than recomputing it
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257
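
/* A target's chain is found by hashing its INSN_UID; a minimal sketch of
   the lookup pattern used throughout this file:

     struct target_info *tinfo
       = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
     while (tinfo && tinfo->uid != INSN_UID (target))
       tinfo = tinfo->next;  */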

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;
\f
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
                                       rtx*, int, struct resources,
                                       struct resources);
\f
/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_HARD_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
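
/* update_live_status is only ever invoked through note_stores; the call
   in mark_target_live_regs below has the shape:

     note_stores (PATTERN (real_insn), update_live_status, NULL);  */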

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}
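
/* Callers in this file bound the backward scan with the
   --param-controlled limit MAX_DELAY_SLOT_LIVE_SEARCH, e.g.:

     b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);  */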
\f
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (rtx insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
      if (JUMP_P (insn)
          && INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        {
          rtx next = NEXT_INSN (insn);

          while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
                 && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
            }
        }

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
\f
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, false);
      else
        {
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno = regno + subreg_nregs (x);

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (! MEM_READONLY_P (x))
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here, since then we would be confused
         by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
         traditional asms, unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx insn = PREV_INSN (x);
          rtx sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the
             SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = PATTERN (NEXT_INSN (insn));
              seq_size = XVECLEN (sequence, 0);
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
              SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
            }

          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, false);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
        /* In addition to the usual references, also consider all outputs
           as referenced, to compensate for mark_set_resources treating
           them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
           handling, except that we have a partial incidence instead of a
           partial width.  */
        mark_set_resources (x, res, 0,
                            include_delayed_effects
                            ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (x))
        return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}
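
/* For instance, a caller that wants everything a single insn reads
   might do (a minimal sketch; `insn' stands for any insn in the chain):

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);
     if (TEST_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM))
       ...  */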
\f
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
                            rtx *jump_target, int jump_count,
                            struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
         know where we might end up next.  That means that we have to
         assume that whatever we have already marked as live really is
         live.  */
      if (can_throw_internal (insn))
        break;

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          /* After a label, any pending dead registers that weren't yet
             used can be made dead.  */
          AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
          AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
          CLEAR_HARD_REG_SET (pending_dead_regs);

          continue;

        case BARRIER:
        case NOTE:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (INSN_P (XEXP (PATTERN (insn), 0)))
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
                                    MARK_SRC_DEST_CALL);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
                  if (JUMP_P (this_jump_insn))
                    break;
                }
            }

        default:
          break;
        }

      if (JUMP_P (this_jump_insn))
        {
          if (jump_count++ < 10)
            {
              if (any_uncondjump_p (this_jump_insn)
                  || ANY_RETURN_P (PATTERN (this_jump_insn)))
                {
                  next = JUMP_LABEL (this_jump_insn);
                  if (ANY_RETURN_P (next))
                    next = NULL_RTX;
                  if (jump_insn == 0)
                    {
                      jump_insn = insn;
                      if (jump_target)
                        *jump_target = JUMP_LABEL (this_jump_insn);
                    }
                }
              else if (any_condjump_p (this_jump_insn))
                {
                  struct resources target_set, target_res;
                  struct resources fallthrough_res;

                  /* We can handle conditional branches here by following
                     both paths, and then IOR the results of the two paths
                     together, which will give us registers that are dead
                     on both paths.  Since this is expensive, we give it
                     a much higher cost than unconditional branches.  The
                     cost was chosen so that we will follow at most 1
                     conditional branch.  */
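
                  /* For instance, if r3 is dead only on the taken path
                     while r4 is dead on both, IORing the two live sets
                     below leaves r3 live and removes only r4 from RES.  */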

                  jump_count += 4;
                  if (jump_count >= 10)
                    break;

                  mark_referenced_resources (insn, &needed, true);

                  /* For an annulled branch, mark_set_resources ignores slots
                     filled by instructions from the target.  This is correct
                     if the branch is not taken.  Since we are following both
                     paths from the branch, we must also compute correct info
                     if the branch is taken.  We do this by inverting all of
                     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
                     and then inverting the INSN_FROM_TARGET_P bits again.  */

                  if (GET_CODE (PATTERN (insn)) == SEQUENCE
                      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                    {
                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      target_set = set;
                      mark_set_resources (insn, &target_set, 0,
                                          MARK_SRC_DEST_CALL);

                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                    }
                  else
                    {
                      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                      target_set = set;
                    }

                  target_res = *res;
                  COPY_HARD_REG_SET (scratch, target_set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

                  fallthrough_res = *res;
                  COPY_HARD_REG_SET (scratch, set.regs);
                  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

                  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
                    find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
                                                &target_res, 0, jump_count,
                                                target_set, needed);
                  find_dead_or_set_registers (next,
                                              &fallthrough_res, 0, jump_count,
                                              set, needed);
                  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
                  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
                  break;
                }
              else
                break;
            }
          else
            {
              /* Don't try this optimization if we expired our jump count
                 above, since that would mean there may be an infinite loop
                 in the function being compiled.  */
              jump_insn = 0;
              break;
            }
        }

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
\f
/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
                    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
        res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
         that aren't saved across calls, global registers and anything
         explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
        {
          rtx link;
          HARD_REG_SET regs;

          res->cc = res->memory = 1;

          get_call_reg_set_usage (x, &regs, regs_invalidated_by_call);
          IOR_HARD_REG_SET (res->regs, regs);

          for (link = CALL_INSN_FUNCTION_USAGE (x);
               link; link = XEXP (link, 1))
            if (GET_CODE (XEXP (link, 0)) == CLOBBER)
              mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                  MARK_SRC_DEST);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can clobber any register.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);
        }

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
         and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
          && INSN_SETS_ARE_DELAYED (x))
        return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
        goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
         the called routine.  So only include it if we are to include the
         effects of the called routine.  */

      mark_set_resources (SET_DEST (x), res,
                          (mark_type == MARK_SRC_DEST_CALL
                           || GET_CODE (SET_SRC (x)) != CALL),
                          mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx control = XVECEXP (x, 0, 0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
          {
            rtx elt = XVECEXP (x, 0, i);
            if (!annul_p && INSN_FROM_TARGET_P (elt))
              mark_set_resources (elt, res, 0, mark_type);
          }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
        {
          res->memory = 1;
          res->volatil |= MEM_VOLATILE_P (x);
        }

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
        {
          if (!REG_P (SUBREG_REG (x)))
            mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
          else
            {
              unsigned int regno = subreg_regno (x);
              unsigned int last_regno = regno + subreg_nregs (x);

              gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
              for (r = regno; r < last_regno; r++)
                SET_HARD_REG_BIT (res->regs, r);
            }
        }
      return;

    case REG:
      if (in_dest)
        {
          gcc_assert (HARD_REGISTER_P (x));
          add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
        }
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asms are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here, since then we would be confused
         by the ASM_INPUT rtxs inside ASM_OPERANDS, which do not indicate
         traditional asms, unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
                            MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
        break;
      }
}
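
/* A typical caller pairs this with mark_referenced_resources to compute
   what an insn kills versus what it still needs (a minimal sketch):

     struct resources set, needed;
     CLEAR_RESOURCE (&set);
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, true);
     mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

   Registers in set.regs but not in needed.regs are dead below INSN;
   find_dead_or_set_registers above uses exactly this pattern.  */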
\f
/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}
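
/* Schematically, a return with a filled delay slot is an insn whose
   pattern is a SEQUENCE with the branch as element 0, e.g.

     (sequence [(jump_insn (return)) (insn ...)])

   which is why the recursion above only inspects element 0.  */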

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0 || ANY_RETURN_P (target))
    {
      *res = end_of_function_needs;
      return;
    }

  /* Handle return insn.  */
  else if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (target))
          break;

      /* Start by getting the basic block number.  If we have saved
         information, we can get it from there unless the insn at the
         start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
          && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK_FOR_FN (cfun,
                                                            tinfo->block))))
        b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
        {
          /* If the information is up-to-date, use it.  Otherwise, we will
             update it below.  */
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
            {
              COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
              return;
            }
        }
      else
        {
          /* Allocate a place to put our results and chain it into the
             hash table.  */
          tinfo = XNEW (struct target_info);
          tinfo->uid = INSN_UID (target);
          tinfo->block = b;
          tinfo->next
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
        }
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index
                    ? insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (NONJUMP_INSN_P (stop_insn)
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx real_insn = insn;
          enum rtx_code code = GET_CODE (insn);

          if (DEBUG_INSN_P (insn))
            continue;

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
              && INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (code == INSN
              && GET_CODE (PATTERN (insn)) == USE
              && INSN_P (XEXP (PATTERN (insn), 0)))
            real_insn = XEXP (PATTERN (insn), 0);

          if (CALL_P (real_insn))
            {
              /* Values in call-clobbered registers survive a COND_EXEC CALL
                 if that is not executed; this matters for resource use
                 because they may be used by a complementarily (or more
                 strictly) predicated instruction, or if the CALL is
                 NORETURN.  */
              if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
                {
                  HARD_REG_SET regs_invalidated_by_this_call;
                  get_call_reg_set_usage (real_insn,
                                          &regs_invalidated_by_this_call,
                                          regs_invalidated_by_call);
                  /* CALL clobbers all call-used regs that aren't fixed except
                     sp, ap, and fp.  Do this before setting the result of the
                     call live.  */
                  AND_COMPL_HARD_REG_SET (current_live_regs,
                                          regs_invalidated_by_this_call);
                }

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((NONJUMP_INSN_P (real_insn)
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || JUMP_P (real_insn)
              || CALL_P (real_insn))
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  add_to_hard_reg_set (&pending_dead_regs,
                                       GET_MODE (XEXP (link, 0)),
                                       REGNO (XEXP (link, 0)));

              note_stores (PATTERN (real_insn), update_live_status, NULL);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  remove_from_hard_reg_set (&current_live_regs,
                                            GET_MODE (XEXP (link, 0)),
                                            REGNO (XEXP (link, 0)));
            }

          else if (LABEL_P (real_insn))
            {
              basic_block bb;

              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
              CLEAR_HARD_REG_SET (pending_dead_regs);

              /* We must conservatively assume that all registers that used
                 to be live here still are.  The fallthrough edge may have
                 left a live register uninitialized.  */
              bb = BLOCK_FOR_INSN (real_insn);
              if (bb)
                {
                  HARD_REG_SET extra_live;

                  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
                  IOR_HARD_REG_SET (current_live_regs, extra_live);
                }
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (NOTE_P (real_insn)
                   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            IOR_HARD_REG_SET (current_live_regs,
                              start_of_epilogue_needs.regs);
        }

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
        {
          tinfo->block = b;
          tinfo->bb_tick = bb_ticks[b];
        }
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
                                          set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
        jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
        {
          mark_referenced_resources (insn, &needed, true);

          COPY_HARD_REG_SET (scratch, needed.regs);
          AND_COMPL_HARD_REG_SET (scratch, set.regs);
          IOR_HARD_REG_SET (new_resources.regs, scratch);

          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
        }

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
\f
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs,
                        HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
        && EXIT_IGNORE_STACK
        && epilogue_insn
        && !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
                               &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
        || EPILOGUE_USES (i)
#endif
        )
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
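
/* The expected lifecycle, as driven by the delay-slot scheduler (a
   sketch):

     init_resource_info (epilogue_insn);
     ...
     mark_target_live_regs (get_insns (), target, &res);
     ...
     free_resource_info ();  */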
\f
/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
        {
          struct target_info *ti = target_hash_table[i];

          while (ti)
            {
              struct target_info *next = ti->next;
              free (ti);
              ti = next;
            }
        }

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}
\f
/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (insn))
          break;

      if (tinfo)
        tinfo->block = -1;
    }
}
\f
/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}
\f
/* Add TRIAL to the set of resources used at the end of the current
   function.  */

void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}