1 /* FIXME: We need to go back and add the warning messages about code
2 moved across setjmp. */
3
4
5 /* Scanning of rtl for dataflow analysis.
6 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
7 Free Software Foundation, Inc.
8 Originally contributed by Michael P. Hayes
9 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
10 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
11 and Kenneth Zadeck (zadeck@naturalbridge.com).
12
13 This file is part of GCC.
14
15 GCC is free software; you can redistribute it and/or modify it under
16 the terms of the GNU General Public License as published by the Free
17 Software Foundation; either version 2, or (at your option) any later
18 version.
19
20 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
21 WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
23 for more details.
24
25 You should have received a copy of the GNU General Public License
26 along with GCC; see the file COPYING. If not, write to the Free
27 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 02110-1301, USA.
29 */
30
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "rtl.h"
36 #include "tm_p.h"
37 #include "insn-config.h"
38 #include "recog.h"
39 #include "function.h"
40 #include "regs.h"
41 #include "output.h"
42 #include "alloc-pool.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "basic-block.h"
46 #include "sbitmap.h"
47 #include "bitmap.h"
48 #include "timevar.h"
49 #include "tree.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "df.h"
53
54 #ifndef HAVE_epilogue
55 #define HAVE_epilogue 0
56 #endif
57 #ifndef HAVE_prologue
58 #define HAVE_prologue 0
59 #endif
60 #ifndef HAVE_sibcall_epilogue
61 #define HAVE_sibcall_epilogue 0
62 #endif
63
64 #ifndef EPILOGUE_USES
65 #define EPILOGUE_USES(REGNO) 0
66 #endif
67
68 /* The bitmap_obstack is used to allocate bitmaps that should not be
69 reset after each function is compiled. */
70
71 static bitmap_obstack persistent_obstack;
72
73 /* The set of hard registers in eliminables[i].from. */
74
75 static HARD_REG_SET elim_reg_set;
76
77 /* This is a bitmap copy of regs_invalidated_by_call so that we can
78 easily add it into bitmaps, etc. */
79
80 bitmap df_invalidated_by_call = NULL;
81
82 /* Forward declarations of the local functions used by the scanning
83 machinery below. */
84
85 static void df_ref_record (struct dataflow *, rtx, rtx *,
86 basic_block, rtx, enum df_ref_type,
87 enum df_ref_flags, bool record_live);
88 static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
89 enum df_ref_flags, bool record_live);
90 static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
91 static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
92 basic_block, rtx, enum df_ref_flags);
93
94 static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
95 static void df_bb_refs_record (struct dataflow *, basic_block);
96 static void df_refs_record (struct dataflow *, bitmap);
97 static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
98 basic_block, rtx, enum df_ref_type,
99 enum df_ref_flags);
100 static void df_record_entry_block_defs (struct dataflow *);
101 static void df_record_exit_block_uses (struct dataflow *);
102 static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
103 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
104 static void df_grow_insn_info (struct df *);
105
106 \f
107 /*----------------------------------------------------------------------------
108 SCANNING DATAFLOW PROBLEM
109
110 There are several ways in which scanning looks just like the other
111 dataflow problems. It shares all the mechanisms for local info
112 as well as basic block info. Where it differs is when and how often
113 it gets run. It also has no need for the iterative solver.
114 ----------------------------------------------------------------------------*/
115
116 /* Problem data for the scanning dataflow function. */
117 struct df_scan_problem_data
118 {
119 alloc_pool ref_pool;
120 alloc_pool insn_pool;
121 alloc_pool reg_pool;
122 alloc_pool mw_reg_pool;
123 alloc_pool mw_link_pool;
124 };
125
126 typedef struct df_scan_bb_info *df_scan_bb_info_t;
127
128 static void
129 df_scan_free_internal (struct dataflow *dflow)
130 {
131 struct df *df = dflow->df;
132 struct df_scan_problem_data *problem_data
133 = (struct df_scan_problem_data *) dflow->problem_data;
134
135 free (df->def_info.regs);
136 free (df->def_info.refs);
137 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
138
139 free (df->use_info.regs);
140 free (df->use_info.refs);
141 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
142
143 free (df->insns);
144 df->insns = NULL;
145 df->insns_size = 0;
146
147 free (dflow->block_info);
148 dflow->block_info = NULL;
149 dflow->block_info_size = 0;
150
151 BITMAP_FREE (df->hardware_regs_used);
152 BITMAP_FREE (df->entry_block_defs);
153 BITMAP_FREE (df->exit_block_uses);
154
155 free_alloc_pool (dflow->block_pool);
156 free_alloc_pool (problem_data->ref_pool);
157 free_alloc_pool (problem_data->insn_pool);
158 free_alloc_pool (problem_data->reg_pool);
159 free_alloc_pool (problem_data->mw_reg_pool);
160 free_alloc_pool (problem_data->mw_link_pool);
161 }
162
163
164 /* Get basic block info. */
165
166 struct df_scan_bb_info *
167 df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
168 {
169 gcc_assert (index < dflow->block_info_size);
170 return (struct df_scan_bb_info *) dflow->block_info[index];
171 }
172
173
174 /* Set basic block info. */
175
176 static void
177 df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
178 struct df_scan_bb_info *bb_info)
179 {
180 gcc_assert (index < dflow->block_info_size);
181 dflow->block_info[index] = (void *) bb_info;
182 }
183
184
185 /* Free basic block info. */
186
187 static void
188 df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
189 {
190 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
191 if (bb_info)
192 {
193 df_bb_refs_delete (dflow, bb->index);
194 pool_free (dflow->block_pool, bb_info);
195 }
196 }
197
198
199 /* Allocate the problem data for the scanning problem. This should be
200 called when the problem is created or when the entire function is to
201 be rescanned. */
202
203 static void
204 df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
205 bitmap all_blocks ATTRIBUTE_UNUSED)
206 {
207 struct df *df = dflow->df;
208 struct df_scan_problem_data *problem_data;
209 unsigned int insn_num = get_max_uid () + 1;
210 unsigned int block_size = 50;
211 unsigned int bb_index;
212 bitmap_iterator bi;
213
214 /* Given the number of pools, this is really faster than tearing
215 everything apart. */
216 if (dflow->problem_data)
217 df_scan_free_internal (dflow);
218
219 dflow->block_pool
220 = create_alloc_pool ("df_scan_block pool",
221 sizeof (struct df_scan_bb_info),
222 block_size);
223
224 problem_data = XNEW (struct df_scan_problem_data);
225 dflow->problem_data = problem_data;
226
227 problem_data->ref_pool
228 = create_alloc_pool ("df_scan_ref pool",
229 sizeof (struct df_ref), block_size);
230 problem_data->insn_pool
231 = create_alloc_pool ("df_scan_insn pool",
232 sizeof (struct df_insn_info), block_size);
233 problem_data->reg_pool
234 = create_alloc_pool ("df_scan_reg pool",
235 sizeof (struct df_reg_info), block_size);
236 problem_data->mw_reg_pool
237 = create_alloc_pool ("df_scan_mw_reg pool",
238 sizeof (struct df_mw_hardreg), block_size);
239 problem_data->mw_link_pool
240 = create_alloc_pool ("df_scan_mw_link pool",
241 sizeof (struct df_link), block_size);
242
243 insn_num += insn_num / 4;
244 df_grow_reg_info (dflow, &df->def_info);
245 df_grow_ref_info (&df->def_info, insn_num);
246
247 df_grow_reg_info (dflow, &df->use_info);
248 df_grow_ref_info (&df->use_info, insn_num * 2);
249
250 df_grow_insn_info (df);
251 df_grow_bb_info (dflow);
252
253 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
254 {
255 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
256 if (!bb_info)
257 {
258 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
259 df_scan_set_bb_info (dflow, bb_index, bb_info);
260 }
261 bb_info->artificial_defs = NULL;
262 bb_info->artificial_uses = NULL;
263 }
264
265 df->hardware_regs_used = BITMAP_ALLOC (NULL);
266 df->entry_block_defs = BITMAP_ALLOC (NULL);
267 df->exit_block_uses = BITMAP_ALLOC (NULL);
268 }
269
270
271 /* Free all of the data associated with the scan problem. */
272
273 static void
274 df_scan_free (struct dataflow *dflow)
275 {
276 struct df *df = dflow->df;
277
278 if (dflow->problem_data)
279 {
280 df_scan_free_internal (dflow);
281 free (dflow->problem_data);
282 }
283
284 if (df->blocks_to_scan)
285 BITMAP_FREE (df->blocks_to_scan);
286
287 if (df->blocks_to_analyze)
288 BITMAP_FREE (df->blocks_to_analyze);
289
290 free (dflow);
291 }
292
293 static void
294 df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
295 {
296 struct df *df = dflow->df;
297 int i;
298
299 fprintf (file, " invalidated by call \t");
300 dump_bitmap (file, df_invalidated_by_call);
301 fprintf (file, " hardware regs used \t");
302 dump_bitmap (file, df->hardware_regs_used);
303 fprintf (file, " entry block uses \t");
304 dump_bitmap (file, df->entry_block_defs);
305 fprintf (file, " exit block uses \t");
306 dump_bitmap (file, df->exit_block_uses);
307 fprintf (file, " regs ever live \t");
308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
309 if (regs_ever_live[i])
310 fprintf (file, "%d ", i);
311 fprintf (file, "\n");
312 }
313
314 static struct df_problem problem_SCAN =
315 {
316 DF_SCAN, /* Problem id. */
317 DF_NONE, /* Direction. */
318 df_scan_alloc, /* Allocate the problem specific data. */
319 NULL, /* Reset global information. */
320 df_scan_free_bb_info, /* Free basic block info. */
321 NULL, /* Local compute function. */
322 NULL, /* Init the solution specific data. */
323 NULL, /* Iterative solver. */
324 NULL, /* Confluence operator 0. */
325 NULL, /* Confluence operator n. */
326 NULL, /* Transfer function. */
327 NULL, /* Finalize function. */
328 df_scan_free, /* Free all of the problem information. */
329 df_scan_dump, /* Debugging. */
330 NULL, /* Dependent problem. */
331 0 /* Changeable flags. */
332 };
333
334
335 /* Create a new DATAFLOW instance and add it to an existing instance
336 of DF. The returned structure is what is used to get at the
337 solution. */
338
339 struct dataflow *
340 df_scan_add_problem (struct df *df, int flags)
341 {
342 return df_add_problem (df, &problem_SCAN, flags);
343 }
344
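/* Illustrative sketch (not part of the original source): a minimal
   client of the scanning problem.  The df_init call and its flags
   argument live in df-core.c and are an assumption here; the two
   calls below are this file's entry points.

     struct df *df = df_init (DF_HARD_REGS);
     df_scan_add_problem (df, DF_HARD_REGS);
     df_rescan_blocks (df, NULL);

   Passing NULL to df_rescan_blocks scans all of the blocks (or those
   set by a previous call to df_set_blocks).  */
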
345 /*----------------------------------------------------------------------------
346 Storage Allocation Utilities
347 ----------------------------------------------------------------------------*/
348
349
350 /* First, grow the reg_info information. If the current size is less than
351 the number of pseudos, grow to 25% more than the number of
352 pseudos.
353
354 Second, ensure that all of the slots up to max_reg_num have been
355 filled with reg_info structures. */
356
357 static void
358 df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
359 {
360 unsigned int max_reg = max_reg_num ();
361 unsigned int new_size = max_reg;
362 struct df_scan_problem_data *problem_data
363 = (struct df_scan_problem_data *) dflow->problem_data;
364 unsigned int i;
365
366 if (ref_info->regs_size < new_size)
367 {
368 new_size += new_size / 4;
369 ref_info->regs = xrealloc (ref_info->regs,
370 new_size * sizeof (struct df_reg_info *));
371 ref_info->regs_size = new_size;
372 }
373
374 for (i = ref_info->regs_inited; i < max_reg; i++)
375 {
376 struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
377 memset (reg_info, 0, sizeof (struct df_reg_info));
378 ref_info->regs[i] = reg_info;
379 }
380
381 ref_info->regs_inited = max_reg;
382 }
383
384
385 /* Grow the ref information. */
386
387 static void
388 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
389 {
390 if (ref_info->refs_size < new_size)
391 {
392 ref_info->refs = xrealloc (ref_info->refs,
393 new_size * sizeof (struct df_ref *));
394 memset (ref_info->refs + ref_info->refs_size, 0,
395 (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
396 ref_info->refs_size = new_size;
397 }
398 }
399
400
401 /* Grow the insn information. If the current size is less than the
402 number of instructions, grow to 25% more than the number of
403 instructions. */
404
405 static void
406 df_grow_insn_info (struct df *df)
407 {
408 unsigned int new_size = get_max_uid () + 1;
409 if (df->insns_size < new_size)
410 {
411 new_size += new_size / 4;
412 df->insns = xrealloc (df->insns,
413 new_size * sizeof (struct df_insn_info *));
414 memset (df->insns + df->insns_size, 0,
415 (new_size - df->insns_size) * sizeof (struct df_insn_info *));
416 df->insns_size = new_size;
417 }
418 }
419
420
421
422 \f
423 /*----------------------------------------------------------------------------
424 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
425 ----------------------------------------------------------------------------*/
426
427 /* Rescan some BLOCKS or all the blocks defined by the last call to
428 df_set_blocks if BLOCKS is NULL. */
429
430 void
431 df_rescan_blocks (struct df *df, bitmap blocks)
432 {
433 bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
434
435 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
436 basic_block bb;
437
438 df->def_info.refs_organized_size = 0;
439 df->use_info.refs_organized_size = 0;
440
441 if (blocks)
442 {
443 int i;
444 unsigned int bb_index;
445 bitmap_iterator bi;
446 bool cleared_bits = false;
447
448 /* Need to ensure that there is space in all of the tables. */
449 unsigned int insn_num = get_max_uid () + 1;
450 insn_num += insn_num / 4;
451
452 df_grow_reg_info (dflow, &df->def_info);
453 df_grow_ref_info (&df->def_info, insn_num);
454
455 df_grow_reg_info (dflow, &df->use_info);
456 df_grow_ref_info (&df->use_info, insn_num * 2);
457
458 df_grow_insn_info (df);
459 df_grow_bb_info (dflow);
460
461 bitmap_copy (local_blocks_to_scan, blocks);
462
463 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
464 {
465 basic_block bb = BASIC_BLOCK (bb_index);
466 if (!bb)
467 {
468 bitmap_clear_bit (local_blocks_to_scan, bb_index);
469 cleared_bits = true;
470 }
471 }
472
473 if (cleared_bits)
474 bitmap_copy (blocks, local_blocks_to_scan);
475
476 df->def_info.add_refs_inline = true;
477 df->use_info.add_refs_inline = true;
478
479 for (i = df->num_problems_defined; i; i--)
480 {
481 bitmap blocks_to_reset = NULL;
482 if (dflow->problem->reset_fun)
483 {
484 if (!blocks_to_reset)
485 {
486 blocks_to_reset = BITMAP_ALLOC (NULL);
487 bitmap_copy (blocks_to_reset, local_blocks_to_scan);
488 if (df->blocks_to_scan)
489 bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
490 }
491 dflow->problem->reset_fun (dflow, blocks_to_reset);
492 }
493 if (blocks_to_reset)
494 BITMAP_FREE (blocks_to_reset);
495 }
496
497 df_refs_delete (dflow, local_blocks_to_scan);
498
499 /* This may be a mistake, but if an explicit set of blocks is
500 passed in and the set of blocks to analyze has been explicitly
501 set, add the extra blocks to blocks_to_analyze. The alternative
502 is to put an assert here. We do not want this to happen silently
503 or else we may get storage leaks. */
504 if (df->blocks_to_analyze)
505 bitmap_ior_into (df->blocks_to_analyze, blocks);
506 }
507 else
508 {
509 /* If we are going to do everything, just reallocate everything.
510 Most stuff is allocated in pools so this is faster than
511 walking it. */
512 if (df->blocks_to_analyze)
513 bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
514 else
515 FOR_ALL_BB (bb)
516 {
517 bitmap_set_bit (local_blocks_to_scan, bb->index);
518 }
519 df_scan_alloc (dflow, local_blocks_to_scan, NULL);
520
521 df->def_info.add_refs_inline = false;
522 df->use_info.add_refs_inline = false;
523 }
524
525 df_refs_record (dflow, local_blocks_to_scan);
526 #if 0
527 bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
528 #endif
529
530 if (!df->blocks_to_scan)
531 df->blocks_to_scan = BITMAP_ALLOC (NULL);
532
533 bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
534 BITMAP_FREE (local_blocks_to_scan);
535 }
536
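/* Illustrative sketch (not part of the original source): rescanning
   only the blocks whose insns were just modified, assuming DF already
   has the scanning problem added and bb1/bb2 are the touched blocks:

     bitmap blocks = BITMAP_ALLOC (NULL);
     bitmap_set_bit (blocks, bb1->index);
     bitmap_set_bit (blocks, bb2->index);
     df_rescan_blocks (df, blocks);
     BITMAP_FREE (blocks);
*/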
537
538 /* Create a new ref of type DF_REF_TYPE for register REG at address
539 LOC within INSN of BB. */
540
541 struct df_ref *
542 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
543 basic_block bb,
544 enum df_ref_type ref_type,
545 enum df_ref_flags ref_flags)
546 {
547 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
548 struct df_scan_bb_info *bb_info;
549
550 df_grow_reg_info (dflow, &df->use_info);
551 df_grow_reg_info (dflow, &df->def_info);
552 df_grow_bb_info (dflow);
553
554 /* Make sure there is a bb_info for this block. */
555 bb_info = df_scan_get_bb_info (dflow, bb->index);
556 if (!bb_info)
557 {
558 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
559 df_scan_set_bb_info (dflow, bb->index, bb_info);
560 bb_info->artificial_defs = NULL;
561 bb_info->artificial_uses = NULL;
562 }
563
564 if (ref_type == DF_REF_REG_DEF)
565 df->def_info.add_refs_inline = true;
566 else
567 df->use_info.add_refs_inline = true;
568
569 return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
570 }
571
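/* Illustrative sketch (not part of the original source): a pass that
   introduces a new use of REG inside INSN, where LOC is the address
   of the use within the insn's pattern, might add the ref directly
   instead of rescanning the whole block:

     df_ref_create (df, reg, loc, insn, bb, DF_REF_REG_USE, 0);

   All of df, reg, loc, insn and bb are assumed to come from the
   caller's context.  */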
572
573 \f
574 /*----------------------------------------------------------------------------
575 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
576 ----------------------------------------------------------------------------*/
577
578
579 /* Get the artificial defs for a basic block. */
580
581 struct df_ref *
582 df_get_artificial_defs (struct df *df, unsigned int bb_index)
583 {
584 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
585 return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
586 }
587
588
589 /* Get the artificial uses for a basic block. */
590
591 struct df_ref *
592 df_get_artificial_uses (struct df *df, unsigned int bb_index)
593 {
594 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
595 return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
596 }
597
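/* Illustrative sketch (not part of the original source): artificial
   defs and uses are singly linked through next_ref, so a client can
   walk them directly:

     struct df_ref *use;
     for (use = df_get_artificial_uses (df, bb->index); use;
          use = use->next_ref)
       mark_regno (DF_REF_REGNO (use));

   where mark_regno stands for whatever per-register processing the
   caller needs (hypothetical).  */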
598
599 /* Link REF at the front of the reg_use or reg_def chain for REGNO. */
600
601 void
602 df_reg_chain_create (struct df_reg_info *reg_info,
603 struct df_ref *ref)
604 {
605 struct df_ref *head = reg_info->reg_chain;
606 reg_info->reg_chain = ref;
607
608 DF_REF_NEXT_REG (ref) = head;
609
610 /* REF is now the head of the chain; there is no df_ref before it to link back to. */
611 DF_REF_PREV_REG (ref) = NULL;
612
613 if (head)
614 DF_REF_PREV_REG (head) = ref;
615 }
616
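/* Illustrative sketch (not part of the original source): walking the
   chain built here to visit every def of REGNO, using the accessors
   from df.h:

     struct df_ref *def;
     for (def = DF_REG_DEF_GET (df, regno)->reg_chain; def;
          def = DF_REF_NEXT_REG (def))
       process_def (def);

   process_def stands for the caller's per-def processing
   (hypothetical).  */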
617
618 /* Remove REF from the CHAIN. Return the head of the chain. This
619 will be CHAIN unless the REF was at the beginning of the chain. */
620
621 static struct df_ref *
622 df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
623 {
624 struct df_ref *orig_chain = chain;
625 struct df_ref *prev = NULL;
626 while (chain)
627 {
628 if (chain == ref)
629 {
630 if (prev)
631 {
632 prev->next_ref = ref->next_ref;
633 ref->next_ref = NULL;
634 return orig_chain;
635 }
636 else
637 {
638 chain = ref->next_ref;
639 ref->next_ref = NULL;
640 return chain;
641 }
642 }
643
644 prev = chain;
645 chain = chain->next_ref;
646 }
647
648 /* Someone passed in a ref that was not in the chain. */
649 gcc_unreachable ();
650 return NULL;
651 }
652
653
654 /* Unlink and delete REF from the reg_use or reg_def chain. Also delete
655 the def-use or use-def chain if it exists. Returns the next ref in
656 the uses or defs chain. */
657
658 struct df_ref *
659 df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
660 {
661 struct df *df = dflow->df;
662 struct df_ref *next = DF_REF_NEXT_REG (ref);
663 struct df_ref *prev = DF_REF_PREV_REG (ref);
664 struct df_scan_problem_data *problem_data
665 = (struct df_scan_problem_data *) dflow->problem_data;
666 struct df_reg_info *reg_info;
667 struct df_ref *next_ref = ref->next_ref;
668 unsigned int id = DF_REF_ID (ref);
669
670 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
671 {
672 reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
673 df->def_info.bitmap_size--;
674 if (df->def_info.refs && (id < df->def_info.refs_size))
675 DF_DEFS_SET (df, id, NULL);
676 }
677 else
678 {
679 reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
680 df->use_info.bitmap_size--;
681 if (df->use_info.refs && (id < df->use_info.refs_size))
682 DF_USES_SET (df, id, NULL);
683 }
684
685 /* Delete any def-use or use-def chains that start here. */
686 if (DF_REF_CHAIN (ref))
687 df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);
688
689 reg_info->n_refs--;
690
691 /* Unlink from the reg chain. If there is no prev, this is the
692 first of the list. If not, just join the next and prev. */
693 if (prev)
694 {
695 DF_REF_NEXT_REG (prev) = next;
696 if (next)
697 DF_REF_PREV_REG (next) = prev;
698 }
699 else
700 {
701 reg_info->reg_chain = next;
702 if (next)
703 DF_REF_PREV_REG (next) = NULL;
704 }
705
706 pool_free (problem_data->ref_pool, ref);
707 return next_ref;
708 }
709
710
711 /* Unlink REF from all def-use/use-def chains, etc. */
712
713 void
714 df_ref_remove (struct df *df, struct df_ref *ref)
715 {
716 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
717 if (DF_REF_REG_DEF_P (ref))
718 {
719 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
720 {
721 struct df_scan_bb_info *bb_info
722 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
723 bb_info->artificial_defs
724 = df_ref_unlink (bb_info->artificial_defs, ref);
725 }
726 else
727 DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
728 = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);
729
730 if (df->def_info.add_refs_inline)
731 DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
732 }
733 else
734 {
735 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
736 {
737 struct df_scan_bb_info *bb_info
738 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
739 bb_info->artificial_uses
740 = df_ref_unlink (bb_info->artificial_uses, ref);
741 }
742 else
743 DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
744 = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);
745
746 if (df->use_info.add_refs_inline)
747 DF_USES_SET (df, DF_REF_ID (ref), NULL);
748 }
749
750 df_reg_chain_unlink (dflow, ref);
751 }
752
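/* Illustrative sketch (not part of the original source): deleting a
   known def of REGNO from INSN.  The break is required because
   df_ref_remove frees REF:

     struct df_ref *ref;
     for (ref = DF_INSN_GET (df, insn)->defs; ref; ref = ref->next_ref)
       if (DF_REF_REGNO (ref) == regno)
         {
           df_ref_remove (df, ref);
           break;
         }
*/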
753
754 /* Create the insn record for INSN. If there was one there, zero it out. */
755
756 static struct df_insn_info *
757 df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
758 {
759 struct df *df = dflow->df;
760 struct df_scan_problem_data *problem_data
761 = (struct df_scan_problem_data *) dflow->problem_data;
762
763 struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
764 if (!insn_rec)
765 {
766 insn_rec = pool_alloc (problem_data->insn_pool);
767 DF_INSN_SET (df, insn, insn_rec);
768 }
769 memset (insn_rec, 0, sizeof (struct df_insn_info));
770
771 return insn_rec;
772 }
773
774
775 /* Delete all of the refs information from INSN. */
776
777 void
778 df_insn_refs_delete (struct dataflow *dflow, rtx insn)
779 {
780 struct df *df = dflow->df;
781 unsigned int uid = INSN_UID (insn);
782 struct df_insn_info *insn_info = NULL;
783 struct df_ref *ref;
784 struct df_scan_problem_data *problem_data
785 = (struct df_scan_problem_data *) dflow->problem_data;
786
787 if (uid < df->insns_size)
788 insn_info = DF_INSN_UID_GET (df, uid);
789
790 if (insn_info)
791 {
792 struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;
793
794 while (hardregs)
795 {
796 struct df_mw_hardreg *next_hr = hardregs->next;
797 struct df_link *link = hardregs->regs;
798 while (link)
799 {
800 struct df_link *next_l = link->next;
801 pool_free (problem_data->mw_link_pool, link);
802 link = next_l;
803 }
804
805 pool_free (problem_data->mw_reg_pool, hardregs);
806 hardregs = next_hr;
807 }
808
809 ref = insn_info->defs;
810 while (ref)
811 ref = df_reg_chain_unlink (dflow, ref);
812
813 ref = insn_info->uses;
814 while (ref)
815 ref = df_reg_chain_unlink (dflow, ref);
816
817 pool_free (problem_data->insn_pool, insn_info);
818 DF_INSN_SET (df, insn, NULL);
819 }
820 }
821
822
823 /* Delete all of the refs information from basic_block with BB_INDEX. */
824
825 void
826 df_bb_refs_delete (struct dataflow *dflow, int bb_index)
827 {
828 struct df_ref *def;
829 struct df_ref *use;
830
831 struct df_scan_bb_info *bb_info
832 = df_scan_get_bb_info (dflow, bb_index);
833 rtx insn;
834 basic_block bb = BASIC_BLOCK (bb_index);
835 FOR_BB_INSNS (bb, insn)
836 {
837 if (INSN_P (insn))
838 {
839 /* Delete the refs within INSN. */
840 df_insn_refs_delete (dflow, insn);
841 }
842 }
843
844 /* Get rid of any artificial uses or defs. */
845 if (bb_info)
846 {
847 def = bb_info->artificial_defs;
848 while (def)
849 def = df_reg_chain_unlink (dflow, def);
850 bb_info->artificial_defs = NULL;
851 use = bb_info->artificial_uses;
852 while (use)
853 use = df_reg_chain_unlink (dflow, use);
854 bb_info->artificial_uses = NULL;
855 }
856 }
857
858
859 /* Delete all of the refs information from BLOCKS. */
860
861 void
862 df_refs_delete (struct dataflow *dflow, bitmap blocks)
863 {
864 bitmap_iterator bi;
865 unsigned int bb_index;
866
867 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
868 {
869 df_bb_refs_delete (dflow, bb_index);
870 }
871 }
872
873
874 /* Build the ref table for either the uses or defs from the reg-use
875 or reg-def chains. */
876
877 void
878 df_reorganize_refs (struct df_ref_info *ref_info)
879 {
880 unsigned int m = ref_info->regs_inited;
881 unsigned int regno;
882 unsigned int offset = 0;
883 unsigned int size = 0;
884
885 if (ref_info->refs_organized_size)
886 return;
887
888 if (ref_info->refs_size < ref_info->bitmap_size)
889 {
890 int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
891 df_grow_ref_info (ref_info, new_size);
892 }
893
894 for (regno = 0; regno < m; regno++)
895 {
896 struct df_reg_info *reg_info = ref_info->regs[regno];
897 int count = 0;
898 if (reg_info)
899 {
900 struct df_ref *ref = reg_info->reg_chain;
901 reg_info->begin = offset;
902 while (ref)
903 {
904 ref_info->refs[offset] = ref;
905 DF_REF_ID (ref) = offset++;
906 ref = DF_REF_NEXT_REG (ref);
907 count++;
908 size++;
909 }
910 reg_info->n_refs = count;
911 }
912 }
913
914 /* The bitmap size is not decremented when refs are deleted. So
915 reset it now that we have squished out all of the empty
916 slots. */
917 ref_info->bitmap_size = size;
918 ref_info->refs_organized_size = size;
919 ref_info->add_refs_inline = true;
920 }
921
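/* Illustrative sketch (not part of the original source): once the refs
   are reorganized, all of the defs of a register sit contiguously in
   the big refs array, so they can be visited by index.  DF_DEFS_GET is
   assumed to be the df.h accessor paired with DF_DEFS_SET:

     struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
     unsigned int i;
     for (i = reg_info->begin; i < reg_info->begin + reg_info->n_refs; i++)
       process_def (DF_DEFS_GET (df, i));

   process_def is hypothetical.  */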
922 \f
923 /*----------------------------------------------------------------------------
924 Hard core instruction scanning code. No external interfaces here,
925 just a lot of routines that look inside insns.
926 ----------------------------------------------------------------------------*/
927
928 /* Create a ref and add it to the reg-def or reg-use chains. */
929
930 static struct df_ref *
931 df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
932 basic_block bb, rtx insn,
933 enum df_ref_type ref_type,
934 enum df_ref_flags ref_flags)
935 {
936 struct df_ref *this_ref;
937 struct df *df = dflow->df;
938 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
939 struct df_scan_problem_data *problem_data
940 = (struct df_scan_problem_data *) dflow->problem_data;
941
942 this_ref = pool_alloc (problem_data->ref_pool);
943 DF_REF_REG (this_ref) = reg;
944 DF_REF_REGNO (this_ref) = regno;
945 DF_REF_LOC (this_ref) = loc;
946 DF_REF_INSN (this_ref) = insn;
947 DF_REF_CHAIN (this_ref) = NULL;
948 DF_REF_TYPE (this_ref) = ref_type;
949 DF_REF_FLAGS (this_ref) = ref_flags;
950 DF_REF_DATA (this_ref) = NULL;
951 DF_REF_BB (this_ref) = bb;
952
953 /* Link the ref into the reg_def and reg_use chains and keep a count
954 of the instances. */
955 switch (ref_type)
956 {
957 case DF_REF_REG_DEF:
958 {
959 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
960 unsigned int size = df->def_info.refs_organized_size
961 ? df->def_info.refs_organized_size
962 : df->def_info.bitmap_size;
963
964 /* Add the ref to the reg_def chain. */
965 reg_info->n_refs++;
966 df_reg_chain_create (reg_info, this_ref);
967 DF_REF_ID (this_ref) = size;
968 if (df->def_info.add_refs_inline)
969 {
970 if (size >= df->def_info.refs_size)
971 {
972 int new_size = size + size / 4;
973 df_grow_ref_info (&df->def_info, new_size);
974 }
975 /* Add the ref to the big array of defs. */
976 DF_DEFS_SET (df, size, this_ref);
977 if (df->def_info.refs_organized_size)
978 df->def_info.refs_organized_size++;
979 }
980
981 df->def_info.bitmap_size++;
982
983 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
984 {
985 struct df_scan_bb_info *bb_info
986 = df_scan_get_bb_info (dflow, bb->index);
987 this_ref->next_ref = bb_info->artificial_defs;
988 bb_info->artificial_defs = this_ref;
989 }
990 else
991 {
992 this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
993 DF_INSN_GET (df, insn)->defs = this_ref;
994 }
995 }
996 break;
997
998 case DF_REF_REG_MEM_LOAD:
999 case DF_REF_REG_MEM_STORE:
1000 case DF_REF_REG_USE:
1001 {
1002 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
1003 unsigned int size = df->use_info.refs_organized_size
1004 ? df->use_info.refs_organized_size
1005 : df->use_info.bitmap_size;
1006
1007 /* Add the ref to the reg_use chain. */
1008 reg_info->n_refs++;
1009 df_reg_chain_create (reg_info, this_ref);
1010 DF_REF_ID (this_ref) = size;
1011 if (df->use_info.add_refs_inline)
1012 {
1013 if (size >= df->use_info.refs_size)
1014 {
1015 int new_size = size + size / 4;
1016 df_grow_ref_info (&df->use_info, new_size);
1017 }
1018 /* Add the ref to the big array of uses. */
1019 DF_USES_SET (df, size, this_ref);
1020 if (df->use_info.refs_organized_size)
1021 df->use_info.refs_organized_size++;
1022 }
1023
1024 df->use_info.bitmap_size++;
1025 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
1026 {
1027 struct df_scan_bb_info *bb_info
1028 = df_scan_get_bb_info (dflow, bb->index);
1029 this_ref->next_ref = bb_info->artificial_uses;
1030 bb_info->artificial_uses = this_ref;
1031 }
1032 else
1033 {
1034 this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
1035 DF_INSN_GET (df, insn)->uses = this_ref;
1036 }
1037 }
1038 break;
1039
1040 default:
1041 gcc_unreachable ();
1042
1043 }
1044 return this_ref;
1045 }
1046
1047
1048 /* Create new references of type DF_REF_TYPE for each part of register REG
1049 at address LOC within INSN of BB. */
1050
1051 static void
1052 df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
1053 basic_block bb, rtx insn,
1054 enum df_ref_type ref_type,
1055 enum df_ref_flags ref_flags,
1056 bool record_live)
1057 {
1058 struct df *df = dflow->df;
1059 rtx oldreg = reg;
1060 unsigned int regno;
1061
1062 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
1063
1064 /* For the reg allocator we are interested in some SUBREG rtx's, but not
1065 all. Notably only those representing a word extraction from a multi-word
1066 reg. As written in the documentation those should have the form
1067 (subreg:SI (reg:M A) N), with size(SImode) < size(Mmode).
1068 XXX We could also use the global word_mode variable. */
1069 if ((dflow->flags & DF_SUBREGS) == 0
1070 && GET_CODE (reg) == SUBREG
1071 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
1072 || GET_MODE_SIZE (GET_MODE (reg))
1073 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
1074 {
1075 loc = &SUBREG_REG (reg);
1076 reg = *loc;
1077 ref_flags |= DF_REF_STRIPPED;
1078 }
1079
1080 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
1081 if (regno < FIRST_PSEUDO_REGISTER)
1082 {
1083 unsigned int i;
1084 unsigned int endregno;
1085 struct df_mw_hardreg *hardreg = NULL;
1086 struct df_scan_problem_data *problem_data
1087 = (struct df_scan_problem_data *) dflow->problem_data;
1088
1089 if (!(dflow->flags & DF_HARD_REGS))
1090 return;
1091
1092 if (GET_CODE (reg) == SUBREG)
1093 {
1094 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1095 SUBREG_BYTE (reg), GET_MODE (reg));
1096 endregno = regno + subreg_nregs (reg);
1097 }
1098 else
1099 endregno = END_HARD_REGNO (reg);
1100
1101 /* If this is a multiword hardreg, we create some extra data structures that
1102 will enable us to easily build REG_DEAD and REG_UNUSED notes. */
1103 if ((endregno != regno + 1) && insn)
1104 {
1105 struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
1106 /* Sets to a subreg of a multiword register are partial.
1107 Sets to a non-subreg of a multiword register are not. */
1108 if (GET_CODE (oldreg) == SUBREG)
1109 ref_flags |= DF_REF_PARTIAL;
1110 ref_flags |= DF_REF_MW_HARDREG;
1111 hardreg = pool_alloc (problem_data->mw_reg_pool);
1112 hardreg->next = insn_info->mw_hardregs;
1113 insn_info->mw_hardregs = hardreg;
1114 hardreg->type = ref_type;
1115 hardreg->flags = ref_flags;
1116 hardreg->mw_reg = reg;
1117 hardreg->regs = NULL;
1118
1119 }
1120
1121 for (i = regno; i < endregno; i++)
1122 {
1123 struct df_ref *ref;
1124
1125 /* Calls are handled at call site because regs_ever_live
1126 doesn't include clobbered regs, only used ones. */
1127 if (ref_type == DF_REF_REG_DEF && record_live)
1128 regs_ever_live[i] = 1;
1129 else if ((ref_type == DF_REF_REG_USE
1130 || ref_type == DF_REF_REG_MEM_STORE
1131 || ref_type == DF_REF_REG_MEM_LOAD)
1132 && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
1133 {
1134 /* Set regs_ever_live on uses of non-eliminable frame
1135 pointers and arg pointers. */
1136 if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
1137 && (regno == FRAME_POINTER_REGNUM
1138 || regno == ARG_POINTER_REGNUM)))
1139 regs_ever_live[i] = 1;
1140 }
1141
1142 ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
1143 bb, insn, ref_type, ref_flags);
1144 if (hardreg)
1145 {
1146 struct df_link *link = pool_alloc (problem_data->mw_link_pool);
1147
1148 link->next = hardreg->regs;
1149 link->ref = ref;
1150 hardreg->regs = link;
1151 }
1152 }
1153 }
1154 else
1155 {
1156 df_ref_create_structure (dflow, reg, loc,
1157 bb, insn, ref_type, ref_flags);
1158 }
1159 }
1160
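/* Worked example (not part of the original source): on a 32-bit target
   where (reg:DI 0) occupies hard registers 0 and 1, recording a def of
   (reg:DI 0) creates two df_refs, one for each of regno_reg_rtx[0] and
   regno_reg_rtx[1], plus one df_mw_hardreg on the insn whose regs list
   links the two refs together, so that REG_DEAD and REG_UNUSED notes
   for the multiword register can be rebuilt later.  */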
1161
1162 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
1163 covered by the outer mode is smaller than that covered by the inner mode
1164 is a read-modify-write operation.
1165 This function returns true iff the SUBREG X is such a SUBREG. */
1166
1167 bool
1168 df_read_modify_subreg_p (rtx x)
1169 {
1170 unsigned int isize, osize;
1171 if (GET_CODE (x) != SUBREG)
1172 return false;
1173 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1174 osize = GET_MODE_SIZE (GET_MODE (x));
1175 return (isize > osize && isize > UNITS_PER_WORD);
1176 }
1177
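/* Worked example (not part of the original source): with
   UNITS_PER_WORD == 4, (subreg:SI (reg:DI 65) 0) gives isize == 8 and
   osize == 4, so isize > osize and isize > UNITS_PER_WORD both hold
   and the function returns true: writing the low word preserves the
   high word, a read-modify-write.  For (subreg:QI (reg:SI 66) 0),
   isize == 4 is not greater than UNITS_PER_WORD, so the function
   returns false.  */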
1178
1179 /* Process all the registers defined in the rtx, X.
1180 Autoincrement/decrement definitions will be picked up by
1181 df_uses_record. */
1182
1183 static void
1184 df_def_record_1 (struct dataflow *dflow, rtx x,
1185 basic_block bb, rtx insn,
1186 enum df_ref_flags flags, bool record_live)
1187 {
1188 rtx *loc;
1189 rtx dst;
1190 bool dst_in_strict_lowpart = false;
1191
1192 /* We may recursively call ourselves on EXPR_LIST when dealing with a PARALLEL
1193 construct. */
1194 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
1195 loc = &XEXP (x, 0);
1196 else
1197 loc = &SET_DEST (x);
1198 dst = *loc;
1199
1200 /* It is legal to have a set destination be a parallel. */
1201 if (GET_CODE (dst) == PARALLEL)
1202 {
1203 int i;
1204
1205 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
1206 {
1207 rtx temp = XVECEXP (dst, 0, i);
1208 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
1209 || GET_CODE (temp) == SET)
1210 df_def_record_1 (dflow, temp, bb, insn,
1211 GET_CODE (temp) == CLOBBER
1212 ? flags | DF_REF_MUST_CLOBBER : flags,
1213 record_live);
1214 }
1215 return;
1216 }
1217
1218 /* Maybe we should flag the use of STRICT_LOW_PART somehow. It might
1219 be handy for the reg allocator. */
1220 while (GET_CODE (dst) == STRICT_LOW_PART
1221 || GET_CODE (dst) == ZERO_EXTRACT
1222 || df_read_modify_subreg_p (dst))
1223 {
1224 #if 0
1225 /* Strict low part always contains SUBREG, but we do not want to make
1226 it appear outside, as whole register is always considered. */
1227 if (GET_CODE (dst) == STRICT_LOW_PART)
1228 {
1229 loc = &XEXP (dst, 0);
1230 dst = *loc;
1231 }
1232 #endif
1233 loc = &XEXP (dst, 0);
1234 if (GET_CODE (dst) == STRICT_LOW_PART)
1235 dst_in_strict_lowpart = true;
1236 dst = *loc;
1237 flags |= DF_REF_READ_WRITE;
1238
1239 }
1240
1241 /* Sets to a subreg of a single word register are partial sets if
1242 they are wrapped in a strict lowpart, and not partial otherwise.
1243 */
1244 if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
1245 && dst_in_strict_lowpart)
1246 flags |= DF_REF_PARTIAL;
1247
1248 if (REG_P (dst)
1249 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
1250 df_ref_record (dflow, dst, loc, bb, insn,
1251 DF_REF_REG_DEF, flags, record_live);
1252 }
1253
1254
1255 /* Process all the registers defined in the pattern rtx, X. */
1256
1257 static void
1258 df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
1259 {
1260 RTX_CODE code = GET_CODE (x);
1261
1262 if (code == SET || code == CLOBBER)
1263 {
1264 /* Mark the single def within the pattern. */
1265 df_def_record_1 (dflow, x, bb, insn,
1266 code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
1267 }
1268 else if (code == COND_EXEC)
1269 {
1270 df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
1271 }
1272 else if (code == PARALLEL)
1273 {
1274 int i;
1275
1276 /* Mark the multiple defs within the pattern. */
1277 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1278 df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
1279 }
1280 }
1281
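/* Worked example (not part of the original source): for the insn
   pattern

     (set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))

   df_defs_record creates one DF_REF_REG_DEF for pseudo 60, while
   df_uses_record (called later from df_insn_refs_record on the whole
   pattern) creates DF_REF_REG_USE refs for pseudos 61 and 62.  */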
1282
1283 /* Process all the registers used in the rtx at address LOC. */
1284
1285 static void
1286 df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
1287 basic_block bb, rtx insn, enum df_ref_flags flags)
1288 {
1289 RTX_CODE code;
1290 rtx x;
1291 retry:
1292 x = *loc;
1293 if (!x)
1294 return;
1295 code = GET_CODE (x);
1296 switch (code)
1297 {
1298 case LABEL_REF:
1299 case SYMBOL_REF:
1300 case CONST_INT:
1301 case CONST:
1302 case CONST_DOUBLE:
1303 case CONST_VECTOR:
1304 case PC:
1305 case CC0:
1306 case ADDR_VEC:
1307 case ADDR_DIFF_VEC:
1308 return;
1309
1310 case CLOBBER:
1311 /* If we are clobbering a MEM, mark any registers inside the address
1312 as being used. */
1313 if (MEM_P (XEXP (x, 0)))
1314 df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
1315 DF_REF_REG_MEM_STORE, bb, insn, flags);
1316
1317 /* If we're clobbering a REG then we have a def so ignore. */
1318 return;
1319
1320 case MEM:
1321 df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
1322 flags & DF_REF_IN_NOTE);
1323 return;
1324
1325 case SUBREG:
1326 /* While we're here, optimize this case. */
1327 flags |= DF_REF_PARTIAL;
1328 /* In case the SUBREG is not of a REG, do not optimize. */
1329 if (!REG_P (SUBREG_REG (x)))
1330 {
1331 loc = &SUBREG_REG (x);
1332 df_uses_record (dflow, loc, ref_type, bb, insn, flags);
1333 return;
1334 }
1335 /* ... Fall through ... */
1336
1337 case REG:
1338 df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
1339 return;
1340
1341 case SET:
1342 {
1343 rtx dst = SET_DEST (x);
1344 gcc_assert (!(flags & DF_REF_IN_NOTE));
1345 df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
1346
1347 switch (GET_CODE (dst))
1348 {
1349 case SUBREG:
1350 if (df_read_modify_subreg_p (dst))
1351 {
1352 df_uses_record (dflow, &SUBREG_REG (dst),
1353 DF_REF_REG_USE, bb,
1354 insn, flags | DF_REF_READ_WRITE);
1355 break;
1356 }
1357 /* Fall through. */
1358 case REG:
1359 case PARALLEL:
1360 case SCRATCH:
1361 case PC:
1362 case CC0:
1363 break;
1364 case MEM:
1365 df_uses_record (dflow, &XEXP (dst, 0),
1366 DF_REF_REG_MEM_STORE,
1367 bb, insn, flags);
1368 break;
1369 case STRICT_LOW_PART:
1370 {
1371 rtx *temp = &XEXP (dst, 0);
1372 /* A strict_low_part uses the whole REG and not just the
1373 SUBREG. */
1374 dst = XEXP (dst, 0);
1375 df_uses_record (dflow,
1376 (GET_CODE (dst) == SUBREG)
1377 ? &SUBREG_REG (dst) : temp,
1378 DF_REF_REG_USE, bb,
1379 insn, DF_REF_READ_WRITE);
1380 }
1381 break;
1382 case ZERO_EXTRACT:
1383 case SIGN_EXTRACT:
1384 df_uses_record (dflow, &XEXP (dst, 0),
1385 DF_REF_REG_USE, bb, insn,
1386 DF_REF_READ_WRITE);
1387 df_uses_record (dflow, &XEXP (dst, 1),
1388 DF_REF_REG_USE, bb, insn, flags);
1389 df_uses_record (dflow, &XEXP (dst, 2),
1390 DF_REF_REG_USE, bb, insn, flags);
1391 dst = XEXP (dst, 0);
1392 break;
1393 default:
1394 gcc_unreachable ();
1395 }
1396 return;
1397 }
1398
1399 case RETURN:
1400 break;
1401
1402 case ASM_OPERANDS:
1403 case UNSPEC_VOLATILE:
1404 case TRAP_IF:
1405 case ASM_INPUT:
1406 {
1407 /* Traditional and volatile asm instructions must be
1408 considered to use and clobber all hard registers, all
1409 pseudo-registers and all of memory. So must TRAP_IF and
1410 UNSPEC_VOLATILE operations.
1411
1412 Consider for instance a volatile asm that changes the fpu
1413 rounding mode. An insn should not be moved across this
1414 even if it only uses pseudo-regs because it might give an
1415 incorrectly rounded result.
1416
1417 However, flow.c's liveness computation did *not* do this,
1418 giving the reasoning as " ?!? Unfortunately, marking all
1419 hard registers as live causes massive problems for the
1420 register allocator and marking all pseudos as live creates
1421 mountains of uninitialized variable warnings."
1422
1423 In order to maintain the status quo with regard to liveness
1424 and uses, we do what flow.c did and just mark any regs we
1425 can find in ASM_OPERANDS as used. Later on, when liveness
1426 is computed, asm insns are scanned and regs_asm_clobbered
1427 is filled out.
1428
1429 For all ASM_OPERANDS, we must traverse the vector of input
1430 operands. We cannot just fall through here since then we
1431 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
1432 which does not indicate a traditional asm, unlike its normal
1433 usage. */
1434 if (code == ASM_OPERANDS)
1435 {
1436 int j;
1437
1438 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1439 df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
1440 DF_REF_REG_USE, bb, insn, flags);
1441 return;
1442 }
1443 break;
1444 }
1445
1446 case PRE_DEC:
1447 case POST_DEC:
1448 case PRE_INC:
1449 case POST_INC:
1450 case PRE_MODIFY:
1451 case POST_MODIFY:
1452 /* Catch the def of the register being modified. */
1453 flags |= DF_REF_READ_WRITE;
1454 df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
1455 DF_REF_REG_DEF, flags, true);
1456
1457 /* ... Fall through to handle uses ... */
1458
1459 default:
1460 break;
1461 }
1462
1463 /* Recursively scan the operands of this expression. */
1464 {
1465 const char *fmt = GET_RTX_FORMAT (code);
1466 int i;
1467
1468 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1469 {
1470 if (fmt[i] == 'e')
1471 {
1472 /* Tail recursive case: save a function call level. */
1473 if (i == 0)
1474 {
1475 loc = &XEXP (x, 0);
1476 goto retry;
1477 }
1478 df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
1479 }
1480 else if (fmt[i] == 'E')
1481 {
1482 int j;
1483 for (j = 0; j < XVECLEN (x, i); j++)
1484 df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
1485 bb, insn, flags);
1486 }
1487 }
1488 }
1489 }
1490
1491 /* Return true if *LOC contains an asm. */
1492
1493 static int
1494 df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1495 {
1496 if (!*loc)
1497 return 0;
1498 if (GET_CODE (*loc) == ASM_OPERANDS)
1499 return 1;
1500 return 0;
1501 }
1502
1503
1504 /* Return true if INSN contains an ASM. */
1505
1506 static int
1507 df_insn_contains_asm (rtx insn)
1508 {
1509 return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
1510 }
1511
1512
1513
1514 /* Record all the refs for DF within INSN of basic block BB. */
1515
1516 static void
1517 df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
1518 {
1519 struct df *df = dflow->df;
1520 int i;
1521
1522 if (INSN_P (insn))
1523 {
1524 rtx note;
1525
1526 if (df_insn_contains_asm (insn))
1527 DF_INSN_CONTAINS_ASM (df, insn) = true;
1528
1529 /* Record register defs. */
1530 df_defs_record (dflow, PATTERN (insn), bb, insn);
1531
1532 if (dflow->flags & DF_EQUIV_NOTES)
1533 for (note = REG_NOTES (insn); note;
1534 note = XEXP (note, 1))
1535 {
1536 switch (REG_NOTE_KIND (note))
1537 {
1538 case REG_EQUIV:
1539 case REG_EQUAL:
1540 df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
1541 bb, insn, DF_REF_IN_NOTE);
1542 default:
1543 break;
1544 }
1545 }
1546
1547 if (CALL_P (insn))
1548 {
1549 rtx note;
1550
1551 /* Record the registers used to pass arguments and the registers
1552 explicitly noted as clobbered. */
1553 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1554 note = XEXP (note, 1))
1555 {
1556 if (GET_CODE (XEXP (note, 0)) == USE)
1557 df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
1558 DF_REF_REG_USE,
1559 bb, insn, 0);
1560 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1561 {
1562 df_defs_record (dflow, XEXP (note, 0), bb, insn);
1563 if (REG_P (XEXP (XEXP (note, 0), 0)))
1564 {
1565 rtx reg = XEXP (XEXP (note, 0), 0);
1566 int regno_last;
1567 int regno_first;
1568 int i;
1569
1570 regno_last = regno_first = REGNO (reg);
1571 if (regno_first < FIRST_PSEUDO_REGISTER)
1572 regno_last
1573 += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
1574 for (i = regno_first; i <= regno_last; i++)
1575 regs_ever_live[i] = 1;
1576 }
1577 }
1578 }
1579
1580 /* The stack ptr is used (honorarily) by a CALL insn. */
1581 df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
1582 DF_REF_REG_USE, bb, insn,
1583 0);
1584
1585 if (dflow->flags & DF_HARD_REGS)
1586 {
1587 bitmap_iterator bi;
1588 unsigned int ui;
1589 /* Calls may also reference any of the global registers,
1590 so they are recorded as used. */
1591 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1592 if (global_regs[i])
1593 df_uses_record (dflow, &regno_reg_rtx[i],
1594 DF_REF_REG_USE, bb, insn,
1595 0);
1596 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
1597 df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb,
1598 insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
1599 }
1600 }
1601
1602 /* Record the register uses. */
1603 df_uses_record (dflow, &PATTERN (insn),
1604 DF_REF_REG_USE, bb, insn, 0);
1605
1606 }
1607 }
1608
1609 static bool
1610 df_has_eh_preds (basic_block bb)
1611 {
1612 edge e;
1613 edge_iterator ei;
1614
1615 FOR_EACH_EDGE (e, ei, bb->preds)
1616 {
1617 if (e->flags & EDGE_EH)
1618 return true;
1619 }
1620 return false;
1621 }
1622
1623 /* Record all the refs within the basic block BB. */
1624
1625 static void
1626 df_bb_refs_record (struct dataflow *dflow, basic_block bb)
1627 {
1628 struct df *df = dflow->df;
1629 rtx insn;
1630 int luid = 0;
1631 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
1632 bitmap artificial_uses_at_bottom = NULL;
1633
1634 if (dflow->flags & DF_HARD_REGS)
1635 artificial_uses_at_bottom = BITMAP_ALLOC (NULL);
1636
1637 /* Need to make sure that there is a record in the basic block info. */
1638 if (!bb_info)
1639 {
1640 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
1641 df_scan_set_bb_info (dflow, bb->index, bb_info);
1642 bb_info->artificial_defs = NULL;
1643 bb_info->artificial_uses = NULL;
1644 }
1645
1646 /* Scan the block an insn at a time from beginning to end. */
1647 FOR_BB_INSNS (bb, insn)
1648 {
1649 df_insn_create_insn_record (dflow, insn);
1650 if (INSN_P (insn))
1651 {
1652 /* Record the refs within INSN. */
1653 DF_INSN_LUID (df, insn) = luid++;
1654 df_insn_refs_record (dflow, bb, insn);
1655 }
1656 DF_INSN_LUID (df, insn) = luid;
1657 }
1658
1659 #ifdef EH_RETURN_DATA_REGNO
1660 if ((dflow->flags & DF_HARD_REGS)
1661 && df_has_eh_preds (bb))
1662 {
1663 unsigned int i;
1664 /* Mark the registers that will contain data for the handler. */
1665 for (i = 0; ; ++i)
1666 {
1667 unsigned regno = EH_RETURN_DATA_REGNO (i);
1668 if (regno == INVALID_REGNUM)
1669 break;
1670 df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
1671 bb, NULL,
1672 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
1673 false);
1674 }
1675 }
1676 #endif
1677
1678
1679 if ((dflow->flags & DF_HARD_REGS)
1680 && df_has_eh_preds (bb))
1681 {
1682 #ifdef EH_USES
1683 unsigned int i;
1684 /* This code is putting in an artificial ref for the use at the
1685 TOP of the block that receives the exception. It is too
1686 cumbersome to actually put the ref on the edge. We could
1687 either model this at the top of the receiver block or the
1688 bottom of the sender block.
1689
1690 The bottom of the sender block is problematic because not all
1691 out-edges of a block are eh-edges. However, it is true
1692 that all edges into a block are either eh-edges or none of
1693 them are eh-edges. Thus, we can model this at the top of the
1694 eh-receiver for all of the edges at once. */
1695 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1696 if (EH_USES (i))
1697 df_uses_record (dflow, &regno_reg_rtx[i],
1698 DF_REF_REG_USE, bb, NULL,
1699 DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
1700 #endif
1701
1702 /* The following code (down through the arg_pointer setting) APPEARS
1703 to be necessary because there is nothing that actually
1704 describes what the exception handling code may actually need
1705 to keep alive. */
1706 if (reload_completed)
1707 {
1708 if (frame_pointer_needed)
1709 {
1710 bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
1711 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1712 bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
1713 #endif
1714 }
1715 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1716 if (fixed_regs[ARG_POINTER_REGNUM])
1717 bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
1718 #endif
1719 }
1720 }
1721
1722 if ((dflow->flags & DF_HARD_REGS)
1723 && bb->index >= NUM_FIXED_BLOCKS)
1724 {
1725 /* Before reload, there are a few registers that must be forced
1726 live everywhere -- which might not already be the case for
1727 blocks within infinite loops. */
1728 if (!reload_completed)
1729 {
1730
1731 /* Any reference to any pseudo before reload is a potential
1732 reference of the frame pointer. */
1733 bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
1734
1735 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1736 /* Pseudos with argument area equivalences may require
1737 reloading via the argument pointer. */
1738 if (fixed_regs[ARG_POINTER_REGNUM])
1739 bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
1740 #endif
1741
1742 /* Any constant, or pseudo with constant equivalences, may
1743 require reloading from memory using the pic register. */
1744 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1745 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1746 bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
1747 }
1748 /* The all-important stack pointer must always be live. */
1749 bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
1750 }
1751
1752 if (dflow->flags & DF_HARD_REGS)
1753 {
1754 bitmap_iterator bi;
1755 unsigned int regno;
1756
1757 EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
1758 {
1759 df_uses_record (dflow, &regno_reg_rtx[regno],
1760 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1761 }
1762
1763 BITMAP_FREE (artificial_uses_at_bottom);
1764 }
1765 }
1766
1767 /* Records the implicit definitions at targets of nonlocal gotos in BLOCKS. */
1768
1769 static void
1770 record_nonlocal_goto_receiver_defs (struct dataflow *dflow, bitmap blocks)
1771 {
1772 rtx x;
1773 basic_block bb;
1774
1775 /* See expand_builtin_setjmp_receiver; hard_frame_pointer_rtx is used in
1776 the nonlocal goto receiver, and needs to be considered defined
1777 implicitly. */
1778 if (!(dflow->flags & DF_HARD_REGS))
1779 return;
1780
1781 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1782 {
1783 bb = BLOCK_FOR_INSN (XEXP (x, 0));
1784 if (!bitmap_bit_p (blocks, bb->index))
1785 continue;
1786
1787 df_ref_record (dflow, hard_frame_pointer_rtx, &hard_frame_pointer_rtx,
1788 bb, NULL,
1789 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
1790 false);
1791 }
1792 }
1793
1794 /* Record all the refs in the basic blocks specified by BLOCKS. */
1795
1796 static void
1797 df_refs_record (struct dataflow *dflow, bitmap blocks)
1798 {
1799 unsigned int bb_index;
1800 bitmap_iterator bi;
1801
1802 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
1803 {
1804 basic_block bb = BASIC_BLOCK (bb_index);
1805 df_bb_refs_record (dflow, bb);
1806 }
1807
1808 if (bitmap_bit_p (blocks, EXIT_BLOCK))
1809 df_record_exit_block_uses (dflow);
1810
1811 if (bitmap_bit_p (blocks, ENTRY_BLOCK))
1812 df_record_entry_block_defs (dflow);
1813
1814 if (current_function_has_nonlocal_label)
1815 record_nonlocal_goto_receiver_defs (dflow, blocks);
1816 }
1817
1818
1819 /*----------------------------------------------------------------------------
1820 Specialized hard register scanning functions.
1821 ----------------------------------------------------------------------------*/
1822
1823 /* Mark a register in SET. Hard registers in large modes get all
1824 of their component registers set as well. */
1825
1826 static void
1827 df_mark_reg (rtx reg, void *vset)
1828 {
1829 bitmap set = (bitmap) vset;
1830 int regno = REGNO (reg);
1831
1832 gcc_assert (GET_MODE (reg) != BLKmode);
1833
1834 bitmap_set_bit (set, regno);
1835 if (regno < FIRST_PSEUDO_REGISTER)
1836 {
1837 int n = hard_regno_nregs[regno][GET_MODE (reg)];
1838 while (--n > 0)
1839 bitmap_set_bit (set, regno + n);
1840 }
1841 }
1842
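/* Worked example (not part of the original source): on a 32-bit
   target with hard_regno_nregs[0][DImode] == 2, df_mark_reg on
   (reg:DI 0) sets bits 0 and 1 of the bitmap; on a pseudo it sets
   only the pseudo's own regno.  */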
1843
1844 /* Record the (conservative) set of hard registers that are defined on
1845 entry to the function. */
1846
1847 static void
1848 df_record_entry_block_defs (struct dataflow *dflow)
1849 {
1850 unsigned int i;
1851 bitmap_iterator bi;
1852 rtx r;
1853 struct df *df = dflow->df;
1854
1855 bitmap_clear (df->entry_block_defs);
1856
1857 if (!(dflow->flags & DF_HARD_REGS))
1858 return;
1859
1860 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1861 {
1862 if (FUNCTION_ARG_REGNO_P (i))
1863 #ifdef INCOMING_REGNO
1864 bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
1865 #else
1866 bitmap_set_bit (df->entry_block_defs, i);
1867 #endif
1868 }
1869
1870 /* Once the prologue has been generated, all of these registers
1871 should just show up in the first regular block. */
1872 if (HAVE_prologue && epilogue_completed)
1873 {
1874 /* Defs for the callee saved registers are inserted so that the
1875 pushes have some defining location. */
1876 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1877 if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
1878 bitmap_set_bit (df->entry_block_defs, i);
1879 }
1880 else
1881 {
1882 /* The always-important stack pointer. */
1883 bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM);
1884
1885 #ifdef INCOMING_RETURN_ADDR_RTX
1886 if (REG_P (INCOMING_RETURN_ADDR_RTX))
1887 bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
1888 #endif
1889
1890 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
1891 only STATIC_CHAIN_REGNUM is defined. If they are different,
1892 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
1893 #ifdef STATIC_CHAIN_INCOMING_REGNUM
1894 bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
1895 #else
1896 #ifdef STATIC_CHAIN_REGNUM
1897 bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
1898 #endif
1899 #endif
1900
1901 r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
1902 if (r && REG_P (r))
1903 bitmap_set_bit (df->entry_block_defs, REGNO (r));
1904 }
1905
1906 if ((!reload_completed) || frame_pointer_needed)
1907 {
1908 /* Any reference to any pseudo before reload is a potential
1909 reference of the frame pointer. */
1910 bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
1911 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1912 /* If they are different, also mark the hard frame pointer as live. */
1913 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
1914 bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM);
1915 #endif
1916 }
1917
1918 /* These registers are live everywhere. */
1919 if (!reload_completed)
1920 {
1921 #ifdef EH_USES
1922 /* The ia-64, the only machine that uses this, does not define these
1923 until after reload. */
1924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1925 if (EH_USES (i))
1926 {
1927 bitmap_set_bit (df->entry_block_defs, i);
1928 }
1929 #endif
1930
1931 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1932 /* Pseudos with argument area equivalences may require
1933 reloading via the argument pointer. */
1934 if (fixed_regs[ARG_POINTER_REGNUM])
1935 bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
1936 #endif
1937
1938 #ifdef PIC_OFFSET_TABLE_REGNUM
1939 /* Any constant, or pseudo with constant equivalences, may
1940 require reloading from memory using the pic register. */
1941 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1942 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1943 bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
1944 #endif
1945 }
1946
1947 targetm.live_on_entry (df->entry_block_defs);
1948
1949 EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
1950 {
1951 df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
1952 ENTRY_BLOCK_PTR, NULL,
1953 DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
1954 }
1955 }
1956
1957
1958 /* Record the set of hard registers that are used in the exit block. */
1959
1960 static void
1961 df_record_exit_block_uses (struct dataflow *dflow)
1962 {
1963 unsigned int i;
1964 bitmap_iterator bi;
1965 struct df *df = dflow->df;
1966
1967 bitmap_clear (df->exit_block_uses);
1968
1969 if (!(dflow->flags & DF_HARD_REGS))
1970 return;
1971
1972 /* If exiting needs the right stack value, consider the stack
1973 pointer live at the end of the function. */
1974 if ((HAVE_epilogue && epilogue_completed)
1975 || !EXIT_IGNORE_STACK
1976 || (!FRAME_POINTER_REQUIRED
1977 && !current_function_calls_alloca
1978 && flag_omit_frame_pointer)
1979 || current_function_sp_is_unchanging)
1980 {
1981 bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
1982 }
1983
1984 /* Mark the frame pointer if needed at the end of the function.
1985 If we end up eliminating it, it will be removed from the live
1986 list of each basic block by reload. */
1987
1988 if ((!reload_completed) || frame_pointer_needed)
1989 {
1990 bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
1991 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1992 /* If they are different, also mark the hard frame pointer as live. */
1993 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
1994 bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
1995 #endif
1996 }
1997
1998 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
1999 /* Many architectures have a GP register even without flag_pic.
2000 Assume the pic register is not in use, or will be handled by
2001 other means, if it is not fixed. */
2002 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2003 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2004 bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
2005 #endif
2006
2007 /* Mark all global registers, and all registers used by the
2008 epilogue as being live at the end of the function since they
2009 may be referenced by our caller. */
2010 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2011 if (global_regs[i] || EPILOGUE_USES (i))
2012 bitmap_set_bit (df->exit_block_uses, i);
2013
2014 if (HAVE_epilogue && epilogue_completed)
2015 {
2016 /* Mark all call-saved registers that we actually used. */
2017 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2018 if (regs_ever_live[i] && !LOCAL_REGNO (i)
2019 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2020 bitmap_set_bit (df->exit_block_uses, i);
2021 }
2022
2023 #ifdef EH_RETURN_DATA_REGNO
2024 /* Mark the registers that will contain data for the handler. */
2025 if (reload_completed && current_function_calls_eh_return)
2026 for (i = 0; ; ++i)
2027 {
2028 unsigned regno = EH_RETURN_DATA_REGNO (i);
2029 if (regno == INVALID_REGNUM)
2030 break;
2031 bitmap_set_bit (df->exit_block_uses, regno);
2032 }
2033 #endif
2034
2035 #ifdef EH_RETURN_STACKADJ_RTX
2036 if ((!HAVE_epilogue || ! epilogue_completed)
2037 && current_function_calls_eh_return)
2038 {
2039 rtx tmp = EH_RETURN_STACKADJ_RTX;
2040 if (tmp && REG_P (tmp))
2041 df_mark_reg (tmp, df->exit_block_uses);
2042 }
2043 #endif
2044
2045 #ifdef EH_RETURN_HANDLER_RTX
2046 if ((!HAVE_epilogue || ! epilogue_completed)
2047 && current_function_calls_eh_return)
2048 {
2049 rtx tmp = EH_RETURN_HANDLER_RTX;
2050 if (tmp && REG_P (tmp))
2051 df_mark_reg (tmp, df->exit_block_uses);
2052 }
2053 #endif
2054
2055 /* Mark function return value. */
2056 diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);
2057
2058 if (dflow->flags & DF_HARD_REGS)
2059 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
2060 df_uses_record (dflow, &regno_reg_rtx[i],
2061 DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
2062 DF_REF_ARTIFICIAL);
2063 }
2064
2065 static bool initialized = false;
2066
2067 /* Initialize some platform specific structures. */
2068
2069 void
2070 df_hard_reg_init (void)
2071 {
2072 int i;
2073 #ifdef ELIMINABLE_REGS
2074 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2075 #endif
2076 /* After reload, some ports add certain bits to regs_ever_live so
2077 this cannot be reset. */
2078
2079 if (!reload_completed)
2080 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2081
2082 if (initialized)
2083 return;
2084
2085 bitmap_obstack_initialize (&persistent_obstack);
2086
2087 /* Record which registers will be eliminated. We use this in
2088 mark_used_regs. */
2089 CLEAR_HARD_REG_SET (elim_reg_set);
2090
2091 #ifdef ELIMINABLE_REGS
2092 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2093 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2094 #else
2095 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2096 #endif
2097
2098 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
2099
2100 /* Inconveniently, this is only readily available in hard reg set
2101 form. */
2102 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2103 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2104 bitmap_set_bit (df_invalidated_by_call, i);
2105
2106 initialized = true;
2107 }