/* FIXME: We need to go back and add the warning messages about code
   moved across setjmp.  */


/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "df.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* Indicates where we are in the compilation.  */
int df_state;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

bitmap df_all_hard_regs = NULL;

static void df_ref_record (struct dataflow *, rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags, bool record_live);
static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
                             enum df_ref_flags, bool record_live);
static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
static void df_bb_refs_record (struct dataflow *, basic_block);
static void df_refs_record (struct dataflow *, bitmap);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
                                               basic_block, rtx,
                                               enum df_ref_type,
                                               enum df_ref_flags);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_grow_insn_info (struct df *);

\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all of the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->exit_block_uses);

  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
}


/* Get basic block info.  */

struct df_scan_bb_info *
df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  gcc_assert (index < dflow->block_info_size);
  return (struct df_scan_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (index < dflow->block_info_size);
  dflow->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  if (bb_info)
    {
      df_bb_refs_delete (dflow, bb->index);
      pool_free (dflow->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */

static void
df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 50;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (dflow->problem_data)
    df_scan_free_internal (dflow);

  dflow->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = xmalloc (sizeof (struct df_scan_problem_data));
  dflow->problem_data = problem_data;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);

  insn_num += insn_num / 4;
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_ref_info (&df->def_info, insn_num);

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_ref_info (&df->use_info, insn_num * 2);

  df_grow_insn_info (df);
  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
          df_scan_set_bb_info (dflow, bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (NULL);
  df->exit_block_uses = BITMAP_ALLOC (NULL);
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (struct dataflow *dflow)
{
  struct df *df = dflow->df;

  if (dflow->problem_data)
    {
      df_scan_free_internal (dflow);
      free (dflow->problem_data);
    }

  if (df->blocks_to_scan)
    BITMAP_FREE (df->blocks_to_scan);

  if (df->blocks_to_analyze)
    BITMAP_FREE (df->blocks_to_analyze);

  free (dflow);
}

static void
df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  int i;

  fprintf (file, "  all hard regs \t");
  dump_bitmap (file, df_all_hard_regs);
  fprintf (file, "  invalidated by call \t");
  dump_bitmap (file, df_invalidated_by_call);
  fprintf (file, "  hardware regs used \t");
  dump_bitmap (file, df->hardware_regs_used);
  fprintf (file, "  exit block uses \t");
  dump_bitmap (file, df->exit_block_uses);
  fprintf (file, "  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, "%d ", i);
  fprintf (file, "\n");
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  df_scan_dump,               /* Debugging.  */
  NULL                        /* Dependent problem.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_scan_add_problem (struct df *df)
{
  return df_add_problem (df, &problem_SCAN);
}
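
/* As a usage sketch only (assuming the df_init and df_finish entry
   points declared in df.h, which are not defined in this file): a
   client that wants fully scanned def/use information might set the
   problem up roughly like this:

       struct df *df = df_init (DF_HARD_REGS);
       df_scan_add_problem (df);
       df_rescan_blocks (df, NULL);    (NULL means scan every block)
       ... query the refs ...
       df_finish (df);

   df_scan_add_problem only registers the problem; no refs exist until
   df_rescan_blocks (below) has run.  */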

/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

static void
df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  unsigned int i;

  if (ref_info->regs_size < new_size)
    {
      new_size += new_size / 4;
      ref_info->regs = xrealloc (ref_info->regs,
                                 new_size * sizeof (struct df_reg_info *));
      ref_info->regs_size = new_size;
    }

  for (i = ref_info->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      ref_info->regs[i] = reg_info;
    }

  ref_info->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Grow the ref information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - df->insns_size) * sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}


\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS, or all the blocks defined by the last call to
   df_set_blocks (if BLOCKS is NULL).  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      /* Need to assure that there is space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num * 2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);
      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit set of blocks is
         passed in and the set of blocks to analyze has been
         explicitly set, add the extra blocks to blocks_to_analyze.
         The alternative is to put an assert here.  We do not want
         this to just go by silently or else we may get storage
         leaks.  */
      if (df->blocks_to_analyze)
        bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
         Most stuff is allocated in pools so this is faster than
         walking it.  */
      if (df->blocks_to_analyze)
        bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
        FOR_ALL_BB (bb)
          {
            bitmap_set_bit (local_blocks_to_scan, bb->index);
          }
      df_scan_alloc (dflow, local_blocks_to_scan);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}
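
/* For example, a pass that has changed only the instructions in block
   BB could limit rescanning to that one block (a sketch; the pass is
   assumed to already hold its struct df *df):

       bitmap blocks = BITMAP_ALLOC (NULL);
       bitmap_set_bit (blocks, bb->index);
       df_rescan_blocks (df, blocks);
       BITMAP_FREE (blocks);

   Passing an explicit bitmap takes the first branch above, which
   deletes and rebuilds only the refs in those blocks.  */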

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  struct df_scan_bb_info *bb_info;

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_bb_info (dflow);

  /* Make sure there is a bb_info for this block.  */
  bb_info = df_scan_get_bb_info (dflow, bb->index);
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (ref_type == DF_REF_REG_DEF)
    df->def_info.add_refs_inline = true;
  else
    df->use_info.add_refs_inline = true;

  return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
}
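
/* As an illustration only (new_insn and reg are hypothetical names): a
   pass that emits an instruction behind the scanner's back could
   register the def it creates inline, rather than rescanning the
   whole block:

       df_ref_create (df, reg, &SET_DEST (PATTERN (new_insn)),
                      new_insn, bb, DF_REF_REG_DEF, 0);

   The new ref is linked into the reg-def chain for REGNO (reg), and
   because this switches add_refs_inline on, it also lands in the flat
   defs array maintained by df_ref_create_structure below.  */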

\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Get the artificial defs for a basic block.  */

struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
}


/* Get the artificial uses for a basic block.  */

struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
}


/* Link REF at the front of the reg_use or reg_def chain for REGNO.  */

void
df_reg_chain_create (struct df_reg_info *reg_info,
                     struct df_ref *ref)
{
  struct df_ref *head = reg_info->reg_chain;
  reg_info->reg_chain = ref;

  DF_REF_NEXT_REG (ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = ref;
}


/* Remove REF from the CHAIN.  Return the head of the chain.  This
   will be CHAIN unless the REF was at the beginning of the chain.  */

static struct df_ref *
df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
{
  struct df_ref *orig_chain = chain;
  struct df_ref *prev = NULL;
  while (chain)
    {
      if (chain == ref)
        {
          if (prev)
            {
              prev->next_ref = ref->next_ref;
              ref->next_ref = NULL;
              return orig_chain;
            }
          else
            {
              chain = ref->next_ref;
              ref->next_ref = NULL;
              return chain;
            }
        }

      prev = chain;
      chain = chain->next_ref;
    }

  /* Someone passed in a ref that was not in the chain.  */
  gcc_unreachable ();
  return NULL;
}


/* Unlink and delete REF from the reg_use or reg_def chain.  Also delete
   the def-use or use-def chain if it exists.  Returns the next ref in
   the uses or defs chain.  */

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
        DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
        DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
        DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
        DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_defs
            = df_ref_unlink (bb_info->artificial_defs, ref);
        }
      else
        DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
          = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
        DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_uses
            = df_ref_unlink (bb_info->artificial_uses, ref);
        }
      else
        DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
          = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
        DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}


/* Delete all of the refs information from INSN.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_UID_GET (df, uid);
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  if (insn_info)
    {
      ref = insn_info->defs;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from the basic_block with BB_INDEX.  */

void
df_bb_refs_delete (struct dataflow *dflow, int bb_index)
{
  struct df_ref *def;
  struct df_ref *use;

  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (dflow, bb_index);
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);
  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          /* Delete the refs within INSN.  */
          df_insn_refs_delete (dflow, insn);
        }
    }

  /* Get rid of any artificial uses and defs.  */
  if (bb_info)
    {
      def = bb_info->artificial_defs;
      while (def)
        def = df_reg_chain_unlink (dflow, def);
      bb_info->artificial_defs = NULL;
      use = bb_info->artificial_uses;
      while (use)
        use = df_reg_chain_unlink (dflow, use);
      bb_info->artificial_uses = NULL;
    }
}


/* Delete all of the refs information from BLOCKS.  */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      df_bb_refs_delete (dflow, bb_index);
    }
}


/* Build the ref table for either the uses or the defs from the
   reg-use or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  if (ref_info->refs_organized)
    return;

  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
        {
          struct df_ref *ref = reg_info->reg_chain;
          reg_info->begin = offset;
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              ref = DF_REF_NEXT_REG (ref);
              count++;
              size++;
            }
          reg_info->n_refs = count;
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}
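
/* After reorganization the refs for each register occupy a contiguous
   slice of the flat refs array, so a client can walk every def in the
   function.  A sketch, assuming the DF_DEFS_SIZE and DF_DEFS_GET
   accessors from df.h (process_def is a hypothetical callback):

       unsigned int i;
       df_reorganize_refs (&df->def_info);
       for (i = 0; i < DF_DEFS_SIZE (df); i++)
         {
           struct df_ref *def = DF_DEFS_GET (df, i);
           if (def)
             process_def (def);
         }
*/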

\f
/* Local miscellaneous routines.  */

/* Local routines for recording refs.  */

/* Set where we are in the compilation.  */

void
df_set_state (int state)
{
  df_state = state;
}


\f
/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
                         basic_block bb, rtx insn,
                         enum df_ref_type ref_type,
                         enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  if (ref_type == DF_REF_REG_DEF)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_def chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->def_info.bitmap_size;
      if (df->def_info.add_refs_inline)
        {
          if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
            {
              int new_size = df->def_info.bitmap_size
                + df->def_info.bitmap_size / 4;
              df_grow_ref_info (&df->def_info, new_size);
            }
          /* Add the ref to the big array of defs.  */
          DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
          df->def_info.refs_organized = false;
        }

      df->def_info.bitmap_size++;

      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_defs;
          bb_info->artificial_defs = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
          DF_INSN_GET (df, insn)->defs = this_ref;
        }
    }
  else
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_use chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->use_info.bitmap_size;
      if (df->use_info.add_refs_inline)
        {
          if (DF_USES_SIZE (df) >= df->use_info.refs_size)
            {
              int new_size = df->use_info.bitmap_size
                + df->use_info.bitmap_size / 4;
              df_grow_ref_info (&df->use_info, new_size);
            }
          /* Add the ref to the big array of uses.  */
          DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
          df->use_info.refs_organized = false;
        }

      df->use_info.bitmap_size++;
      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_uses;
          bb_info->artificial_uses = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
          DF_INSN_GET (df, insn)->uses = this_ref;
        }
    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
               basic_block bb, rtx insn,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               bool record_live)
{
  unsigned int regno;
  struct df *df = dflow->df;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((df->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
               >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      for (i = regno; i < endregno; i++)
        {
          /* Calls are handled at call site because regs_ever_live
             doesn't include clobbered regs, only used ones.  */
          if (ref_type == DF_REF_REG_DEF && record_live)
            regs_ever_live[i] = 1;
          else if ((ref_type == DF_REF_REG_USE
                    || ref_type == DF_REF_REG_MEM_STORE
                    || ref_type == DF_REF_REG_MEM_LOAD)
                   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
            {
              /* Set regs_ever_live on uses of non-eliminable frame
                 pointers and arg pointers.  */
              if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
                     && (regno == FRAME_POINTER_REGNUM
                         || regno == ARG_POINTER_REGNUM)))
                regs_ever_live[i] = 1;
            }

          df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
                                   bb, insn, ref_type, ref_flags);
        }
    }
  else
    {
      df_ref_create_structure (dflow, reg, loc,
                               bb, insn, ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return (isize > osize && isize > UNITS_PER_WORD);
}
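
/* A worked example on a hypothetical 32-bit target (UNITS_PER_WORD == 4):
   for (subreg:SI (reg:DI 100) 0), isize is 8 and osize is 4, so a store
   through the subreg leaves the other word of reg 100 intact and the
   predicate returns true.  For (subreg:QI (reg:SI 100) 0), isize is 4,
   which is not greater than UNITS_PER_WORD, so the whole register is
   considered written and the predicate returns false.  */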


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;

  /* We may recursively call ourselves on EXPR_LIST when dealing with a
     PARALLEL construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as the whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
                       code == CLOBBER ? DF_REF_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}
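
/* For instance, given a pattern such as

       (parallel [(set (reg:SI 1) (reg:SI 2))
                  (clobber (reg:CC 17))])

   df_defs_record recurses over the vector and records a plain def for
   reg 1 and a def flagged DF_REF_CLOBBER for reg 17 (the register
   numbers here are illustrative only).  */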


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);

        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, 0);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, 0);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, 0);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We cannot just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which does not indicate a traditional asm unlike its normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, 0);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, DF_REF_READ_WRITE, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}
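
/* To see how a SET is decomposed, consider a store such as

       (set (mem:SI (reg:SI 200)) (reg:SI 201))

   (pseudo numbers illustrative).  The SET case above records reg 201
   as a DF_REF_REG_USE via SET_SRC and then, because the destination is
   a MEM, records reg 200 as a DF_REF_REG_MEM_STORE use for the address;
   the store itself produces no register def.  */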

/* Return true if *LOC contains an asm.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (!*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return true if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}



/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  int i;
  struct df *df = dflow->df;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and the
             registers explicitly noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (df->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui],
                               bb, insn,
                               DF_REF_REG_DEF, DF_REF_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);

    }
}

static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        return true;
    }
  return false;
}

/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record the refs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      if (current_function_calls_eh_return)
        for (i = 0; ; ++i)
          {
            unsigned regno = EH_RETURN_DATA_REGNO (i);
            if (regno == INVALID_REGNUM)
              break;
            df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                           bb, NULL,
                           DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
                           false);
          }
    }
#endif

#ifdef EH_USES
  /* This code is putting in an artificial ref for the use at the TOP
     of the block that receives the exception.  It is too cumbersome
     to actually put the ref on the edge.  We could either model this
     at the top of the receiver block or the bottom of the sender
     block.

     The bottom of the sender block is problematic because not all
     out-edges of a block are eh-edges.  However, it is true that
     all edges into a block are either eh-edges or none of them are
     eh-edges.  Thus, we can model this at the top of the eh-receiver
     for all of the edges at once.  */
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
    }
#endif

  if ((df->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (! reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[PIC_OFFSET_TABLE_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
        }
      /* The all-important stack pointer must always be live.  */
      df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                      DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */

static void
df_refs_record (struct dataflow *dflow, bitmap blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      df_bb_refs_record (dflow, bb);
    }

  if (bitmap_bit_p (blocks, EXIT_BLOCK))
    df_record_exit_block_uses (dflow);
}


/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  bitmap_set_bit (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      while (--n > 0)
        bitmap_set_bit (set, regno + n);
    }
}
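
/* For example, on a hypothetical 32-bit target where DImode occupies
   two hard registers, df_mark_reg applied to (reg:DI 0) sets bits 0
   and 1 of SET; applied to a pseudo it sets only that pseudo's own
   bit, since the multi-register expansion applies to hard regs.  */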

/* Record the set of hard registers that are used in the exit block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! LOCAL_REGNO (i)
            && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  if (df->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}

static bool initialized = false;

/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  df_all_hard_regs = BITMAP_ALLOC (&persistent_obstack);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (df_all_hard_regs, i);

  initialized = true;
}
1812 }