/* FIXME: We need to go back and add the warning messages about code
   moved across setjmp.  */


/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
30
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "rtl.h"
36 #include "tm_p.h"
37 #include "insn-config.h"
38 #include "recog.h"
39 #include "function.h"
40 #include "regs.h"
41 #include "output.h"
42 #include "alloc-pool.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "basic-block.h"
46 #include "sbitmap.h"
47 #include "bitmap.h"
48 #include "timevar.h"
49 #include "df.h"
50
51 #ifndef HAVE_epilogue
52 #define HAVE_epilogue 0
53 #endif
54 #ifndef HAVE_prologue
55 #define HAVE_prologue 0
56 #endif
57 #ifndef HAVE_sibcall_epilogue
58 #define HAVE_sibcall_epilogue 0
59 #endif
60
61 #ifndef EPILOGUE_USES
62 #define EPILOGUE_USES(REGNO) 0
63 #endif
64
/* Indicates where we are in the compilation.  */
int df_state;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* The set of all hard registers.  It is used to initialize ur_in and
   ur_out as if all hard registers were partially available.  */

bitmap df_all_hard_regs = NULL;

static void df_ref_record (struct dataflow *, rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags, bool record_live);
static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
                             enum df_ref_flags, bool record_live);
static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
static void df_bb_refs_record (struct dataflow *, basic_block);
static void df_refs_record (struct dataflow *, bitmap);
static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
                                               basic_block, rtx,
                                               enum df_ref_type,
                                               enum df_ref_flags);
static void df_record_exit_block_uses (struct dataflow *);
static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_grow_insn_info (struct df *);

\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all of the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->exit_block_uses);

  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
}


/* Get basic block info.  */

struct df_scan_bb_info *
df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  gcc_assert (index < dflow->block_info_size);
  return (struct df_scan_bb_info *) dflow->block_info[index];
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (index < dflow->block_info_size);
  dflow->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (struct dataflow *dflow, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  if (bb_info)
    pool_free (dflow->block_pool, bb_info);
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */

static void
df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 50;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (dflow->problem_data)
    df_scan_free_internal (dflow);

  dflow->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = xmalloc (sizeof (struct df_scan_problem_data));
  dflow->problem_data = problem_data;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);

  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);

  insn_num += insn_num / 4;
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_ref_info (&df->def_info, insn_num);

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_ref_info (&df->use_info, insn_num * 2);

  df_grow_insn_info (df);
  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
          df_scan_set_bb_info (dflow, bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (NULL);
  df->exit_block_uses = BITMAP_ALLOC (NULL);
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (struct dataflow *dflow)
{
  struct df *df = dflow->df;

  df_scan_free_internal (dflow);
  if (df->blocks_to_scan)
    BITMAP_FREE (df->blocks_to_scan);

  if (df->blocks_to_analyze)
    BITMAP_FREE (df->blocks_to_analyze);

  free (dflow->problem_data);
  free (dflow);
}

static void
df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  int i;

  fprintf (file, "  all hard regs \t");
  dump_bitmap (file, df_all_hard_regs);
  fprintf (file, "  invalidated by call \t");
  dump_bitmap (file, df_invalidated_by_call);
  fprintf (file, "  hardware regs used \t");
  dump_bitmap (file, df->hardware_regs_used);
  fprintf (file, "  exit block uses \t");
  dump_bitmap (file, df->exit_block_uses);
  fprintf (file, "  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, "%d ", i);
  fprintf (file, "\n");
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  df_scan_dump,               /* Debugging.  */
  NULL                        /* Dependent problem.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_scan_add_problem (struct df *df)
{
  return df_add_problem (df, &problem_SCAN);
}
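
/* A minimal usage sketch (hypothetical driver code, not part of this
   file; df_init and its flags are declared in df.h):

     struct df *df = df_init (DF_HARD_REGS);
     df_scan_add_problem (df);
     df_rescan_blocks (df, NULL);

   Passing NULL to df_rescan_blocks scans every basic block, as
   described at that function below.  */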

/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less
   than the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

static void
df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  unsigned int i;

  if (ref_info->regs_size < new_size)
    {
      new_size += new_size / 4;
      ref_info->regs = xrealloc (ref_info->regs,
                                 new_size * sizeof (struct df_reg_info *));
      ref_info->regs_size = new_size;
    }

  for (i = ref_info->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      ref_info->regs[i] = reg_info;
    }

  ref_info->regs_inited = max_reg;
}
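
/* For illustration: with max_reg_num () == 100 and regs_size == 80,
   the table above is grown to 100 + 100/4 == 125 slots, and every
   slot in [regs_inited, 100) receives a fresh zeroed df_reg_info
   before regs_inited is bumped to 100.  */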


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - df->insns_size) * sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}


\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS, or all the blocks defined by the last call to
   df_set_blocks (if BLOCKS is NULL).  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      /* Need to assure that there is space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num * 2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);
      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit set of blocks is
         passed in and the set of blocks to analyze has been
         explicitly set, add the extra blocks to blocks_to_analyze.
         The alternative is to put an assert here.  We do not want
         this to just go by silently or else we may get storage
         leaks.  */
      if (df->blocks_to_analyze)
        bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
         Most stuff is allocated in pools so this is faster than
         walking it.  */
      if (df->blocks_to_analyze)
        bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
        FOR_ALL_BB (bb)
          {
            bitmap_set_bit (local_blocks_to_scan, bb->index);
          }
      df_scan_alloc (dflow, local_blocks_to_scan);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}
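
/* Example (hypothetical caller): after a pass rewrites the insns of a
   single block BB, only that block needs to be rescanned:

     bitmap blocks = BITMAP_ALLOC (NULL);
     bitmap_set_bit (blocks, bb->index);
     df_rescan_blocks (df, blocks);
     BITMAP_FREE (blocks);

   Passing NULL instead reallocates and rescans everything.  */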

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  struct df_scan_bb_info *bb_info;

  df_grow_reg_info (dflow, &df->use_info);
  df_grow_reg_info (dflow, &df->def_info);
  df_grow_bb_info (dflow);

  /* Make sure there is bb_info for this block.  */
  bb_info = df_scan_get_bb_info (dflow, bb->index);
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  if (ref_type == DF_REF_REG_DEF)
    df->def_info.add_refs_inline = true;
  else
    df->use_info.add_refs_inline = true;

  return df_ref_create_structure (dflow, reg, loc, bb, insn,
                                  ref_type, ref_flags);
}
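
/* Example (hypothetical caller): a pass that adds a use of hard reg 2
   at the slot *LOC inside INSN, without rescanning the whole insn,
   could record it by hand:

     df_ref_create (df, regno_reg_rtx[2], loc, insn, bb,
                    DF_REF_REG_USE, 0);

   LOC must point at the rtx slot within INSN that holds the register,
   so that later passes can replace it in place.  */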


\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Get the artificial defs for a basic block.  */

struct df_ref *
df_get_artificial_defs (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
}


/* Get the artificial uses for a basic block.  */

struct df_ref *
df_get_artificial_uses (struct df *df, unsigned int bb_index)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
}


/* Link REF at the front of the reg_use or reg_def chain for REGNO.  */

void
df_reg_chain_create (struct df_reg_info *reg_info,
                     struct df_ref *ref)
{
  struct df_ref *head = reg_info->reg_chain;
  reg_info->reg_chain = ref;

  DF_REF_NEXT_REG (ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = ref;
}


/* Remove REF from the CHAIN.  Return the head of the chain.  This
   will be CHAIN unless the REF was at the beginning of the chain.  */

static struct df_ref *
df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
{
  struct df_ref *orig_chain = chain;
  struct df_ref *prev = NULL;
  while (chain)
    {
      if (chain == ref)
        {
          if (prev)
            {
              prev->next_ref = ref->next_ref;
              ref->next_ref = NULL;
              return orig_chain;
            }
          else
            {
              chain = ref->next_ref;
              ref->next_ref = NULL;
              return chain;
            }
        }

      prev = chain;
      chain = chain->next_ref;
    }

  /* Someone passed in a ref that was not in the chain.  */
  gcc_unreachable ();
  return NULL;
}


/* Unlink and delete REF from the reg_use or reg_def chain.  Also
   delete the def-use or use-def chain if it exists.  Returns the
   next ref in the uses or defs chain.  */

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
        DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
        DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
        DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
        DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_defs
            = df_ref_unlink (bb_info->artificial_defs, ref);
        }
      else
        DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
        DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
          bb_info->artificial_uses
            = df_ref_unlink (bb_info->artificial_uses, ref);
        }
      else
        DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)) =
          df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
        DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}
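
/* Example (hypothetical caller): deleting the first use recorded for
   INSN.  The chain heads live in the insn record created by
   df_insn_create_insn_record below:

     struct df_ref *use = DF_INSN_GET (df, insn)->uses;
     if (use)
       df_ref_remove (df, use);
*/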


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}

/* Delete all of the refs information for INSN.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_UID_GET (df, uid);
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  if (insn_info)
    {
      ref = insn_info->defs;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
        ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from BLOCKS.  */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  struct df_ref *def;
  struct df_ref *use;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      struct df_scan_bb_info *bb_info
        = df_scan_get_bb_info (dflow, bb_index);
      rtx insn;
      basic_block bb = BASIC_BLOCK (bb_index);
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              /* Delete the refs recorded for INSN.  */
              df_insn_refs_delete (dflow, insn);
            }
        }

      /* Get rid of any artificial defs and uses.  */
      if (bb_info)
        {
          def = bb_info->artificial_defs;
          while (def)
            def = df_reg_chain_unlink (dflow, def);
          bb_info->artificial_defs = NULL;
          use = bb_info->artificial_uses;
          while (use)
            use = df_reg_chain_unlink (dflow, use);
          bb_info->artificial_uses = NULL;
        }
    }
}


/* Build the ref table for either the uses or the defs from the
   reg-use or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  if (ref_info->refs_organized)
    return;

  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
        {
          struct df_ref *ref = reg_info->reg_chain;
          reg_info->begin = offset;
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              ref = DF_REF_NEXT_REG (ref);
              count++;
              size++;
            }
          reg_info->n_refs = count;
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}
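
/* After reorganization the refs table is grouped by register number,
   e.g. (illustration only, three regs with 2, 1 and 2 refs):

     refs:  id 0  1  2  3  4
            [r0][r0][r2][r5][r5]
     regs[0]->begin == 0, regs[2]->begin == 2, regs[5]->begin == 3

   so the refs of register R occupy the contiguous id range
   [begin, begin + n_refs).  */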

\f
/* Local miscellaneous routines.  */

/* Local routines for recording refs.  */

/* Set where we are in the compilation.  */

void
df_set_state (int state)
{
  df_state = state;
}


\f
/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
                         basic_block bb, rtx insn,
                         enum df_ref_type ref_type,
                         enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data =
    (struct df_scan_problem_data *) dflow->problem_data;

  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  if (ref_type == DF_REF_REG_DEF)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_def chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->def_info.bitmap_size;
      if (df->def_info.add_refs_inline)
        {
          if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
            {
              int new_size = df->def_info.bitmap_size
                + df->def_info.bitmap_size / 4;
              df_grow_ref_info (&df->def_info, new_size);
            }
          /* Add the ref to the big array of defs.  */
          DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
          df->def_info.refs_organized = false;
        }

      df->def_info.bitmap_size++;

      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_defs;
          bb_info->artificial_defs = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
          DF_INSN_GET (df, insn)->defs = this_ref;
        }
    }
  else
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      reg_info->n_refs++;

      /* Add the ref to the reg_use chain.  */
      df_reg_chain_create (reg_info, this_ref);
      DF_REF_ID (this_ref) = df->use_info.bitmap_size;
      if (df->use_info.add_refs_inline)
        {
          if (DF_USES_SIZE (df) >= df->use_info.refs_size)
            {
              int new_size = df->use_info.bitmap_size
                + df->use_info.bitmap_size / 4;
              df_grow_ref_info (&df->use_info, new_size);
            }
          /* Add the ref to the big array of uses.  */
          DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
          df->use_info.refs_organized = false;
        }

      df->use_info.bitmap_size++;
      if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (dflow, bb->index);
          this_ref->next_ref = bb_info->artificial_uses;
          bb_info->artificial_uses = this_ref;
        }
      else
        {
          this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
          DF_INSN_GET (df, insn)->uses = this_ref;
        }
    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
               basic_block bb, rtx insn,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               bool record_live)
{
  unsigned int regno;
  struct df *df = dflow->df;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the documentation those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((df->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
               >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
        regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      for (i = regno; i < endregno; i++)
        {
          /* Calls are handled at call site because regs_ever_live
             doesn't include clobbered regs, only used ones.  */
          if (ref_type == DF_REF_REG_DEF && record_live)
            regs_ever_live[i] = 1;
          else if ((ref_type == DF_REF_REG_USE
                    || ref_type == DF_REF_REG_MEM_STORE
                    || ref_type == DF_REF_REG_MEM_LOAD)
                   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
            {
              /* Set regs_ever_live on uses of non-eliminable frame
                 pointers and arg pointers.  */
              if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
                     && (regno == FRAME_POINTER_REGNUM
                         || regno == ARG_POINTER_REGNUM)))
                regs_ever_live[i] = 1;
            }

          df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
                                   bb, insn, ref_type, ref_flags);
        }
    }
  else
    {
      df_ref_create_structure (dflow, reg, loc,
                               bb, insn, ref_type, ref_flags);
    }
}
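
/* For instance, on a target where SImode registers are word sized, a
   def of (reg:DI 0) is recorded as two separate refs, one for hard
   reg 0 and one for hard reg 1, because hard_regno_nregs[0][DImode]
   is 2 there (illustration only; the counts are target dependent).  */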


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.  */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  return (isize > osize && isize > UNITS_PER_WORD);
}
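
/* Example: on a target with 4-byte words, (subreg:SI (reg:DI 0) 0) is
   such a SUBREG (isize == 8 > osize == 4, and isize > UNITS_PER_WORD),
   since writing the low word leaves the high word of reg 0 intact.  A
   paradoxical subreg like (subreg:DI (reg:SI 1) 0) is not
   (isize == 4 <= osize == 8).  */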


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;

  /* We may recursively call ourselves on EXPR_LIST when dealing with a
     PARALLEL construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.  */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
                       code == CLOBBER ? DF_REF_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}
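
/* For example, a pattern such as

     (parallel [(set (reg:SI 0) (reg:SI 1))
                (clobber (reg:CC 17))])

   results in one recursive call per element: a plain def is recorded
   for reg 0 and a DF_REF_CLOBBER def for reg 17 (register numbers are
   illustrative).  */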


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);

        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, 0);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, 0);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, 0);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We cannot just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which does not indicate a traditional asm, unlike its
           normal usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, 0);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, DF_REF_READ_WRITE, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}
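
/* As an illustration, scanning the pattern

     (set (mem:SI (reg:SI 1)) (reg:SI 2))

   records reg 1 as a DF_REF_REG_MEM_STORE use (it only computes the
   store address) and reg 2 as a plain DF_REF_REG_USE; the store
   itself defines no register, so no def is recorded here.  */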

/* Return nonzero if *LOC contains an asm.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (!*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return nonzero if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}


/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  int i;
  struct df *df = dflow->df;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and the
             registers explicitly noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (df->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui],
                               bb, insn,
                               DF_REF_REG_DEF, DF_REF_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);
    }
}

/* Return true if BB has any predecessor edge that is an EH edge.  */

static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
        return true;
    }
  return false;
}

/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record refs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      if (current_function_calls_eh_return)
        for (i = 0; ; ++i)
          {
            unsigned regno = EH_RETURN_DATA_REGNO (i);
            if (regno == INVALID_REGNUM)
              break;
            df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                           bb, NULL,
                           DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
                           false);
          }
    }
#endif

#ifdef EH_USES
  /* This code is putting in an artificial ref for the use at the TOP
     of the block that receives the exception.  It is too cumbersome
     to actually put the ref on the edge.  We could either model this
     at the top of the receiver block or the bottom of the sender
     block.

     The bottom of the sender block is problematic because not all
     out-edges of a block are eh-edges.  However, it is true that all
     edges into a block are either eh-edges or none of them are
     eh-edges.  Thus, we can model this at the top of the eh-receiver
     for all of the edges at once.  */
  if ((df->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
    }
#endif

  if ((df->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (! reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            df_uses_record (dflow, &regno_reg_rtx[PIC_OFFSET_TABLE_REGNUM],
                            DF_REF_REG_USE, bb, NULL,
                            DF_REF_ARTIFICIAL);
        }
      /* The all-important stack pointer must always be live.  */
      df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                      DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */

static void
df_refs_record (struct dataflow *dflow, bitmap blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      df_bb_refs_record (dflow, bb);
    }

  if (bitmap_bit_p (blocks, EXIT_BLOCK))
    df_record_exit_block_uses (dflow);
}


/*----------------------------------------------------------------------------
   Specialized hard register scanning functions.
----------------------------------------------------------------------------*/

/* Mark a register in SET.  Hard registers in large modes get all
   of their component registers set as well.  */

static void
df_mark_reg (rtx reg, void *vset)
{
  bitmap set = (bitmap) vset;
  int regno = REGNO (reg);

  gcc_assert (GET_MODE (reg) != BLKmode);

  bitmap_set_bit (set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = hard_regno_nregs[regno][GET_MODE (reg)];
      while (--n > 0)
        bitmap_set_bit (set, regno + n);
    }
}
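
/* For example, with 4-byte words, calling df_mark_reg on (reg:DI 10)
   sets both bit 10 and bit 11 in the bitmap, since the DImode value
   occupies two hard registers there (illustration only; the count is
   target dependent).  */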

/* Record the set of hard registers that are used in the exit block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! LOCAL_REGNO (i)
            && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  if (df->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}

static bool initialized = false;

/* Initialize some platform specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct { const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  df_all_hard_regs = BITMAP_ALLOC (&persistent_obstack);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (df_all_hard_regs, i);

  initialized = true;
}