/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "rtl.h"
32 #include "tm_p.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "regs.h"
37 #include "output.h"
38 #include "alloc-pool.h"
39 #include "flags.h"
40 #include "hard-reg-set.h"
41 #include "basic-block.h"
42 #include "sbitmap.h"
43 #include "bitmap.h"
44 #include "timevar.h"
45 #include "tree.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "df.h"
49 #include "tree-pass.h"
50
51 #ifndef HAVE_epilogue
52 #define HAVE_epilogue 0
53 #endif
54 #ifndef HAVE_prologue
55 #define HAVE_prologue 0
56 #endif
57 #ifndef HAVE_sibcall_epilogue
58 #define HAVE_sibcall_epilogue 0
59 #endif
60
61 #ifndef EPILOGUE_USES
62 #define EPILOGUE_USES(REGNO) 0
63 #endif
64

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* A collection of the refs (defs, uses and eq_uses) and multiword
   hardregs found while scanning an insn or basic block, before they
   are installed into the permanent chains.  */

struct df_collection_rec
{
  struct df_ref **def_vec;
  unsigned int next_def;
  struct df_ref **use_vec;
  unsigned int next_use;
  struct df_ref **eq_use_vec;
  unsigned int next_eq_use;
  struct df_mw_hardreg **mw_vec;
  unsigned int next_mw;
};
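
/* Note: collection recs are typically built on the stack -- see
   df_insn_rescan and df_notes_rescan below, which size the vectors
   with alloca -- then canonized with df_canonize_collection_rec and
   installed into the permanent structures with
   df_refs_add_to_chains.  */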

static struct df_ref * df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];
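
/* These are shared, permanently empty vectors.  An insn or block with
   no refs (or mw hardregs) of a given kind points at the appropriate
   null rec instead of owning its own empty vector, which is why the
   deletion code (see df_ref_chain_delete) must never free an empty
   list.  */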

static void df_ref_record (struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, rtx, enum df_ref_type,
                           enum df_ref_flags);
static void df_def_record_1 (struct df_collection_rec *,
                             rtx, basic_block, rtx,
                             enum df_ref_flags);
static void df_defs_record (struct df_collection_rec *,
                            rtx, basic_block, rtx,
                            enum df_ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, rtx, enum df_ref_flags);

static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
                                               basic_block, rtx, enum df_ref_type,
                                               enum df_ref_flags);

static void df_insn_refs_collect (struct df_collection_rec *,
                                  basic_block, rtx);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (struct df_ref **);
static void df_ref_chain_delete (struct df_ref **);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (struct df_ref *, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  alloc_pool mw_link_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->regular_block_artificial_uses);
  BITMAP_FREE (df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  BITMAP_FREE (df->insns_to_delete);
  BITMAP_FREE (df->insns_to_rescan);
  BITMAP_FREE (df->insns_to_notes_rescan);

  free_alloc_pool (df_scan->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  free_alloc_pool (problem_data->mw_link_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (df_scan);
  df_grow_bb_info (df_scan);
  df_scan->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  if (bb_info)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Delete the scanning info recorded for INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      df_ref_chain_delete_du_chain (bb_info->artificial_defs);
      df_ref_chain_delete_du_chain (bb_info->artificial_uses);
      df_ref_chain_delete (bb_info->artificial_defs);
      df_ref_chain_delete (bb_info->artificial_uses);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
      pool_free (df_scan->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 400;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  df_scan->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan_mw_reg pool",
                         sizeof (struct df_mw_hardreg), block_size);
  problem_data->mw_link_pool
    = create_alloc_pool ("df_scan_mw_link pool",
                         sizeof (struct df_link), block_size);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
          df_scan_set_bb_info (bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}

/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, df_invalidated_by_call);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);

  fprintf (file, "\n");
}

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
\f
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less
   than the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = xrealloc (df->def_regs,
                               new_size * sizeof (struct df_reg_info *));
      df->use_regs = xrealloc (df->use_regs,
                               new_size * sizeof (struct df_reg_info *));
      df->eq_use_regs = xrealloc (df->eq_use_regs,
                                  new_size * sizeof (struct df_reg_info *));
      df->def_info.begin = xrealloc (df->def_info.begin,
                                     new_size * sizeof (int));
      df->def_info.count = xrealloc (df->def_info.count,
                                     new_size * sizeof (int));
      df->use_info.begin = xrealloc (df->use_info.begin,
                                     new_size * sizeof (int));
      df->use_info.count = xrealloc (df->use_info.count,
                                     new_size * sizeof (int));
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
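
/* For instance (illustrative arithmetic only): with max_reg_num () ==
   100 and df->regs_size == 80, new_size becomes 100 + 100/4 == 125,
   so roughly 25% of headroom is kept beyond the current number of
   pseudos before the arrays need to grow again.  */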


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
                                 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Check and grow the ref information if necessary.  This routine
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
   array.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}
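
/* E.g. (illustrative arithmetic only): with total_size == 100 and
   BITMAP_ADDEND == 1, any refs array smaller than 101 entries is
   grown to 101 + 100/4 == 126 entries, again leaving about 25% of
   slack.  */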


/* Grow the insn info.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
                            new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}


\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks in blocks_to_analyze, or all of the blocks
   in the function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);

  bitmap_ior_into (df->eh_block_artificial_uses,
                   df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags)
{
  struct df_ref *ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  struct df_ref **ref_rec;
  struct df_ref ***ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);
  ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
                                 ref_type, ref_flags);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = xrealloc (ref_rec, (count + 2) * sizeof (struct df_ref *));
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count + 1] = NULL;
      qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
    }
  else
    {
      struct df_ref **ref_rec = XNEWVEC (struct df_ref *, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (bb);

  return ref;
}
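
/* Illustrative sketch (not itself part of this file): a pass that
   manually adds a use of REG stored at *LOC in INSN could do

     struct df_ref *ref
       = df_ref_create (reg, loc, insn, BLOCK_FOR_INSN (insn),
                        DF_REF_REG_USE, 0);

   after which the ref is installed in the insn's use vector, the
   table (if any) is updated, and the block is marked dirty as
   described above.  */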


\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (struct df_ref *ref)
{
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  struct df_ref **refs = NULL;

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  pool_free (problem_data->ref_pool, ref);
}


/* Remove REF from VEC.  */

static void
df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
{
  struct df_ref **vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec + 1);
          vec++;
        }
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df_ref *ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}


/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_GET (insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}


/* Delete the du or ud chain (DF_REF_CHAIN ()) of each ref in the ref
   chain.  */

static void
df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (struct df_ref **ref_rec)
{
  struct df_ref **start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}


/* Delete all of the refs information from the insn with UID.  BB
   must be passed in, except when this is called from
   df_process_deferred_rescans, so that the block can be marked
   dirty.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
        {
          bitmap_clear_bit (df->insns_to_rescan, uid);
          bitmap_clear_bit (df->insns_to_notes_rescan, uid);
          bitmap_set_bit (df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}


/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_ref **ref;
  struct df_mw_hardreg **mw;

  if (collection_rec->def_vec)
    for (ref = collection_rec->def_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->use_vec)
    for (ref = collection_rec->use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->eq_use_vec)
    for (ref = collection_rec->eq_use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->mw_vec)
    for (mw = collection_rec->mw_vec; *mw; mw++)
      pool_free (problem_data->mw_reg_pool, *mw);
}


/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;
  collection_rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 100);

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (df->insns_to_delete, uid);
      bitmap_clear_bit (df->insns_to_notes_rescan, uid);
      bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's a change; we need to delete the existing info.  */
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
    }
  else
    {
      df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  df_set_bb_dirty (bb);
  return true;
}
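
/* A note on the flags checked above (summarizing, not new behavior):
   DF_NO_INSN_RESCAN makes df_insn_rescan a no-op because the client
   will rescan by hand; DF_DEFER_INSN_RESCAN just queues the uid in
   df->insns_to_rescan to be processed later by
   df_process_deferred_rescans.  */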


/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}


/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (tmp, df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}


/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          struct df_ref *ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
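
/* After this routine, the refs table is laid out by register, e.g.
   (illustrative):

     refs:  [refs of r0 ...][refs of r1 ...] ... [refs of rN ...]
             ^begin[0]       ^begin[1]

   with count[r] holding the number of entries for register r.  */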


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      struct df_ref **ref_vec)
{
  while (*ref_vec)
    {
      struct df_ref *ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
        {
          ref_info->refs[offset] = ref;
          DF_REF_ID (*ref_vec) = offset++;
        }
      ref_vec++;
    }
  return offset;
}


/* Add the refs in all of the insns of BB to the table in REF_INFO,
   starting at OFFSET.  Include the defs if INCLUDE_DEFS.  Include the
   uses if INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}


/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized as ORDER, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}


/* If the def refs in DF are not organized as ORDER, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}


/* Change the BB of all refs in the ref chain to NEW_BB.
   Assumes that all refs in the chain have the same BB.
   If changed, return the original bb the chain belonged to.
   If no change, return NEW_BB.
   If something's wrong, it will return NULL.  */

static basic_block
df_ref_chain_change_bb (struct df_ref **ref_rec,
                        basic_block old_bb,
                        basic_block new_bb)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;

      if (DF_REF_BB (ref) == new_bb)
        return new_bb;
      else
        {
          gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
          old_bb = DF_REF_BB (ref);
          DF_REF_BB (ref) = new_bb;
        }
      ref_rec++;
    }

  return old_bb;
}


/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn)
{
  basic_block new_bb = BLOCK_FOR_INSN (insn);
  basic_block old_bb = NULL;
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
        fprintf (dump_file, " unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, " from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, " to %d\n", new_bb->index);
}


/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
                              int new_regno, rtx loc)
{
  struct df_ref *the_ref = old->reg_chain;

  while (the_ref)
    {
      if (DF_REF_LOC (the_ref) && (*DF_REF_LOC (the_ref) == loc))
        {
          struct df_ref *next_ref = the_ref->next_reg;
          struct df_ref *prev_ref = the_ref->prev_reg;
          struct df_ref **ref_vec, **ref_vec_t;
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            prev_ref->next_reg = next_ref;
          else
            old->reg_chain = next_ref;
          if (next_ref)
            next_ref->prev_reg = prev_ref;
          old->n_refs--;

          /* Put the ref into the new regno chain.  */
          the_ref->prev_reg = NULL;
          the_ref->next_reg = new->reg_chain;
          if (new->reg_chain)
            new->reg_chain->prev_reg = the_ref;
          new->reg_chain = the_ref;
          new->n_refs++;
          df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to resort the record that the ref was in because the
             regno is a sorting key.  First, find the right record.  */
          if (DF_REF_IS_ARTIFICIAL (the_ref))
            {
              unsigned int bb_index = DF_REF_BB (the_ref)->index;
              if (DF_REF_REG_DEF_P (the_ref))
                ref_vec = df_get_artificial_defs (bb_index);
              else
                ref_vec = df_get_artificial_uses (bb_index);
            }
          else
            {
              struct df_insn_info *insn_info
                = DF_INSN_GET (DF_REF_INSN (the_ref));
              if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
                ref_vec = insn_info->eq_uses;
              else
                ref_vec = insn_info->uses;
              if (dump_file)
                fprintf (dump_file, "changing reg in insn %d\n",
                         INSN_UID (DF_REF_INSN (the_ref)));
            }
          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);

          the_ref = next_ref;
        }
      else
        the_ref = the_ref->next_reg;
    }
}


/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed.  This call
   is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
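
/* Illustrative use (a hypothetical expansion, sketched here only to
   show the intended pairing): a SET_REGNO-style macro that renames
   the reg rtx REG in place would be expected to accompany the rtl
   change with a call such as

     df_ref_change_reg_with_loc (REGNO (reg), new_regno, reg);

   so that every df ref whose DF_REF_LOC points at REG migrates to
   the chains for new_regno.  */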
1942
1943
1944 /* Delete the mw_hardregs that point into the eq_notes. */
1945
1946 static unsigned int
1947 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1948 {
1949 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1950 unsigned int deleted = 0;
1951 unsigned int count = 0;
1952 struct df_scan_problem_data *problem_data
1953 = (struct df_scan_problem_data *) df_scan->problem_data;
1954
1955 if (!*mw_vec)
1956 return 0;
1957
1958 while (*mw_vec)
1959 {
1960 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1961 {
1962 struct df_mw_hardreg **temp_vec = mw_vec;
1963
1964 pool_free (problem_data->mw_reg_pool, *mw_vec);
1966 /* Shove the remaining ones down one to fill the gap. While
1967 this looks O(n^2), it is highly unusual to have any mw regs
1968 in eq_notes, and the chances of more than one are almost
1969 nonexistent. */
1970 while (*temp_vec)
1971 {
1972 *temp_vec = *(temp_vec + 1);
1973 temp_vec++;
1974 }
1975 deleted++;
1976 }
1977 else
1978 {
1979 mw_vec++;
1980 count++;
1981 }
1982 }
1983
1984 if (count == 0)
1985 {
1986 free (insn_info->mw_hardregs);
1987 insn_info->mw_hardregs = df_null_mw_rec;
1988 return 0;
1989 }
1990 return deleted;
1991 }
1992
1993
1994 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1995
1996 void
1997 df_notes_rescan (rtx insn)
1998 {
1999 struct df_insn_info *insn_info;
2000 unsigned int uid = INSN_UID (insn);
2001
2002 if (!df)
2003 return;
2004
2005 /* The client has disabled rescanning and plans to do it itself. */
2006 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2007 return;
2008
2009 df_grow_bb_info (df_scan);
2010 df_grow_reg_info ();
2011
2012 insn_info = DF_INSN_UID_SAFE_GET (uid);
2013
2014 /* The client has deferred rescanning. */
2015 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2016 {
2017 if (!insn_info)
2018 {
2019 insn_info = df_insn_create_insn_record (insn);
2020 insn_info->defs = df_null_ref_rec;
2021 insn_info->uses = df_null_ref_rec;
2022 insn_info->eq_uses = df_null_ref_rec;
2023 insn_info->mw_hardregs = df_null_mw_rec;
2024 }
2025
2026 bitmap_clear_bit (df->insns_to_delete, uid);
2027 /* If the insn is set to be rescanned, it does not also need
2028 its notes rescanned. */
2029 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2030 bitmap_set_bit (df->insns_to_notes_rescan, uid);
2031 return;
2032 }
2033
2034 bitmap_clear_bit (df->insns_to_delete, uid);
2035 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2036
2037 if (insn_info)
2038 {
2039 basic_block bb = BLOCK_FOR_INSN (insn);
2040 rtx note;
2041 struct df_collection_rec collection_rec;
2042 unsigned int num_deleted;
2043
2044 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2045 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2046 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2047
2048 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2049 df_ref_chain_delete (insn_info->eq_uses);
2050 insn_info->eq_uses = NULL;
2051
2052 /* Process REG_EQUIV/REG_EQUAL notes. */
2053 for (note = REG_NOTES (insn); note;
2054 note = XEXP (note, 1))
2055 {
2056 switch (REG_NOTE_KIND (note))
2057 {
2058 case REG_EQUIV:
2059 case REG_EQUAL:
2060 df_uses_record (&collection_rec,
2061 &XEXP (note, 0), DF_REF_REG_USE,
2062 bb, insn, DF_REF_IN_NOTE);
2063 default:
2064 break;
2065 }
2066 }
2067
2068 /* Find some place to put any new mw_hardregs. */
2069 df_canonize_collection_rec (&collection_rec);
2070 if (collection_rec.next_mw)
2071 {
2072 unsigned int count = 0;
2073 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2074 while (*mw_rec)
2075 {
2076 count++;
2077 mw_rec++;
2078 }
2079
2080 if (count)
2081 {
2082 /* Append to the end of the existing record after
2083 expanding it if necessary. */
2084 if (collection_rec.next_mw > num_deleted)
2085 {
2086 insn_info->mw_hardregs =
2087 xrealloc (insn_info->mw_hardregs,
2088 (count + 1 + collection_rec.next_mw)
2089 * sizeof (struct df_mw_hardreg*));
2090 }
2091 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2092 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2093 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2094 sizeof (struct df_mw_hardreg *), df_mw_compare);
2095 }
2096 else
2097 {
2098 /* No vector there. */
2099 insn_info->mw_hardregs
2100 = XNEWVEC (struct df_mw_hardreg*,
2101 count + 1 + collection_rec.next_mw);
2102 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2103 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2104 }
2105 }
2106 /* Get rid of the mw_vec so that df_refs_add_to_chains will
2107 ignore it. */
2108 collection_rec.mw_vec = NULL;
2109 collection_rec.next_mw = 0;
2110 df_refs_add_to_chains (&collection_rec, bb, insn);
2111 }
2112 else
2113 df_insn_rescan (insn);
2114
2115 }
2116
2117 \f
2118 /*----------------------------------------------------------------------------
2119 Hard core instruction scanning code. No external interfaces here,
2120 just a lot of routines that look inside insns.
2121 ----------------------------------------------------------------------------*/
2122
2123
2124 /* Return true if the contents of two df_refs are identical.
2125 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2126
2127 static bool
2128 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2129 {
2130 if (!ref2)
2131 return false;
2132 return (ref1 == ref2) ||
2133 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2134 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2135 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2136 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2137 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2138 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2139 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2140 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2141 }
2142
2143
2144 /* Compare REF1 and REF2 for sorting. This is only called from places
2145 where all of the refs are of the same type, in the same insn, and
2146 have the same bb. So these fields are not checked. */
2147
2148 static int
2149 df_ref_compare (const void *r1, const void *r2)
2150 {
2151 const struct df_ref *ref1 = *(struct df_ref **)r1;
2152 const struct df_ref *ref2 = *(struct df_ref **)r2;
2153
2154 if (ref1 == ref2)
2155 return 0;
2156
2157 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2158 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2159
2160 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2161 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2162
2163 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2164 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2165 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2166
2167 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2168 {
2169 /* If two refs are identical except that one of them is from
2170 a mw and one is not, we need to have the one with the mw
2171 first. */
2172 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2173 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2174 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2175 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2176 return -1;
2177 else
2178 return 1;
2179 }
2180 return 0;
2181 }
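
/* Editor's note: qsort hands this comparator pointers to the array
   elements, and the elements are themselves struct df_ref pointers,
   hence the double indirection in the casts above.  */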
2182
2183 static void
2184 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2185 {
2186 struct df_ref *tmp = ref_vec[i];
2187 ref_vec[i] = ref_vec[j];
2188 ref_vec[j] = tmp;
2189 }
2190
2191 /* Sort and compress a set of refs. */
2192
2193 static unsigned int
2194 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2195 {
2196 struct df_scan_problem_data *problem_data
2197 = (struct df_scan_problem_data *) df_scan->problem_data;
2198 unsigned int i;
2199 unsigned int dist = 0;
2200
2201 ref_vec[count] = NULL;
2202 /* If there are 1 or 0 elements, there is nothing to do. */
2203 if (count < 2)
2204 return count;
2205 else if (count == 2)
2206 {
2207 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2208 df_swap_refs (ref_vec, 0, 1);
2209 }
2210 else
2211 {
2212 for (i = 0; i < count - 1; i++)
2213 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2214 break;
2215 /* If the array is already strictly ordered, which is the most
2216 common case for large COUNT (this happens for CALL INSNs),
2217 there is no need to sort and filter out duplicates.
2218 Simply return the count.
2219 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2220 of DF_REF_COMPARE. */
2222 if (i == count - 1)
2223 return count;
2224 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2225 }
2226
2227 for (i = 0; i < count - dist; i++)
2228 {
2229 /* Find the next ref that is not equal to the current ref. */
2230 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2231 {
2232 pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
2233 dist++;
2234 }
2235 /* Copy it down to the next position. */
2236 if (dist)
2237 ref_vec[i+1] = ref_vec[i + dist + 1];
2238 }
2239
2240 count -= dist;
2241 ref_vec[count] = NULL;
2242 return count;
2243 }
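
/* Editor's sketch of the compression loop above, restated on plain
   non-negative integers so the indexing is easier to follow.  None
   of these names exist in GCC; -1 plays the role of the NULL
   sentinel and DIST counts the duplicates dropped so far.  */

static unsigned int
example_compress_sorted (int *vec, unsigned int count)
{
  unsigned int i;
  unsigned int dist = 0;

  vec[count] = -1;	/* Like ref_vec[count] = NULL above.  */
  for (i = 0; i < count - dist; i++)
    {
      /* Skip over entries equal to vec[i]; the sentinel never
	 matches a non-negative entry, so the scan stops at the end
	 of the array.  */
      while (vec[i + dist + 1] == vec[i])
	dist++;
      /* Pull the next distinct entry down next to vec[i].  */
      if (dist)
	vec[i + 1] = vec[i + dist + 1];
    }
  return count - dist;
}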
2244
2245
2246 /* Return true if the contents of two df_mw_hardregs are
2247 identical. */
2248
2249 static bool
2250 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2251 {
2252 if (!mw2)
2253 return false;
2254 return (mw1 == mw2) ||
2255 (mw1->mw_reg == mw2->mw_reg
2256 && mw1->type == mw2->type
2257 && mw1->flags == mw2->flags
2258 && mw1->start_regno == mw2->start_regno
2259 && mw1->end_regno == mw2->end_regno);
2260 }
2261
2262
2263 /* Compare MW1 and MW2 for sorting. */
2264
2265 static int
2266 df_mw_compare (const void *m1, const void *m2)
2267 {
2268 const struct df_mw_hardreg *mw1 = *(struct df_mw_hardreg **)m1;
2269 const struct df_mw_hardreg *mw2 = *(struct df_mw_hardreg **)m2;
2270
2271 if (mw1 == mw2)
2272 return 0;
2273
2274 if (mw1->type != mw2->type)
2275 return mw1->type - mw2->type;
2276
2277 if (mw1->flags != mw2->flags)
2278 return mw1->flags - mw2->flags;
2279
2280 if (mw1->start_regno != mw2->start_regno)
2281 return mw1->start_regno - mw2->start_regno;
2282
2283 if (mw1->end_regno != mw2->end_regno)
2284 return mw1->end_regno - mw2->end_regno;
2285
2286 if (mw1->mw_reg != mw2->mw_reg)
2287 return mw1->mw_order - mw2->mw_order;
2288
2289 return 0;
2290 }
2291
2292
2293 /* Sort and compress a set of mw hardregs. */
2294
2295 static unsigned int
2296 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2297 {
2298 struct df_scan_problem_data *problem_data
2299 = (struct df_scan_problem_data *) df_scan->problem_data;
2300 unsigned int i;
2301 unsigned int dist = 0;
2302 mw_vec[count] = NULL;
2303
2304 if (count < 2)
2305 return count;
2306 else if (count == 2)
2307 {
2308 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2309 {
2310 struct df_mw_hardreg *tmp = mw_vec[0];
2311 mw_vec[0] = mw_vec[1];
2312 mw_vec[1] = tmp;
2313 }
2314 }
2315 else
2316 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2317
2318 for (i = 0; i < count - dist; i++)
2319 {
2320 /* Find the next ref that is not equal to the current ref. */
2321 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2322 {
2323 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2324 dist++;
2325 }
2326 /* Copy it down to the next position. */
2327 if (dist)
2328 mw_vec[i+1] = mw_vec[i + dist + 1];
2329 }
2330
2331 count -= dist;
2332 mw_vec[count] = NULL;
2333 return count;
2334 }
2335
2336
2337 /* Sort and remove duplicates from the COLLECTION_REC. */
2338
2339 static void
2340 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2341 {
2342 if (collection_rec->def_vec)
2343 collection_rec->next_def
2344 = df_sort_and_compress_refs (collection_rec->def_vec,
2345 collection_rec->next_def);
2346 if (collection_rec->use_vec)
2347 collection_rec->next_use
2348 = df_sort_and_compress_refs (collection_rec->use_vec,
2349 collection_rec->next_use);
2350 if (collection_rec->eq_use_vec)
2351 collection_rec->next_eq_use
2352 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2353 collection_rec->next_eq_use);
2354 if (collection_rec->mw_vec)
2355 collection_rec->next_mw
2356 = df_sort_and_compress_mws (collection_rec->mw_vec,
2357 collection_rec->next_mw);
2358 }
2359
2360
2361 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2362
2363 static void
2364 df_install_ref (struct df_ref *this_ref,
2365 struct df_reg_info *reg_info,
2366 struct df_ref_info *ref_info,
2367 bool add_to_table)
2368 {
2369 unsigned int regno = DF_REF_REGNO (this_ref);
2370 /* Add the ref to the reg_{def,use,eq_use} chain. */
2371 struct df_ref *head = reg_info->reg_chain;
2372
2373 reg_info->reg_chain = this_ref;
2374 reg_info->n_refs++;
2375
2376 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2377 {
2378 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2379 df->hard_regs_live_count[regno]++;
2380 }
2381
2382 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2383 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2384
2385 DF_REF_NEXT_REG (this_ref) = head;
2386
2387 /* THIS_REF is now the head of the chain, so it has no previous ref. */
2388 DF_REF_PREV_REG (this_ref) = NULL;
2389
2390 if (head)
2391 DF_REF_PREV_REG (head) = this_ref;
2392
2393 if (add_to_table)
2394 {
2395 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2396 df_check_and_grow_ref_info (ref_info, 1);
2397 DF_REF_ID (this_ref) = ref_info->table_size;
2398 /* Add the ref to the big array of refs. */
2399 ref_info->refs[ref_info->table_size] = this_ref;
2400 ref_info->table_size++;
2401 }
2402 else
2403 DF_REF_ID (this_ref) = -1;
2404
2405 ref_info->total_size++;
2406 }
2407
2408
2409 /* This function takes one of the groups of refs (defs, uses or
2410 eq_uses) and installs the entire group into the insn. It also adds
2411 each of these refs into the appropriate chains. */
2412
2413 static struct df_ref **
2414 df_install_refs (basic_block bb,
2415 struct df_ref **old_vec, unsigned int count,
2416 struct df_reg_info **reg_info,
2417 struct df_ref_info *ref_info,
2418 bool is_notes)
2419 {
2420 if (count)
2421 {
2422 unsigned int i;
2423 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2424 bool add_to_table;
2425
2426 switch (ref_info->ref_order)
2427 {
2428 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2429 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2430 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2431 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2432 add_to_table = true;
2433 break;
2434 case DF_REF_ORDER_UNORDERED:
2435 case DF_REF_ORDER_BY_REG:
2436 case DF_REF_ORDER_BY_INSN:
2437 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2438 add_to_table = !is_notes;
2439 break;
2440 default:
2441 add_to_table = false;
2442 break;
2443 }
2444
2445 /* Do not add if ref is not in the right blocks. */
2446 if (add_to_table && df->analyze_subset)
2447 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2448
2449 for (i = 0; i < count; i++)
2450 {
2451 struct df_ref *this_ref = old_vec[i];
2452 new_vec[i] = this_ref;
2453 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2454 ref_info, add_to_table);
2455 }
2456
2457 new_vec[count] = NULL;
2458 return new_vec;
2459 }
2460 else
2461 return df_null_ref_rec;
2462 }
2463
2464
2465 /* This function takes the mws and installs the entire group into
2466 the insn. */
2467
2468 static struct df_mw_hardreg **
2469 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2470 {
2471 if (count)
2472 {
2473 struct df_mw_hardreg **new_vec
2474 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2475 memcpy (new_vec, old_vec,
2476 sizeof (struct df_mw_hardreg*) * (count + 1));
2477 return new_vec;
2478 }
2479 else
2480 return df_null_mw_rec;
2481 }
2482
2483
2484 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2485 chains and update other necessary information. */
2486
2487 static void
2488 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2489 basic_block bb, rtx insn)
2490 {
2491 if (insn)
2492 {
2493 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2494 /* If there is a vector in the collection rec, add it to the
2495 insn. A null rec is a signal that the caller will handle the
2496 chain specially. */
2497 if (collection_rec->def_vec)
2498 {
2499 if (insn_rec->defs && *insn_rec->defs)
2500 free (insn_rec->defs);
2501 insn_rec->defs
2502 = df_install_refs (bb, collection_rec->def_vec,
2503 collection_rec->next_def,
2504 df->def_regs,
2505 &df->def_info, false);
2506 }
2507 if (collection_rec->use_vec)
2508 {
2509 if (insn_rec->uses && *insn_rec->uses)
2510 free (insn_rec->uses);
2511 insn_rec->uses
2512 = df_install_refs (bb, collection_rec->use_vec,
2513 collection_rec->next_use,
2514 df->use_regs,
2515 &df->use_info, false);
2516 }
2517 if (collection_rec->eq_use_vec)
2518 {
2519 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2520 free (insn_rec->eq_uses);
2521 insn_rec->eq_uses
2522 = df_install_refs (bb, collection_rec->eq_use_vec,
2523 collection_rec->next_eq_use,
2524 df->eq_use_regs,
2525 &df->use_info, true);
2526 }
2527 if (collection_rec->mw_vec)
2528 {
2529 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2530 free (insn_rec->mw_hardregs);
2531 insn_rec->mw_hardregs
2532 = df_install_mws (collection_rec->mw_vec,
2533 collection_rec->next_mw);
2534 }
2535 }
2536 else
2537 {
2538 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2539
2540 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2541 free (bb_info->artificial_defs);
2542 bb_info->artificial_defs
2543 = df_install_refs (bb, collection_rec->def_vec,
2544 collection_rec->next_def,
2545 df->def_regs,
2546 &df->def_info, false);
2547 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2548 free (bb_info->artificial_uses);
2549 bb_info->artificial_uses
2550 = df_install_refs (bb, collection_rec->use_vec,
2551 collection_rec->next_use,
2552 df->use_regs,
2553 &df->use_info, false);
2554 }
2555 }
2556
2557
2558 /* Allocate a ref and initialize its fields. */
2559
2560 static struct df_ref *
2561 df_ref_create_structure (struct df_collection_rec *collection_rec,
2562 rtx reg, rtx *loc,
2563 basic_block bb, rtx insn,
2564 enum df_ref_type ref_type,
2565 enum df_ref_flags ref_flags)
2566 {
2567 struct df_ref *this_ref;
2568 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2569 struct df_scan_problem_data *problem_data
2570 = (struct df_scan_problem_data *) df_scan->problem_data;
2571
2572 this_ref = pool_alloc (problem_data->ref_pool);
2573 DF_REF_ID (this_ref) = -1;
2574 DF_REF_REG (this_ref) = reg;
2575 DF_REF_REGNO (this_ref) = regno;
2576 DF_REF_LOC (this_ref) = loc;
2577 DF_REF_INSN (this_ref) = insn;
2578 DF_REF_CHAIN (this_ref) = NULL;
2579 DF_REF_TYPE (this_ref) = ref_type;
2580 DF_REF_FLAGS (this_ref) = ref_flags;
2581 DF_REF_BB (this_ref) = bb;
2582 DF_REF_NEXT_REG (this_ref) = NULL;
2583 DF_REF_PREV_REG (this_ref) = NULL;
2584 DF_REF_ORDER (this_ref) = df->ref_order++;
2585
2586 /* We need to clear this bit because fwprop, and in the future
2587 possibly other optimizations, sometimes create new refs using old
2588 refs as the model. */
2589 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2590
2591 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2592 if ((regno < FIRST_PSEUDO_REGISTER)
2593 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2594 {
2595 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2596 {
2597 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2598 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2599 }
2600 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2601 && (regno == FRAME_POINTER_REGNUM
2602 || regno == ARG_POINTER_REGNUM)))
2603 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2604 }
2605
2606 if (collection_rec)
2607 {
2608 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2609 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2610 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2611 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2612 else
2613 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2614 }
2615
2616 return this_ref;
2617 }
2618
2619
2620 /* Create new references of type DF_REF_TYPE for each part of register REG
2621 at address LOC within INSN of BB. */
2622
2623 static void
2624 df_ref_record (struct df_collection_rec *collection_rec,
2625 rtx reg, rtx *loc,
2626 basic_block bb, rtx insn,
2627 enum df_ref_type ref_type,
2628 enum df_ref_flags ref_flags)
2629 {
2630 rtx oldreg = reg;
2631 unsigned int regno;
2632
2633 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2634
2635 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2636 if (regno < FIRST_PSEUDO_REGISTER)
2637 {
2638 struct df_mw_hardreg *hardreg = NULL;
2639 struct df_scan_problem_data *problem_data
2640 = (struct df_scan_problem_data *) df_scan->problem_data;
2641 unsigned int i;
2642 unsigned int endregno;
2643 struct df_ref *ref;
2644
2645 if (GET_CODE (reg) == SUBREG)
2646 {
2647 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2648 SUBREG_BYTE (reg), GET_MODE (reg));
2649 endregno = regno + subreg_nregs (reg);
2650 }
2651 else
2652 endregno = END_HARD_REGNO (reg);
2653
2654 /* If this is a multiword hardreg, we create some extra
2655 data structures that will enable us to easily build REG_DEAD
2656 and REG_UNUSED notes. */
2657 if ((endregno != regno + 1) && insn)
2658 {
2659 /* Sets to a subreg of a multiword register are partial.
2660 Sets to a non-subreg of a multiword register are not. */
2661 if (GET_CODE (oldreg) == SUBREG)
2662 ref_flags |= DF_REF_PARTIAL;
2663 ref_flags |= DF_REF_MW_HARDREG;
2664
2665 hardreg = pool_alloc (problem_data->mw_reg_pool);
2666 hardreg->type = ref_type;
2667 hardreg->flags = ref_flags;
2668 hardreg->mw_reg = reg;
2669 hardreg->loc = loc;
2670 hardreg->start_regno = regno;
2671 hardreg->end_regno = endregno - 1;
2672 hardreg->mw_order = df->ref_order++;
2673 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2674 }
2675
2676 for (i = regno; i < endregno; i++)
2677 {
2678 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2679 bb, insn, ref_type, ref_flags);
2680
2681 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2682 }
2683 }
2684 else
2685 df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2686 ref_type, ref_flags);
2690 }
2691
2692
2693 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2694 covered by the outer mode is smaller than that covered by the inner mode,
2695 is a read-modify-write operation.
2696 This function returns true iff the SUBREG X is such a SUBREG. */
2697
2698 bool
2699 df_read_modify_subreg_p (rtx x)
2700 {
2701 unsigned int isize, osize;
2702 if (GET_CODE (x) != SUBREG)
2703 return false;
2704 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2705 osize = GET_MODE_SIZE (GET_MODE (x));
2706 return (isize > osize && isize > UNITS_PER_WORD);
2707 }
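
/* Editor's example: on a target with UNITS_PER_WORD == 4, a store to
   (subreg:SI (reg:DI d) 0) has isize == 8 and osize == 4, so the
   function returns true -- the high word of D survives the store,
   making it a read-modify-write.  A store to (subreg:QI (reg:SI s) 0)
   has isize == 4, which is not greater than UNITS_PER_WORD, so it
   returns false.  */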
2708
2709
2710 /* Process all the registers defined in the rtx, X.
2711 Autoincrement/decrement definitions will be picked up by
2712 df_uses_record. */
2713
2714 static void
2715 df_def_record_1 (struct df_collection_rec *collection_rec,
2716 rtx x, basic_block bb, rtx insn,
2717 enum df_ref_flags flags)
2718 {
2719 rtx *loc;
2720 rtx dst;
2721 bool dst_in_strict_lowpart = false;
2722
2723 /* We may recursively call ourselves on EXPR_LIST when dealing with a
2724 PARALLEL construct. */
2725 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2726 loc = &XEXP (x, 0);
2727 else
2728 loc = &SET_DEST (x);
2729 dst = *loc;
2730
2731 /* It is legal to have a set destination be a parallel. */
2732 if (GET_CODE (dst) == PARALLEL)
2733 {
2734 int i;
2735
2736 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2737 {
2738 rtx temp = XVECEXP (dst, 0, i);
2739 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2740 || GET_CODE (temp) == SET)
2741 df_def_record_1 (collection_rec,
2742 temp, bb, insn,
2743 GET_CODE (temp) == CLOBBER
2744 ? flags | DF_REF_MUST_CLOBBER : flags);
2745 }
2746 return;
2747 }
2748
2749 /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
2750 be handy for the reg allocator. */
2751 while (GET_CODE (dst) == STRICT_LOW_PART
2752 || GET_CODE (dst) == ZERO_EXTRACT
2753 || df_read_modify_subreg_p (dst))
2754 {
2755 #if 0
2756 /* Strict low part always contains SUBREG, but we do not want to make
2757 it appear outside, as whole register is always considered. */
2758 if (GET_CODE (dst) == STRICT_LOW_PART)
2759 {
2760 loc = &XEXP (dst, 0);
2761 dst = *loc;
2762 }
2763 #endif
2764 loc = &XEXP (dst, 0);
2765 if (GET_CODE (dst) == STRICT_LOW_PART)
2766 dst_in_strict_lowpart = true;
2767 dst = *loc;
2768 flags |= DF_REF_READ_WRITE;
2769 }
2771
2772 /* Sets to a subreg of a single word register are partial sets if
2773 they are wrapped in a strict lowpart, and not partial otherwise.
2774 */
2775 if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
2776 && dst_in_strict_lowpart)
2777 flags |= DF_REF_PARTIAL;
2778
2779 if (REG_P (dst)
2780 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
2781 df_ref_record (collection_rec,
2782 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2783 }
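
/* Editor's example of the peeling loop above: for
     (set (strict_low_part (subreg:QI (reg:SI p) 0)) ...)
   the STRICT_LOW_PART is stripped, DF_REF_READ_WRITE is set and
   DST_IN_STRICT_LOWPART is noted; the remaining SUBREG of a REG then
   gets DF_REF_PARTIAL and is recorded as a def of P.  */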
2784
2785
2786 /* Process all the registers defined in the pattern rtx, X. */
2787
2788 static void
2789 df_defs_record (struct df_collection_rec *collection_rec,
2790 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2791 {
2792 RTX_CODE code = GET_CODE (x);
2793
2794 if (code == SET || code == CLOBBER)
2795 {
2796 /* Mark the single def within the pattern. */
2797 enum df_ref_flags clobber_flags = flags;
2798 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2799 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2800 }
2801 else if (code == COND_EXEC)
2802 {
2803 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2804 bb, insn, DF_REF_CONDITIONAL);
2805 }
2806 else if (code == PARALLEL)
2807 {
2808 int i;
2809
2810 /* Mark the multiple defs within the pattern. */
2811 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2812 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2813 }
2814 }
2815
2816
2817 /* Process all the registers used in the rtx at address LOC. */
2818
2819 static void
2820 df_uses_record (struct df_collection_rec *collection_rec,
2821 rtx *loc, enum df_ref_type ref_type,
2822 basic_block bb, rtx insn, enum df_ref_flags flags)
2823 {
2824 RTX_CODE code;
2825 rtx x;
2826
2827 retry:
2828 x = *loc;
2829 if (!x)
2830 return;
2831 code = GET_CODE (x);
2832 switch (code)
2833 {
2834 case LABEL_REF:
2835 case SYMBOL_REF:
2836 case CONST_INT:
2837 case CONST:
2838 case CONST_DOUBLE:
2839 case CONST_VECTOR:
2840 case PC:
2841 case CC0:
2842 case ADDR_VEC:
2843 case ADDR_DIFF_VEC:
2844 return;
2845
2846 case CLOBBER:
2847 /* If we are clobbering a MEM, mark any registers inside the address
2848 as being used. */
2849 if (MEM_P (XEXP (x, 0)))
2850 df_uses_record (collection_rec,
2851 &XEXP (XEXP (x, 0), 0),
2852 DF_REF_REG_MEM_STORE, bb, insn, flags);
2853
2854 /* If we're clobbering a REG then we have a def so ignore. */
2855 return;
2856
2857 case MEM:
2858 df_uses_record (collection_rec,
2859 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2860 bb, insn, flags & DF_REF_IN_NOTE);
2861 return;
2862
2863 case SUBREG:
2864 /* A use of a SUBREG is a partial use of the inner register. */
2865 flags |= DF_REF_PARTIAL;
2866 /* If the SUBREG is not of a REG, recurse into it instead. */
2867 if (!REG_P (SUBREG_REG (x)))
2868 {
2869 loc = &SUBREG_REG (x);
2870 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
2871 return;
2872 }
2873 /* ... Fall through ... */
2874
2875 case REG:
2876 df_ref_record (collection_rec,
2877 x, loc, bb, insn, ref_type, flags);
2878 return;
2879
2880 case SET:
2881 {
2882 rtx dst = SET_DEST (x);
2883 gcc_assert (!(flags & DF_REF_IN_NOTE));
2884 df_uses_record (collection_rec,
2885 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
2886
2887 switch (GET_CODE (dst))
2888 {
2889 case SUBREG:
2890 if (df_read_modify_subreg_p (dst))
2891 {
2892 df_uses_record (collection_rec, &SUBREG_REG (dst),
2893 DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
2894 break;
2895 }
2896 /* Fall through. */
2897 case REG:
2898 case PARALLEL:
2899 case SCRATCH:
2900 case PC:
2901 case CC0:
2902 break;
2903 case MEM:
2904 df_uses_record (collection_rec, &XEXP (dst, 0),
2905 DF_REF_REG_MEM_STORE, bb, insn, flags);
2906 break;
2907 case STRICT_LOW_PART:
2908 {
2909 rtx *temp = &XEXP (dst, 0);
2910 /* A strict_low_part uses the whole REG and not just the
2911 SUBREG. */
2912 dst = XEXP (dst, 0);
2913 df_uses_record (collection_rec,
2914 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2915 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2916 }
2917 break;
2918 case ZERO_EXTRACT:
2919 case SIGN_EXTRACT:
2920 df_uses_record (collection_rec, &XEXP (dst, 0),
2921 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2922 df_uses_record (collection_rec, &XEXP (dst, 1),
2923 DF_REF_REG_USE, bb, insn, flags);
2924 df_uses_record (collection_rec, &XEXP (dst, 2),
2925 DF_REF_REG_USE, bb, insn, flags);
2926 dst = XEXP (dst, 0);
2927 break;
2928 default:
2929 gcc_unreachable ();
2930 }
2931 return;
2932 }
2933
2934 case RETURN:
2935 break;
2936
2937 case ASM_OPERANDS:
2938 case UNSPEC_VOLATILE:
2939 case TRAP_IF:
2940 case ASM_INPUT:
2941 {
2942 /* Traditional and volatile asm instructions must be
2943 considered to use and clobber all hard registers, all
2944 pseudo-registers and all of memory. So must TRAP_IF and
2945 UNSPEC_VOLATILE operations.
2946
2947 Consider for instance a volatile asm that changes the fpu
2948 rounding mode. An insn should not be moved across this
2949 even if it only uses pseudo-regs because it might give an
2950 incorrectly rounded result.
2951
2952 However, flow.c's liveness computation did *not* do this,
2953 giving the reasoning as " ?!? Unfortunately, marking all
2954 hard registers as live causes massive problems for the
2955 register allocator and marking all pseudos as live creates
2956 mountains of uninitialized variable warnings."
2957
2958 In order to maintain the status quo with regard to liveness
2959 and uses, we do what flow.c did and just mark any regs we
2960 can find in ASM_OPERANDS as used. In global.c, asm insns are
2961 scanned and regs_asm_clobbered is filled out.
2962
2963 For all ASM_OPERANDS, we must traverse the vector of input
2964 operands. We can not just fall through here since then we
2965 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
2966 which do not indicate traditional asms unlike their normal
2967 usage. */
2968 if (code == ASM_OPERANDS)
2969 {
2970 int j;
2971
2972 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
2973 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
2974 DF_REF_REG_USE, bb, insn, flags);
2975 return;
2976 }
2977 break;
2978 }
2979
2980 case PRE_DEC:
2981 case POST_DEC:
2982 case PRE_INC:
2983 case POST_INC:
2984 case PRE_MODIFY:
2985 case POST_MODIFY:
2986 /* Catch the def of the register being modified. */
2987 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
2988 DF_REF_REG_DEF,
2989 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
2990
2991 /* ... Fall through to handle uses ... */
2992
2993 default:
2994 break;
2995 }
2996
2997 /* Recursively scan the operands of this expression. */
2998 {
2999 const char *fmt = GET_RTX_FORMAT (code);
3000 int i;
3001
3002 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3003 {
3004 if (fmt[i] == 'e')
3005 {
3006 /* Tail recursive case: save a function call level. */
3007 if (i == 0)
3008 {
3009 loc = &XEXP (x, 0);
3010 goto retry;
3011 }
3012 df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
3013 }
3014 else if (fmt[i] == 'E')
3015 {
3016 int j;
3017 for (j = 0; j < XVECLEN (x, i); j++)
3018 df_uses_record (collection_rec,
3019 &XVECEXP (x, i, j), ref_type, bb, insn, flags);
3020 }
3021 }
3022 }
3023
3024 return;
3025 }
3026
3027
3028 /* For all DF_REF_CONDITIONAL defs, add a corresponding use, since a conditional def does not kill the register's old value. */
3029
3030 static void
3031 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3032 {
3033 unsigned int i;
3034 for (i = 0; i < collection_rec->next_def; i++)
3035 {
3036 struct df_ref *ref = collection_rec->def_vec[i];
3037 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3038 {
3039 struct df_ref *use
3040 = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3041 DF_REF_LOC (ref), DF_REF_BB (ref),
3042 DF_REF_INSN (ref), DF_REF_REG_USE,
3043 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3044 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3045 }
3046 }
3047 }
3048
3049
3050 /* Get call's extra defs and uses. */
3051
3052 static void
3053 df_get_call_refs (struct df_collection_rec * collection_rec,
3054 basic_block bb,
3055 rtx insn,
3056 enum df_ref_flags flags)
3057 {
3058 rtx note;
3059 bitmap_iterator bi;
3060 unsigned int ui;
3061 bool is_sibling_call;
3062 unsigned int i;
3063 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3064
3065 /* Do not generate clobbers for registers that are the result of the
3066 call, since that would cause ordering problems in the chain
3067 building code depending on which def is seen first. */
3068 for (i = 0; i < collection_rec->next_def; i++)
3069 {
3070 struct df_ref *def = collection_rec->def_vec[i];
3071 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3072 }
3073
3074 /* Record the registers used to pass arguments, and those
3075 explicitly noted as clobbered. */
3076 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3077 note = XEXP (note, 1))
3078 {
3079 if (GET_CODE (XEXP (note, 0)) == USE)
3080 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3081 DF_REF_REG_USE, bb, insn, flags);
3082 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3083 {
3084 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3085 if (!bitmap_bit_p (defs_generated, regno))
3086 df_defs_record (collection_rec, XEXP (note, 0), bb, insn, flags);
3087 }
3088 }
3089
3090 /* The stack ptr is used (honorarily) by a CALL insn. */
3091 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3092 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
3093
3094 /* Calls may also reference any of the global registers,
3095 so they are recorded as used. */
3096 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3097 if (global_regs[i])
3098 df_ref_record (collection_rec, regno_reg_rtx[i],
3099 NULL, bb, insn, DF_REF_REG_USE, flags);
3100
3101 is_sibling_call = SIBLING_CALL_P (insn);
3102 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3103 {
3104 if ((!bitmap_bit_p (defs_generated, ui))
3105 && (!is_sibling_call
3106 || !bitmap_bit_p (df->exit_block_uses, ui)
3107 || refers_to_regno_p (ui, ui+1,
3108 current_function_return_rtx, NULL)))
3109
3110 df_ref_record (collection_rec, regno_reg_rtx[ui],
3111 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
3112 }
3113
3114 BITMAP_FREE (defs_generated);
3115 return;
3116 }
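
/* Editor's example: for a call whose CALL_INSN_FUNCTION_USAGE is
     (expr_list (use (reg:SI 5))
        (expr_list (clobber (reg:SI 12)) (nil)))
   the note loop above records a use of r5 and, provided r12 was not
   already defined by the call pattern itself, a DF_REF_MUST_CLOBBER
   def of r12.  */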
3117
3118 /* Collect all refs in the INSN. This function is free of any
3119 side-effects - it will create and return lists of df_refs in the
3120 COLLECTION_REC without putting those refs into the existing ref
3121 chains and reg chains. */
3122
3123 static void
3124 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3125 basic_block bb, rtx insn)
3126 {
3127 rtx note;
3128 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3129
3130 /* Clear out the collection record. */
3131 collection_rec->next_def = 0;
3132 collection_rec->next_use = 0;
3133 collection_rec->next_eq_use = 0;
3134 collection_rec->next_mw = 0;
3135
3136 /* Record register defs. */
3137 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3138
3139 /* Process REG_EQUIV/REG_EQUAL notes. */
3140 for (note = REG_NOTES (insn); note;
3141 note = XEXP (note, 1))
3142 {
3143 switch (REG_NOTE_KIND (note))
3144 {
3145 case REG_EQUIV:
3146 case REG_EQUAL:
3147 df_uses_record (collection_rec,
3148 &XEXP (note, 0), DF_REF_REG_USE,
3149 bb, insn, DF_REF_IN_NOTE);
3150 break;
3151 case REG_NON_LOCAL_GOTO:
3152 /* The frame ptr is used by a non-local goto. */
3153 df_ref_record (collection_rec,
3154 regno_reg_rtx[FRAME_POINTER_REGNUM],
3155 NULL,
3156 bb, insn,
3157 DF_REF_REG_USE, 0);
3158 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3159 df_ref_record (collection_rec,
3160 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3161 NULL,
3162 bb, insn,
3163 DF_REF_REG_USE, 0);
3164 #endif
3165 break;
3166 default:
3167 break;
3168 }
3169 }
3170
3171 if (CALL_P (insn))
3172 df_get_call_refs (collection_rec, bb, insn,
3173 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3174
3175 /* Record the register uses. */
3176 df_uses_record (collection_rec,
3177 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
3178
3179 /* DF_REF_CONDITIONAL needs corresponding USES. */
3180 if (is_cond_exec)
3181 df_get_conditional_uses (collection_rec);
3182
3183 df_canonize_collection_rec (collection_rec);
3184 }
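
/* Editor's sketch (hypothetical wrapper, not part of the original
   file): the two-phase protocol for rescanning a single insn, using
   the same 1000-entry alloca convention as df_notes_rescan and
   df_bb_refs_record.  df_insn_rescan performs roughly these steps,
   plus verification and deletion of the insn's existing refs.  */

static void
example_rescan_one_insn (basic_block bb, rtx insn)
{
  struct df_collection_rec rec;
  memset (&rec, 0, sizeof (struct df_collection_rec));
  rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
  rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
  rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
  rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 1000);

  /* Phase 1: collect refs with no side effects on existing chains.  */
  df_insn_refs_collect (&rec, bb, insn);
  /* Phase 2: commit them to the insn, reg and ref tables.  */
  df_refs_add_to_chains (&rec, bb, insn);
}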
3185
3186 /* Return true if any pred edge of BB is an eh edge. */
3187
3188 bool
3189 df_has_eh_preds (basic_block bb)
3190 {
3191 edge e;
3192 edge_iterator ei;
3193
3194 FOR_EACH_EDGE (e, ei, bb->preds)
3195 {
3196 if (e->flags & EDGE_EH)
3197 return true;
3198 }
3199 return false;
3200 }
3201
3202
3203 /* Recompute the luids for the insns in BB. */
3204
3205 void
3206 df_recompute_luids (basic_block bb)
3207 {
3208 rtx insn;
3209 int luid = 0;
3210
3211 df_grow_insn_info ();
3212
3213 /* Scan the block an insn at a time from beginning to end. */
3214 FOR_BB_INSNS (bb, insn)
3215 {
3216 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3217 /* Inserting labels does not always trigger the incremental
3218 rescanning. */
3219 if (!insn_info)
3220 {
3221 gcc_assert (!INSN_P (insn));
3222 df_insn_create_insn_record (insn);
3223 }
3224
3225 DF_INSN_LUID (insn) = luid;
3226 if (INSN_P (insn))
3227 luid++;
3228 }
3229 }
3230
3231
3232 /* Returns true if the function entry needs to
3233 define the static chain register. */
3234
3235 static bool
3236 df_need_static_chain_reg (struct function *fun)
3237 {
3238 tree fun_context = decl_function_context (fun->decl);
3239 return fun_context
3240 && !DECL_NO_STATIC_CHAIN (fun_context);
3241 }
3242
3243
3244 /* Collect all artificial refs at the block level for BB and add them
3245 to COLLECTION_REC. */
3246
3247 static void
3248 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3249 {
3250 collection_rec->next_def = 0;
3251 collection_rec->next_use = 0;
3252 collection_rec->next_eq_use = 0;
3253 collection_rec->next_mw = 0;
3254
3255 if (bb->index == ENTRY_BLOCK)
3256 {
3257 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3258 return;
3259 }
3260 else if (bb->index == EXIT_BLOCK)
3261 {
3262 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3263 return;
3264 }
3265
3266 #ifdef EH_RETURN_DATA_REGNO
3267 if (df_has_eh_preds (bb))
3268 {
3269 unsigned int i;
3270 /* Mark the registers that will contain data for the handler. */
3271 for (i = 0; ; ++i)
3272 {
3273 unsigned regno = EH_RETURN_DATA_REGNO (i);
3274 if (regno == INVALID_REGNUM)
3275 break;
3276 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3277 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3278 }
3279 }
3280 #endif
3281
3282
3283 #ifdef EH_USES
3284 if (df_has_eh_preds (bb))
3285 {
3286 unsigned int i;
3287 /* This code is putting in an artificial ref for the use at the
3288 TOP of the block that receives the exception. It is too
3289 cumbersome to actually put the ref on the edge. We could
3290 either model this at the top of the receiver block or the
3291 bottom of the sender block.
3292
3293 The bottom of the sender block is problematic because not all
3294 out-edges of a block are eh-edges. However, it is true that
3295 either all edges into a block are eh-edges or none of them
3296 are. Thus, we can model this at the top of the eh-receiver
3297 for all of the edges at once. */
3298 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3299 if (EH_USES (i))
3300 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3301 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
3302 }
3303 #endif
3304
3305 /* Add the hard_frame_pointer if this block is the target of a
3306 non-local goto. */
3307 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3308 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3309 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3310
3311 /* Add the artificial uses. */
3312 if (bb->index >= NUM_FIXED_BLOCKS)
3313 {
3314 bitmap_iterator bi;
3315 unsigned int regno;
3316 bitmap au = df_has_eh_preds (bb)
3317 ? df->eh_block_artificial_uses
3318 : df->regular_block_artificial_uses;
3319
3320 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3321 {
3322 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3323 bb, NULL, DF_REF_REG_USE, 0);
3324 }
3325 }
3326
3327 df_canonize_collection_rec (collection_rec);
3328 }
3329
3330
3331 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3332
3333 void
3334 df_bb_refs_record (int bb_index, bool scan_insns)
3335 {
3336 basic_block bb = BASIC_BLOCK (bb_index);
3337 rtx insn;
3338 int luid = 0;
3339 struct df_scan_bb_info *bb_info;
3340 struct df_collection_rec collection_rec;
3341 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3342 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3343 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3344 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
3345
3346 if (!df)
3347 return;
3348
3349 bb_info = df_scan_get_bb_info (bb_index);
3350
3351 /* Need to make sure that there is a record in the basic block info. */
3352 if (!bb_info)
3353 {
3354 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3355 df_scan_set_bb_info (bb_index, bb_info);
3356 bb_info->artificial_defs = NULL;
3357 bb_info->artificial_uses = NULL;
3358 }
3359
3360 if (scan_insns)
3361 /* Scan the block an insn at a time from beginning to end. */
3362 FOR_BB_INSNS (bb, insn)
3363 {
3364 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3365 gcc_assert (!insn_info);
3366
3367 df_insn_create_insn_record (insn);
3368 if (INSN_P (insn))
3369 {
3370 /* Record refs within INSN. */
3371 DF_INSN_LUID (insn) = luid++;
3372 df_insn_refs_collect (&collection_rec, bb, insn);
3373 df_refs_add_to_chains (&collection_rec, bb, insn);
3374 }
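/* Editor's note: for a real insn the luid assigned above is
   overwritten just below with the incremented value. Luids only
   need to increase monotonically through the block (notes and
   labels simply share a neighboring insn's luid), so this appears
   to be harmless. */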
3375 DF_INSN_LUID (insn) = luid;
3376 }
3377
3378 /* Other block level artificial refs. */
3379 df_bb_refs_collect (&collection_rec, bb);
3380 df_refs_add_to_chains (&collection_rec, bb, NULL);
3381
3382 /* Now that the block has been processed, set the block as dirty so
3383 lr and ur will get it processed. */
3384 df_set_bb_dirty (bb);
3385 }
3386
3387
3388 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3389 block. */
3390
3391 static void
3392 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3393 {
3394 bitmap_clear (regular_block_artificial_uses);
3395
3396 if (reload_completed)
3397 {
3398 if (frame_pointer_needed)
3399 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3400 }
3401 else
3402 /* Before reload, there are a few registers that must be forced
3403 live everywhere -- which might not already be the case for
3404 blocks within infinite loops. */
3405 {
3406 /* Any reference to any pseudo before reload is a potential
3407 reference of the frame pointer. */
3408 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3409
3410 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3411 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3412 #endif
3413
3414 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3415 /* Pseudos with argument area equivalences may require
3416 reloading via the argument pointer. */
3417 if (fixed_regs[ARG_POINTER_REGNUM])
3418 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3419 #endif
3420
3421 /* Any constant, or pseudo with constant equivalences, may
3422 require reloading from memory using the pic register. */
3423 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3424 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3425 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3426 }
3427 /* The all-important stack pointer must always be live. */
3428 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3429 }
3430
3431
3432 /* Get the artificial use set for an eh block. */
3433
3434 static void
3435 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3436 {
3437 bitmap_clear (eh_block_artificial_uses);
3438
3439 /* The following code (down through the arg_pointer setting)
3440 appears to be necessary because there is nothing that actually
3441 describes what the exception handling code may actually need
3442 to keep alive. */
3443 if (reload_completed)
3444 {
3445 if (frame_pointer_needed)
3446 {
3447 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3448 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3449 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3450 #endif
3451 }
3452 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3453 if (fixed_regs[ARG_POINTER_REGNUM])
3454 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3455 #endif
3456 }
3457 }
3458
3459
3460 \f
3461 /*----------------------------------------------------------------------------
3462 Specialized hard register scanning functions.
3463 ----------------------------------------------------------------------------*/
3464
3465
3466 /* Mark a register in SET. Hard registers in large modes get all
3467 of their component registers set as well; e.g. marking (reg:DI 0)
3468 on a 32-bit target sets bits 0 and 1. */
3468
3469 static void
3470 df_mark_reg (rtx reg, void *vset)
3471 {
3472 bitmap set = (bitmap) vset;
3473 int regno = REGNO (reg);
3474
3475 gcc_assert (GET_MODE (reg) != BLKmode);
3476
3477 bitmap_set_bit (set, regno);
3478 if (regno < FIRST_PSEUDO_REGISTER)
3479 {
3480 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3481 while (--n > 0)
3482 bitmap_set_bit (set, regno + n);
3483 }
3484 }
3485
3486
3487
3488
3489 /* Set the bits for regs that are considered to be defined at the entry. */
3490
3491 static void
3492 df_get_entry_block_def_set (bitmap entry_block_defs)
3493 {
3494 rtx r;
3495 int i;
3496
3497 bitmap_clear (entry_block_defs);
3498
3499 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3500 {
3501 if (FUNCTION_ARG_REGNO_P (i))
3502 #ifdef INCOMING_REGNO
3503 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3504 #else
3505 bitmap_set_bit (entry_block_defs, i);
3506 #endif
3507 }
3508
3509 /* Once the prologue has been generated, all of these registers
3510 should just show up in the first regular block. */
3511 if (HAVE_prologue && epilogue_completed)
3512 {
3513 /* Defs for the callee saved registers are inserted so that the
3514 pushes have some defining location. */
3515 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3516 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3517 bitmap_set_bit (entry_block_defs, i);
3518 }
3519 else
3520 {
3521 /* The always important stack pointer. */
3522 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3523
3524 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3525 only STATIC_CHAIN_REGNUM is defined. If they are different,
3526 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3527 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3528 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3529 #else
3530 #ifdef STATIC_CHAIN_REGNUM
3531 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3532 #endif
3533 #endif
3534
3535 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3536 if (r && REG_P (r))
3537 bitmap_set_bit (entry_block_defs, REGNO (r));
3538 }
3539
3540 if ((!reload_completed) || frame_pointer_needed)
3541 {
3542 /* Any reference to any pseudo before reload is a potential
3543 reference of the frame pointer. */
3544 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3545 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3546 /* If they are different, also mark the hard frame pointer as live. */
3547 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3548 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3549 #endif
3550 }
3551
3552 /* These registers are live everywhere. */
3553 if (!reload_completed)
3554 {
3555 #ifdef EH_USES
3556 /* The ia-64, the only machine that uses this, does not define these
3557 until after reload. */
3558 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3559 if (EH_USES (i))
3560 {
3561 bitmap_set_bit (entry_block_defs, i);
3562 }
3563 #endif
3564
3565 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3566 /* Pseudos with argument area equivalences may require
3567 reloading via the argument pointer. */
3568 if (fixed_regs[ARG_POINTER_REGNUM])
3569 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3570 #endif
3571
3572 #ifdef PIC_OFFSET_TABLE_REGNUM
3573 /* Any constant, or pseudo with constant equivalences, may
3574 require reloading from memory using the pic register. */
3575 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3576 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3577 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3578 #endif
3579 }
3580
3581 #ifdef INCOMING_RETURN_ADDR_RTX
3582 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3583 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3584 #endif
3585
3586 targetm.live_on_entry (entry_block_defs);
3587
3588 /* If the function has an incoming STATIC_CHAIN,
3589 it has to show up in the entry def set. */
3590 if (df_need_static_chain_reg (cfun))
3591 {
3592 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3593 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3594 #else
3595 #ifdef STATIC_CHAIN_REGNUM
3596 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3597 #endif
3598 #endif
3599 }
3600 }
3601
3602
3603 /* Return the (conservative) set of hard registers that are defined on
3604 entry to the function.
3605 It uses df->entry_block_defs to determine which register
3606 references to include. */
3607
3608 static void
3609 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3610 bitmap entry_block_defs)
3611 {
3612 unsigned int i;
3613 bitmap_iterator bi;
3614
3615 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3616 {
3617 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3618 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3619 }
3620
3621 df_canonize_collection_rec (collection_rec);
3622 }
3623
3624
3625 /* Record the (conservative) set of hard registers that are defined on
3626 entry to the function. */
3627
3628 static void
3629 df_record_entry_block_defs (bitmap entry_block_defs)
3630 {
3631 struct df_collection_rec collection_rec;
3632 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3633 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3634
3635 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3636
3637 /* Process bb_refs chain. */
3638 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3639 }
3640
3641
3642 /* Update the defs in the entry block. */
3643
3644 void
3645 df_update_entry_block_defs (void)
3646 {
3647 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3648 bool changed = false;
3649
3650 df_get_entry_block_def_set (refs);
3651 if (df->entry_block_defs)
3652 {
3653 if (!bitmap_equal_p (df->entry_block_defs, refs))
3654 {
3655 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3656 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3657 df_ref_chain_delete (bb_info->artificial_defs);
3658 bb_info->artificial_defs = NULL;
3659 changed = true;
3660 }
3661 }
3662 else
3663 {
3664 struct df_scan_problem_data *problem_data
3665 = (struct df_scan_problem_data *) df_scan->problem_data;
3666 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3667 changed = true;
3668 }
3669
3670 if (changed)
3671 {
3672 df_record_entry_block_defs (refs);
3673 bitmap_copy (df->entry_block_defs, refs);
3674 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3675 }
3676 BITMAP_FREE (refs);
3677 }
3678
3679
3680 /* Set the bits for regs that are considered to be used at the exit. */
3681
3682 static void
3683 df_get_exit_block_use_set (bitmap exit_block_uses)
3684 {
3685 unsigned int i;
3686
3687 bitmap_clear (exit_block_uses);
3688
3689 /* Stack pointer is always live at the exit. */
3690 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3691
3692 /* Mark the frame pointer if needed at the end of the function.
3693 If we end up eliminating it, it will be removed from the live
3694 list of each basic block by reload. */
3695
3696 if ((!reload_completed) || frame_pointer_needed)
3697 {
3698 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3699 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3700 /* If they are different, also mark the hard frame pointer as live. */
3701 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3702 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3703 #endif
3704 }
3705
3706 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3707 /* Many architectures have a GP register even without flag_pic.
3708 Assume the pic register is not in use, or will be handled by
3709 other means, if it is not fixed. */
3710 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3711 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3712 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3713 #endif
3714
3715 /* Mark all global registers, and all registers used by the
3716 epilogue as being live at the end of the function since they
3717 may be referenced by our caller. */
3718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3719 if (global_regs[i] || EPILOGUE_USES (i))
3720 bitmap_set_bit (exit_block_uses, i);
3721
3722 if (HAVE_epilogue && epilogue_completed)
3723 {
3724 /* Mark all call-saved registers that we actually used. */
3725 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3726 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3727 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3728 bitmap_set_bit (exit_block_uses, i);
3729 }
3730
3731 #ifdef EH_RETURN_DATA_REGNO
3732 /* Mark the registers that will contain data for the handler. */
3733 if (reload_completed && current_function_calls_eh_return)
3734 for (i = 0; ; ++i)
3735 {
3736 unsigned regno = EH_RETURN_DATA_REGNO (i);
3737 if (regno == INVALID_REGNUM)
3738 break;
3739 bitmap_set_bit (exit_block_uses, regno);
3740 }
3741 #endif
3742
3743 #ifdef EH_RETURN_STACKADJ_RTX
3744 if ((!HAVE_epilogue || ! epilogue_completed)
3745 && current_function_calls_eh_return)
3746 {
3747 rtx tmp = EH_RETURN_STACKADJ_RTX;
3748 if (tmp && REG_P (tmp))
3749 df_mark_reg (tmp, exit_block_uses);
3750 }
3751 #endif
3752
3753 #ifdef EH_RETURN_HANDLER_RTX
3754 if ((!HAVE_epilogue || ! epilogue_completed)
3755 && current_function_calls_eh_return)
3756 {
3757 rtx tmp = EH_RETURN_HANDLER_RTX;
3758 if (tmp && REG_P (tmp))
3759 df_mark_reg (tmp, exit_block_uses);
3760 }
3761 #endif
3762
3763 /* Mark function return value. */
3764 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3765 }
3766
3767
3768 /* Return the refs of hard registers that are used in the exit block.
3769 It uses df->exit_block_uses to determine which registers to include. */
3770
3771 static void
3772 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3773 {
3774 unsigned int i;
3775 bitmap_iterator bi;
3776
3777 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3778 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3779 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3780
3781 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3782 /* It is deliberate that this is not put in the exit block uses but
3783 I do not know why. */
3784 if (reload_completed
3785 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3786 && df_has_eh_preds (EXIT_BLOCK_PTR)
3787 && fixed_regs[ARG_POINTER_REGNUM])
3788 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3789 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3790 #endif
3791
3792 df_canonize_collection_rec (collection_rec);
3793 }
3794
3795
3796 /* Record the set of hard registers that are used in the exit block.
3797 It uses df->exit_block_uses to determine which bits to include. */
3798
3799 static void
3800 df_record_exit_block_uses (bitmap exit_block_uses)
3801 {
3802 struct df_collection_rec collection_rec;
3803 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3804 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3805
3806 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3807
3808 /* Process bb_refs chain. */
3809 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3810 }
3811
3812
3813 /* Update the uses in the exit block. */
3814
3815 void
3816 df_update_exit_block_uses (void)
3817 {
3818 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3819 bool changed = false;
3820
3821 df_get_exit_block_use_set (refs);
  if (df->exit_block_uses)
    {
      if (!bitmap_equal_p (df->exit_block_uses, refs))
        {
          struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
          df_ref_chain_delete (bb_info->artificial_uses);
          bb_info->artificial_uses = NULL;
          changed = true;
        }
    }
  else
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_exit_block_uses (refs);
      bitmap_copy (df->exit_block_uses, refs);
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
    }
  BITMAP_FREE (refs);
}

static bool initialized = false;


/* Initialize some platform-specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
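  /* ELIMINABLE_REGS is a target macro listing the register elimination
     pairs the target supports.  Purely for illustration: on i386 it
     includes pairs such as { ARG_POINTER_REGNUM, STACK_POINTER_REGNUM }
     and { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }.  Only the FROM
     side of each pair matters here.  */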
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}


/* Recompute the parts of scanning that are based on regs_ever_live
   because something changed in that array.  */

void
df_update_entry_exit_and_calls (void)
{
  basic_block bb;

  df_update_entry_block_defs ();
  df_update_exit_block_uses ();

  /* The call insns need to be rescanned because there may be changes
     in the set of registers clobbered across the call.  */
  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn) && CALL_P (insn))
            df_insn_rescan (insn);
        }
    }
}


/* Return true if hard REG is actually used in some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions under which this array is set.  */

bool
df_hard_reg_used_p (unsigned int reg)
{
  gcc_assert (df);
  return df->hard_regs_live_count[reg] != 0;
}


/* A count of the number of times REG is actually used in some
   instruction.  There are a fair number of conditions that affect the
   setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions under which this array
   is set.  */

unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  gcc_assert (df);
  return df->hard_regs_live_count[reg];
}


/* Get the value of regs_ever_live[REGNO].  */

bool
df_regs_ever_live_p (unsigned int regno)
{
  return regs_ever_live[regno];
}


/* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
   to change, schedule that change for the next update.  */

void
df_set_regs_ever_live (unsigned int regno, bool value)
{
  if (regs_ever_live[regno] == value)
    return;

  regs_ever_live[regno] = value;
  if (df)
    df->redo_entry_and_exit = true;
}


/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  bool changed = df->redo_entry_and_exit;
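  /* CHANGED is seeded from redo_entry_and_exit: a pending request from
     df_set_regs_ever_live forces the entry/exit/call update below even
     if no register newly goes live here.  */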

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
        regs_ever_live[i] = true;
        changed = true;
      }
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}

\f
/*----------------------------------------------------------------------------
   Dataflow ref information verification functions.

   df_reg_chain_mark (refs, regno, is_def, is_eq_use)
   df_reg_chain_verify_unmarked (refs)
   df_refs_verify (ref*, ref*, bool)
   df_mws_verify (mw*, mw*, bool)
   df_insn_refs_verify (collection_rec, bb, insn, bool)
   df_bb_refs_verify (bb, refs, bool)
   df_bb_verify (bb)
   df_exit_block_bitmap_verify (bool)
   df_entry_block_bitmap_verify (bool)
   df_scan_verify ()
----------------------------------------------------------------------------*/


/* Mark all refs in the reg chain.  Verify that all of the refs are in
   the correct chain.  */

static unsigned int
df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
                   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  struct df_ref *ref;
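  /* Walk REGNO's chain through the next_reg links, checking each
     ref's invariants and marking it; df_bb_verify later clears each
     mark as the matching ref is found in an insn or a block.  */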
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
         of the chains are clear.  */
      if (!df_chain)
        gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
        gcc_assert (DF_REF_TYPE (ref) == DF_REF_REG_DEF);
      else
        gcc_assert (DF_REF_TYPE (ref) != DF_REF_REG_DEF);

      if (is_eq_use)
        gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
        gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      if (ref->next_reg)
        gcc_assert (ref->next_reg->prev_reg == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}


/* Verify that all of the refs in the chain are unmarked.  */

static void
df_reg_chain_verify_unmarked (struct df_ref *refs)
{
  struct df_ref *ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
                bool abort_if_fail)
{
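  /* Both vectors are null-terminated; compare them element by element
     until either runs out.  */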
  while ((*new_rec) && (*old_rec))
    {
      if (!df_ref_equal_p (*new_rec, *old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }

      /* abort_if_fail is set when called from the function-level
         verifier.  In that context, record that this ref was seen by
         clearing the mark left by the reg-chain walk.  */
      if (abort_if_fail)
        {
          gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
          DF_REF_REG_UNMARK (*old_rec);
        }

      new_rec++;
      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
  else
    return ((*new_rec == NULL) && (*old_rec == NULL));
  return false;
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
               bool abort_if_fail)
{
  while ((*new_rec) && (*old_rec))
    {
      if (!df_mw_equal_p (*new_rec, *old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }
      new_rec++;
      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
  else
    return ((*new_rec == NULL) && (*old_rec == NULL));
  return false;
}


/* Return true if the existing insn refs information for INSN is
   complete and correct.  Otherwise (i.e. if there are any missing or
   extra refs) return false; the freshly computed refs are left in
   COLLECTION_REC.

   If ABORT_IF_FAIL, this is being called from the function-level
   verifier: every ref that checks out must already be DF_REF_MARKED()
   from the reg-chain walk, and its mark is cleared here.  If it is
   false, this is a per-insn verification rather than a whole-function
   one, and no marks are touched.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
                     basic_block bb,
                     rtx insn,
                     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);

  df_insn_refs_collect (collection_rec, bb, insn);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
        gcc_assert (0);
      else
        return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
                         abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
                         abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
                         abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
                        abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}


/* Return true if all refs in the basic block are correct and complete.
   Because df_insn_refs_verify is called with abort_if_fail set, every
   ref that is verified has the DF_REF_MARK bit (set earlier by
   df_reg_chain_mark) cleared along the way.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
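  /* Fixed-capacity scratch vectors.  The bounds (1000 refs, 100
     multiword hardregs) are assumed to be comfortably larger than any
     single insn can produce, matching the limits the scanning code in
     this file uses elsewhere.  */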
  collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);

  gcc_assert (bb_info);

  /* Scan the block, an insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}


/* Return true if the entry block has a correct and complete df_ref set.
   If not, either abort (if ABORT_IF_FAIL is true) or return false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  df_get_entry_block_def_set (entry_block_defs);

  is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  BITMAP_FREE (entry_block_defs);

  return is_eq;
}


/* Return true if the exit block has a correct and complete df_ref set.
   If not, either abort (if ABORT_IF_FAIL is true) or return false.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  df_get_exit_block_use_set (exit_block_uses);

  is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  BITMAP_FREE (exit_block_uses);

  return is_eq;
}


/* Verify that the df_ref information for all insns in all blocks is
   correct and complete, aborting on the first inconsistency found.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap regular_block_artificial_uses;
  bitmap eh_block_artificial_uses;

  if (!df)
    return;

  /* This is a hack, but a necessary one.  If you do not do this,
     insn_attrtab can never be compiled in a bootstrap.  This
     verification is just too expensive.  */
  if (n_basic_blocks > 250)
    return;

  /* Verification is a four-step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
                  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
                  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
                  == DF_REG_EQ_USE_COUNT (i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);

  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (eh_block_artificial_uses);

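  /* The equality assertion below implies that the cached EH set is
     stored as the union of the EH and regular artificial uses, so form
     the same union here before comparing.  */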
  bitmap_ior_into (eh_block_artificial_uses,
                   regular_block_artificial_uses);

  /* Check that the artificial_uses bitmaps did not change.  */
  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
                              df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
                              df->eh_block_artificial_uses));

  BITMAP_FREE (regular_block_artificial_uses);
  BITMAP_FREE (eh_block_artificial_uses);

  /* Verify the entry block and the exit block.  This only verifies the
     bitmaps; the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) All of the reg chains are traversed a second time, this time
     checking that the marks are clear.  A mark that is still set
     would have to come from a ref that is not in any insn or basic
     block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}