/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* A collection of refs gathered while scanning an insn or a basic
   block, before they are installed in the insn or block records.  */

struct df_collection_rec
{
  struct df_ref **def_vec;
  unsigned int next_def;
  struct df_ref **use_vec;
  unsigned int next_use;
  struct df_ref **eq_use_vec;
  unsigned int next_eq_use;
  struct df_mw_hardreg **mw_vec;
  unsigned int next_mw;
};

static struct df_ref *df_null_ref_rec[1];
static struct df_mw_hardreg *df_null_mw_rec[1];

static void df_ref_record (struct df_collection_rec *,
			   rtx, rtx *,
			   basic_block, rtx, enum df_ref_type,
			   enum df_ref_flags);
static void df_def_record_1 (struct df_collection_rec *,
			     rtx, basic_block, rtx,
			     enum df_ref_flags);
static void df_defs_record (struct df_collection_rec *,
			    rtx, basic_block, rtx,
			    enum df_ref_flags);
static void df_uses_record (struct df_collection_rec *,
			    rtx *, enum df_ref_type,
			    basic_block, rtx, enum df_ref_flags);

static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
					       basic_block, rtx, enum df_ref_type,
					       enum df_ref_flags);

static void df_insn_refs_collect (struct df_collection_rec *,
				  basic_block, rtx);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (struct df_ref **);
static void df_ref_chain_delete (struct df_ref **);

static void df_refs_add_to_chains (struct df_collection_rec *,
				   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (struct df_ref *, struct df_reg_info *,
			    struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all of the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  alloc_pool mw_link_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->regular_block_artificial_uses);
  BITMAP_FREE (df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  BITMAP_FREE (df->insns_to_delete);
  BITMAP_FREE (df->insns_to_rescan);
  BITMAP_FREE (df->insns_to_notes_rescan);

  free_alloc_pool (df_scan->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  free_alloc_pool (problem_data->mw_link_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}


/* Set basic block info.  */

static void
df_scan_set_bb_info (unsigned int index,
		     struct df_scan_bb_info *bb_info)
{
  gcc_assert (df_scan);
  df_grow_bb_info (df_scan);
  df_scan->block_info[index] = (void *) bb_info;
}


/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  if (bb_info)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    /* Record defs within INSN.  */
	    df_insn_delete (bb, INSN_UID (insn));
	}

      if (bb_index < df_scan->block_info_size)
	bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      df_ref_chain_delete_du_chain (bb_info->artificial_defs);
      df_ref_chain_delete_du_chain (bb_info->artificial_uses);
      df_ref_chain_delete (bb_info->artificial_defs);
      df_ref_chain_delete (bb_info->artificial_uses);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
      pool_free (df_scan->block_pool, bb_info);
    }
}


/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 400;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  df_scan->block_pool
    = create_alloc_pool ("df_scan_block pool",
			 sizeof (struct df_scan_bb_info),
			 block_size);

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
			 sizeof (struct df_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
			 sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
			 sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan_mw_reg pool",
			 sizeof (struct df_mw_hardreg), block_size);
  problem_data->mw_link_pool
    = create_alloc_pool ("df_scan_mw_link pool",
			 sizeof (struct df_link), block_size);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (!bb_info)
	{
	  bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
	  df_scan_set_bb_info (bb_index, bb_info);
	}
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}

/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, df_invalidated_by_call);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);

  fprintf (file, "\n");
}

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}

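#if 0
/* Illustrative sketch, not part of this file's interface: a typical
   client registers the scanning problem and then lets the core driver
   (df-core.c) run it.  The wrapper function here is hypothetical.  */
static void
example_df_client (void)
{
  df_scan_add_problem ();   /* Register the scanner itself.  */
  df_analyze ();            /* Scan and solve all registered problems.  */
}
#endif
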
\f
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, ensure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = xrealloc (df->def_regs,
			       new_size * sizeof (struct df_reg_info*));
      df->use_regs = xrealloc (df->use_regs,
			       new_size * sizeof (struct df_reg_info*));
      df->eq_use_regs = xrealloc (df->eq_use_regs,
				  new_size * sizeof (struct df_reg_info*));
      df->def_info.begin = xrealloc (df->def_info.begin,
				     new_size * sizeof (int));
      df->def_info.count = xrealloc (df->def_info.count,
				     new_size * sizeof (int));
      df->use_info.begin = xrealloc (df->use_info.begin,
				     new_size * sizeof (int));
      df->use_info.count = xrealloc (df->use_info.count,
				     new_size * sizeof (int));
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = xrealloc (ref_info->refs,
				 new_size * sizeof (struct df_ref *));
      memset (ref_info->refs + ref_info->refs_size, 0,
	      (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}


/* Check and grow the ref information if necessary.  This routine
   guarantees that the refs array has at least total_size +
   BITMAP_ADDEND entries.  It updates ref_info->refs_size only and
   does not change ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
			    unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}

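#if 0
/* Worked example of the growth policy above (illustrative numbers):
   with total_size == 100 and bitmap_addend == 1, a refs array smaller
   than 101 entries grows to 101 + 100/4 == 126 entries, so a run of
   single insertions does not reallocate on every call.  */
#endif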

/* Grow the ref information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
			    new_size * sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
	      (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}



\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks in blocks_to_analyze (as set by
   df_set_blocks), or all of the blocks in the function if
   blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);

  bitmap_ior_into (df->eh_block_artificial_uses,
		   df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}

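#if 0
/* Sketch of how a caller might restrict analysis to a subset of
   blocks before scanning runs.  df_set_blocks and df_analyze are
   defined in df-core.c; SOME_BB stands in for a real basic block.  */
{
  bitmap subset = BITMAP_ALLOC (NULL);
  bitmap_set_bit (subset, SOME_BB->index);
  df_set_blocks (subset);
  df_analyze ();
  BITMAP_FREE (subset);
}
#endif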

/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */

struct df_ref *
df_ref_create (rtx reg, rtx *loc, rtx insn,
	       basic_block bb,
	       enum df_ref_type ref_type,
	       enum df_ref_flags ref_flags)
{
  struct df_ref *ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  struct df_ref **ref_rec;
  struct df_ref ***ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);
  ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
				 ref_type, ref_flags);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
    }
  else
    {
      struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (bb);

  return ref;
}

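#if 0
/* Sketch of a df_ref_create call in a hypothetical context: record an
   extra use of the stack pointer inside INSN.  Real callers pass a
   LOC that points into the insn's own pattern.  */
struct df_ref *use_ref
  = df_ref_create (stack_pointer_rtx, &PATTERN (insn), insn,
		   BLOCK_FOR_INSN (insn), DF_REF_REG_USE, 0);
#endif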

\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (struct df_ref *ref)
{
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  struct df_ref **refs = NULL;

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  pool_free (problem_data->ref_pool, ref);
}


/* Remove REF from VEC.  */

static void
df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
{
  struct df_ref **vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
	vec++;

      while (*vec)
	{
	  *vec = *(vec+1);
	  vec++;
	}
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df_ref *ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BB (ref)->index);
	  df_ref_compress_rec (&bb_info->artificial_defs, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
	  df_ref_compress_rec (&insn_rec->defs, ref);
	}
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (DF_REF_BB (ref)->index);
	  df_ref_compress_rec (&bb_info->artificial_uses, ref);
	}
      else
	{
	  unsigned int uid = DF_REF_INSN_UID (ref);
	  struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

	  if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	    df_ref_compress_rec (&insn_rec->eq_uses, ref);
	  else
	    df_ref_compress_rec (&insn_rec->uses, ref);
	}
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}


/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_GET (insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}


/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */

static void
df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
	 pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
	df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (struct df_ref **ref_rec)
{
  struct df_ref **start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}


/* Delete all of the refs information from INSN.  BB must be passed in
   except when called from df_process_deferred_rescans to mark the block
   as dirty.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
	{
	  bitmap_clear_bit (df->insns_to_rescan, uid);
	  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
	 initialized.  However, combine deletes insns by changing them
	 to notes.  How clever.  So we cannot just check if it is a
	 valid insn before short circuiting this code, we need to see
	 if we actually initialized it.  */
      if (insn_info->defs)
	{
	  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

	  if (df_chain)
	    {
	      df_ref_chain_delete_du_chain (insn_info->defs);
	      df_ref_chain_delete_du_chain (insn_info->uses);
	      df_ref_chain_delete_du_chain (insn_info->eq_uses);
	    }

	  df_ref_chain_delete (insn_info->defs);
	  df_ref_chain_delete (insn_info->uses);
	  df_ref_chain_delete (insn_info->eq_uses);
	}
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}


/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_ref **ref;
  struct df_mw_hardreg **mw;

  if (collection_rec->def_vec)
    for (ref = collection_rec->def_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->use_vec)
    for (ref = collection_rec->use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->eq_use_vec)
    for (ref = collection_rec->eq_use_vec; *ref; ref++)
      pool_free (problem_data->ref_pool, *ref);
  if (collection_rec->mw_vec)
    for (mw = collection_rec->mw_vec; *mw; mw++)
      pool_free (problem_data->mw_reg_pool, *mw);
}


/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;
  collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = df_null_ref_rec;
	  insn_info->uses = df_null_ref_rec;
	  insn_info->eq_uses = df_null_ref_rec;
	  insn_info->mw_hardregs = df_null_mw_rec;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (df->insns_to_delete, uid);
      bitmap_clear_bit (df->insns_to_notes_rescan, uid);
      bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.  */
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
    }
  else
    {
      df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  df_set_bb_dirty (bb);
  return true;
}

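#if 0
/* Sketch of a hypothetical caller: a pass that edits an insn in place
   is expected to rescan it so the cached refs stay consistent.  */
if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
  df_insn_rescan (insn);
#endif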

/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_delete (NULL, uid);
    }

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  df_insn_rescan (insn);
	}
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}


/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_delete (NULL, uid);
    }

  bitmap_copy (tmp, df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}

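#if 0
/* Sketch of the deferred-rescan protocol (hypothetical caller): queue
   up many changes, then process the accumulated work in one batch.  */
df_set_flags (DF_DEFER_INSN_RESCAN);
/* ... create, modify and delete many insns ...  */
df_process_deferred_rescans ();
df_clear_flags (DF_DEFER_INSN_RESCAN);
#endif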

/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
	       bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
	size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
	size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
	size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
				  bool include_defs,
				  bool include_uses,
				  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
	{
	  struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_uses)
	{
	  struct df_ref *ref = DF_REG_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_eq_uses)
	{
	  struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_assert (offset < ref_info->refs_size);
	    }
	}
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}


/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
				   bool include_defs,
				   bool include_uses,
				   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    unsigned int regno = DF_REF_REGNO (*ref_rec);
	    ref_info->count[regno]++;
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    unsigned int regno = DF_REF_REGNO (*ref_rec);
		    ref_info->count[regno]++;
		  }
	    }
	}
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
	for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
	  {
	    struct df_ref *ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }
      if (include_uses)
	for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
	  {
	    struct df_ref *ref = *ref_rec;
	    unsigned int regno = DF_REF_REGNO (ref);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (ref) = id;
		ref_info->refs[id] = ref;
	      }
	  }

      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);

	      if (include_defs)
		for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
		  {
		    struct df_ref *ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_uses)
		for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
		  {
		    struct df_ref *ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	      if (include_eq_uses)
		for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
		  {
		    struct df_ref *ref = *ref_rec;
		    unsigned int regno = DF_REF_REGNO (ref);
		    if (regno >= start)
		      {
			unsigned int id
			  = ref_info->begin[regno] + ref_info->count[regno]++;
			DF_REF_ID (ref) = id;
			ref_info->refs[id] = ref;
		      }
		  }
	    }
	}
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
			   bool include_defs,
			   bool include_uses,
			   bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
				       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
				      include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
		      struct df_ref_info *ref_info,
		      struct df_ref **ref_vec)
{
  while (*ref_vec)
    {
      struct df_ref *ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
	  || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
	{
	  ref_info->refs[offset] = ref;
	  DF_REF_ID (*ref_vec) = offset++;
	}
      ref_vec++;
    }
  return offset;
}


/* Count the number of refs in all of the insns of BB.  Include the
   defs if INCLUDE_DEFS.  Include the uses if INCLUDE_USES.  Include the
   eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
			       struct df_ref_info *ref_info,
			       bool include_defs, bool include_uses,
			       bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	unsigned int uid = INSN_UID (insn);
	if (include_defs)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_DEFS (uid));
	if (include_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_USES (uid));
	if (include_eq_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}


/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
			    bool include_defs, bool include_uses,
			    bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
	{
	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
						  include_defs, include_uses,
						  include_eq_uses);
	}

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
						include_defs, include_uses,
						include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}

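#if 0
/* Sketch (hypothetical caller): request an insn-ordered use table and
   walk it with the DF_USES_* accessors from df.h.  note_reg_use is a
   stand-in for whatever the client does with each use.  */
unsigned int i;
df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
  {
    struct df_ref *use = DF_USES_GET (i);
    if (use)
      note_reg_use (DF_REF_REGNO (use));
  }
#endif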

/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}


/* Change the BB of all refs in the ref chain to NEW_BB.
   Assumes that all refs in the chain have the same BB.
   If changed, return the original bb the chain belonged to
   (or OLD_BB if the chain was empty).
   If no change, return NEW_BB.
   If something's wrong, it will return NULL.  */

static basic_block
df_ref_chain_change_bb (struct df_ref **ref_rec,
			basic_block old_bb,
			basic_block new_bb)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;

      if (DF_REF_BB (ref) == new_bb)
	return new_bb;
      else
	{
	  gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
	  old_bb = DF_REF_BB (ref);
	  DF_REF_BB (ref) = new_bb;
	}
      ref_rec++;
    }

  return old_bb;
}


/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn)
{
  basic_block new_bb = BLOCK_FOR_INSN (insn);
  basic_block old_bb = NULL;
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
	fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
  if (old_bb == new_bb)
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
	fprintf (dump_file, "  from %d to %d\n",
		 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}


/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
			      int new_regno, rtx loc)
{
  struct df_ref *the_ref = old->reg_chain;

  while (the_ref)
    {
      if (DF_REF_LOC (the_ref) && (*DF_REF_LOC (the_ref) == loc))
	{
	  struct df_ref *next_ref = the_ref->next_reg;
	  struct df_ref *prev_ref = the_ref->prev_reg;
	  struct df_ref **ref_vec, **ref_vec_t;
	  unsigned int count = 0;

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    prev_ref->next_reg = next_ref;
	  else
	    old->reg_chain = next_ref;
	  if (next_ref)
	    next_ref->prev_reg = prev_ref;
	  old->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  the_ref->prev_reg = NULL;
	  the_ref->next_reg = new->reg_chain;
	  if (new->reg_chain)
	    new->reg_chain->prev_reg = the_ref;
	  new->reg_chain = the_ref;
	  new->n_refs++;
	  df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to resort the record that the ref was in because the
	     regno is a sorting key.  First, find the right record.  */
	  if (DF_REF_IS_ARTIFICIAL (the_ref))
	    {
	      unsigned int bb_index = DF_REF_BB (the_ref)->index;
	      if (DF_REF_REG_DEF_P (the_ref))
		ref_vec = df_get_artificial_defs (bb_index);
	      else
		ref_vec = df_get_artificial_uses (bb_index);
	    }
	  else
	    {
	      struct df_insn_info *insn_info
		= DF_INSN_GET (DF_REF_INSN (the_ref));
	      if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
		ref_vec = insn_info->eq_uses;
	      else
		ref_vec = insn_info->uses;
	      if (dump_file)
		fprintf (dump_file, "changing reg in insn %d\n",
			 INSN_UID (DF_REF_INSN (the_ref)));
	    }
	  ref_vec_t = ref_vec;

	  /* Find the length.  */
	  while (*ref_vec_t)
	    {
	      count++;
	      ref_vec_t++;
	    }
	  qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);

	  the_ref = next_ref;
	}
      else
	the_ref = the_ref->next_reg;
    }
}


/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed.  This call
   is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
				DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
				DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
				DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}

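#if 0
/* Sketch: SET_REGNO (rtl.h) is the intended entry point for renaming
   a register in place; conceptually it funnels through the function
   above so every cached ref whose location is REG follows the
   renaming.  REG and new_regno are hypothetical here.  */
df_ref_change_reg_with_loc (REGNO (reg), new_regno, reg);
#endif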
1943
1944 /* Delete the mw_hardregs that point into the eq_notes. */
1945
1946 static unsigned int
1947 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1948 {
1949 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1950 unsigned int deleted = 0;
1951 unsigned int count = 0;
1952 struct df_scan_problem_data *problem_data
1953 = (struct df_scan_problem_data *) df_scan->problem_data;
1954
1955 if (!*mw_vec)
1956 return 0;
1957
1958 while (*mw_vec)
1959 {
1960 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1961 {
struct df_mw_hardreg **temp_vec = mw_vec;

pool_free (problem_data->mw_reg_pool, *mw_vec);
/* Shove the remaining ones down one to fill the gap. While
this looks n**2, it is highly unusual to have any mw regs
in eq_notes and the chances of more than one are almost
nonexistent. */
1970 while (*temp_vec)
1971 {
1972 *temp_vec = *(temp_vec + 1);
1973 temp_vec++;
1974 }
1975 deleted++;
1976 }
1977 else
1978 {
1979 mw_vec++;
1980 count++;
1981 }
1982 }
1983
1984 if (count == 0)
1985 {
1986 free (insn_info->mw_hardregs);
1987 insn_info->mw_hardregs = df_null_mw_rec;
1988 return 0;
1989 }
1990 return deleted;
1991 }
1992
1993
1994 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1995
1996 void
1997 df_notes_rescan (rtx insn)
1998 {
1999 struct df_insn_info *insn_info;
2000 unsigned int uid = INSN_UID (insn);
2001
2002 if (!df)
2003 return;
2004
2005 /* The client has disabled rescanning and plans to do it itself. */
2006 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2007 return;
2008
2009 df_grow_bb_info (df_scan);
2010 df_grow_reg_info ();
2011
insn_info = DF_INSN_UID_SAFE_GET (uid);
2013
2014 /* The client has deferred rescanning. */
2015 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2016 {
2017 if (!insn_info)
2018 {
2019 insn_info = df_insn_create_insn_record (insn);
2020 insn_info->defs = df_null_ref_rec;
2021 insn_info->uses = df_null_ref_rec;
2022 insn_info->eq_uses = df_null_ref_rec;
2023 insn_info->mw_hardregs = df_null_mw_rec;
2024 }
2025
2026 bitmap_clear_bit (df->insns_to_delete, uid);
/* If the insn is set to be rescanned, there is no need to also
rescan its notes. */
if (!bitmap_bit_p (df->insns_to_rescan, uid))
bitmap_set_bit (df->insns_to_notes_rescan, uid);
2031 return;
2032 }
2033
2034 bitmap_clear_bit (df->insns_to_delete, uid);
2035 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2036
2037 if (insn_info)
2038 {
2039 basic_block bb = BLOCK_FOR_INSN (insn);
2040 rtx note;
2041 struct df_collection_rec collection_rec;
2042 unsigned int num_deleted;
2043
2044 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2045 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2046 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2047
2048 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2049 df_ref_chain_delete (insn_info->eq_uses);
2050 insn_info->eq_uses = NULL;
2051
/* Process REG_EQUIV/REG_EQUAL notes. */
2053 for (note = REG_NOTES (insn); note;
2054 note = XEXP (note, 1))
2055 {
2056 switch (REG_NOTE_KIND (note))
2057 {
2058 case REG_EQUIV:
2059 case REG_EQUAL:
2060 df_uses_record (&collection_rec,
2061 &XEXP (note, 0), DF_REF_REG_USE,
bb, insn, DF_REF_IN_NOTE);
break;
default:
2064 break;
2065 }
2066 }
2067
2068 /* Find some place to put any new mw_hardregs. */
2069 df_canonize_collection_rec (&collection_rec);
2070 if (collection_rec.next_mw)
2071 {
2072 unsigned int count = 0;
2073 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2074 while (*mw_rec)
2075 {
2076 count++;
2077 mw_rec++;
2078 }
2079
2080 if (count)
2081 {
2082 /* Append to the end of the existing record after
2083 expanding it if necessary. */
2084 if (collection_rec.next_mw > num_deleted)
2085 {
insn_info->mw_hardregs =
xrealloc (insn_info->mw_hardregs,
(count + 1 + collection_rec.next_mw)
* sizeof (struct df_mw_hardreg *));
2090 }
2091 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2092 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2093 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2094 sizeof (struct df_mw_hardreg *), df_mw_compare);
2095 }
2096 else
2097 {
2098 /* No vector there. */
2099 insn_info->mw_hardregs
2100 = XNEWVEC (struct df_mw_hardreg*,
2101 count + 1 + collection_rec.next_mw);
2102 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2103 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2104 }
2105 }
2106 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2107 ignore it. */
2108 collection_rec.mw_vec = NULL;
2109 collection_rec.next_mw = 0;
2110 df_refs_add_to_chains (&collection_rec, bb, insn);
2111 }
2112 else
2113 df_insn_rescan (insn);
2114
2115 }
2116
2117 \f
2118 /*----------------------------------------------------------------------------
2119 Hard core instruction scanning code. No external interfaces here,
2120 just a lot of routines that look inside insns.
2121 ----------------------------------------------------------------------------*/
2122
2123
/* Return true if the contents of two df_refs are identical.
It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2126
2127 static bool
2128 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2129 {
2130 if (!ref2)
2131 return false;
2132 return (ref1 == ref2) ||
2133 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2134 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2135 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2136 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2137 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2138 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2139 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2140 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2141 }
2142
2143
2144 /* Compare REF1 and REF2 for sorting. This is only called from places
2145 where all of the refs are of the same type, in the same insn, and
2146 have the same bb. So these fields are not checked. */
2147
2148 static int
2149 df_ref_compare (const void *r1, const void *r2)
2150 {
2151 const struct df_ref *ref1 = *(struct df_ref **)r1;
2152 const struct df_ref *ref2 = *(struct df_ref **)r2;
2153
2154 if (ref1 == ref2)
2155 return 0;
2156
2157 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2158 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2159
2160 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2161 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2162
2163 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2164 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2165 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2166
2167 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2168 {
/* If two refs are identical except that one of them is from a mw
and one is not, we need to have the one with the mw first. */
2172 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2173 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2174 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2175 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2176 return -1;
2177 else
2178 return 1;
2179 }
2180 return 0;
2181 }
2182
2183 static void
2184 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2185 {
2186 struct df_ref *tmp = ref_vec[i];
2187 ref_vec[i] = ref_vec[j];
2188 ref_vec[j] = tmp;
2189 }
2190
2191 /* Sort and compress a set of refs. */
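/* Illustrative example: given the sorted vector { A, A, B, B, B, C },
the compaction loop below frees the duplicate entries and slides the
survivors down, yielding { A, B, C } and a returned count of 3. */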
2192
2193 static unsigned int
2194 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2195 {
2196 struct df_scan_problem_data *problem_data
2197 = (struct df_scan_problem_data *) df_scan->problem_data;
2198 unsigned int i;
2199 unsigned int dist = 0;
2200
2201 ref_vec[count] = NULL;
2202 /* If there are 1 or 0 elements, there is nothing to do. */
2203 if (count < 2)
2204 return count;
2205 else if (count == 2)
2206 {
2207 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2208 df_swap_refs (ref_vec, 0, 1);
2209 }
2210 else
2211 {
2212 for (i = 0; i < count - 1; i++)
2213 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2214 break;
/* If the array is already strictly ordered, which is the most
common case for large COUNT (as happens for CALL INSNs), there is
no need to sort and filter out duplicates. Simply return the
count. Make sure DF_GET_ADD_REFS adds refs in the increasing
order of DF_REF_COMPARE. */
2222 if (i == count - 1)
2223 return count;
2224 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2225 }
2226
for (i = 0; i < count - dist; i++)
2228 {
2229 /* Find the next ref that is not equal to the current ref. */
2230 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2231 {
2232 pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
2233 dist++;
2234 }
2235 /* Copy it down to the next position. */
2236 if (dist)
2237 ref_vec[i+1] = ref_vec[i + dist + 1];
2238 }
2239
2240 count -= dist;
2241 ref_vec[count] = NULL;
2242 return count;
2243 }
2244
2245
/* Return true if the contents of two df_mw_hardregs are identical. */
2248
2249 static bool
2250 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2251 {
2252 if (!mw2)
2253 return false;
2254 return (mw1 == mw2) ||
2255 (mw1->mw_reg == mw2->mw_reg
2256 && mw1->type == mw2->type
2257 && mw1->flags == mw2->flags
2258 && mw1->start_regno == mw2->start_regno
2259 && mw1->end_regno == mw2->end_regno);
2260 }
2261
2262
2263 /* Compare MW1 and MW2 for sorting. */
2264
2265 static int
2266 df_mw_compare (const void *m1, const void *m2)
2267 {
2268 const struct df_mw_hardreg *mw1 = *(struct df_mw_hardreg **)m1;
2269 const struct df_mw_hardreg *mw2 = *(struct df_mw_hardreg **)m2;
2270
2271 if (mw1 == mw2)
2272 return 0;
2273
2274 if (mw1->type != mw2->type)
2275 return mw1->type - mw2->type;
2276
2277 if (mw1->flags != mw2->flags)
2278 return mw1->flags - mw2->flags;
2279
2280 if (mw1->start_regno != mw2->start_regno)
2281 return mw1->start_regno - mw2->start_regno;
2282
2283 if (mw1->end_regno != mw2->end_regno)
2284 return mw1->end_regno - mw2->end_regno;
2285
2286 if (mw1->mw_reg != mw2->mw_reg)
2287 return mw1->mw_order - mw2->mw_order;
2288
2289 return 0;
2290 }
2291
2292
/* Sort and compress a set of mws. */
2294
2295 static unsigned int
2296 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2297 {
2298 struct df_scan_problem_data *problem_data
2299 = (struct df_scan_problem_data *) df_scan->problem_data;
2300 unsigned int i;
2301 unsigned int dist = 0;
2302 mw_vec[count] = NULL;
2303
2304 if (count < 2)
2305 return count;
2306 else if (count == 2)
2307 {
2308 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2309 {
2310 struct df_mw_hardreg *tmp = mw_vec[0];
2311 mw_vec[0] = mw_vec[1];
2312 mw_vec[1] = tmp;
2313 }
2314 }
2315 else
2316 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2317
for (i = 0; i < count - dist; i++)
{
/* Find the next mw that is not equal to the current one. */
2321 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2322 {
2323 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2324 dist++;
2325 }
2326 /* Copy it down to the next position. */
2327 if (dist)
2328 mw_vec[i+1] = mw_vec[i + dist + 1];
2329 }
2330
2331 count -= dist;
2332 mw_vec[count] = NULL;
2333 return count;
2334 }
2335
2336
2337 /* Sort and remove duplicates from the COLLECTION_REC. */
2338
2339 static void
2340 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2341 {
2342 if (collection_rec->def_vec)
2343 collection_rec->next_def
2344 = df_sort_and_compress_refs (collection_rec->def_vec,
2345 collection_rec->next_def);
2346 if (collection_rec->use_vec)
2347 collection_rec->next_use
2348 = df_sort_and_compress_refs (collection_rec->use_vec,
2349 collection_rec->next_use);
2350 if (collection_rec->eq_use_vec)
2351 collection_rec->next_eq_use
2352 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2353 collection_rec->next_eq_use);
2354 if (collection_rec->mw_vec)
2355 collection_rec->next_mw
2356 = df_sort_and_compress_mws (collection_rec->mw_vec,
2357 collection_rec->next_mw);
2358 }
2359
2360
2361 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
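/* THIS_REF is pushed onto the front of REG_INFO's doubly-linked
reg_chain, so an existing chain H <-> T becomes THIS_REF <-> H <-> T;
if ADD_TO_TABLE, the ref is also appended to REF_INFO's big table of
refs. */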
2362
2363 static void
2364 df_install_ref (struct df_ref *this_ref,
2365 struct df_reg_info *reg_info,
2366 struct df_ref_info *ref_info,
2367 bool add_to_table)
2368 {
2369 unsigned int regno = DF_REF_REGNO (this_ref);
2370 /* Add the ref to the reg_{def,use,eq_use} chain. */
2371 struct df_ref *head = reg_info->reg_chain;
2372
2373 reg_info->reg_chain = this_ref;
2374 reg_info->n_refs++;
2375
2376 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2377 {
2378 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2379 df->hard_regs_live_count[regno]++;
2380 }
2381
2382 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2383 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2384
2385 DF_REF_NEXT_REG (this_ref) = head;
2386
2387 /* We cannot actually link to the head of the chain. */
2388 DF_REF_PREV_REG (this_ref) = NULL;
2389
2390 if (head)
2391 DF_REF_PREV_REG (head) = this_ref;
2392
2393 if (add_to_table)
2394 {
2395 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2396 df_check_and_grow_ref_info (ref_info, 1);
2397 DF_REF_ID (this_ref) = ref_info->table_size;
/* Add the ref to the big array of refs. */
2399 ref_info->refs[ref_info->table_size] = this_ref;
2400 ref_info->table_size++;
2401 }
2402 else
2403 DF_REF_ID (this_ref) = -1;
2404
2405 ref_info->total_size++;
2406 }
2407
2408
2409 /* This function takes one of the groups of refs (defs, uses or
2410 eq_uses) and installs the entire group into the insn. It also adds
2411 each of these refs into the appropriate chains. */
2412
2413 static struct df_ref **
2414 df_install_refs (basic_block bb,
2415 struct df_ref **old_vec, unsigned int count,
2416 struct df_reg_info **reg_info,
2417 struct df_ref_info *ref_info,
2418 bool is_notes)
2419 {
2420 if (count)
2421 {
2422 unsigned int i;
2423 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2424 bool add_to_table;
2425
2426 switch (ref_info->ref_order)
2427 {
2428 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2429 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2430 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2431 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2432 add_to_table = true;
2433 break;
2434 case DF_REF_ORDER_UNORDERED:
2435 case DF_REF_ORDER_BY_REG:
2436 case DF_REF_ORDER_BY_INSN:
2437 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2438 add_to_table = !is_notes;
2439 break;
2440 default:
2441 add_to_table = false;
2442 break;
2443 }
2444
2445 /* Do not add if ref is not in the right blocks. */
2446 if (add_to_table && df->analyze_subset)
2447 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2448
2449 for (i = 0; i < count; i++)
2450 {
2451 struct df_ref *this_ref = old_vec[i];
2452 new_vec[i] = this_ref;
2453 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2454 ref_info, add_to_table);
2455 }
2456
2457 new_vec[count] = NULL;
2458 return new_vec;
2459 }
2460 else
2461 return df_null_ref_rec;
2462 }
2463
2464
/* This function takes the mws and installs the entire group into
the insn. */
2467
2468 static struct df_mw_hardreg **
2469 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2470 {
2471 if (count)
2472 {
2473 struct df_mw_hardreg **new_vec
2474 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2475 memcpy (new_vec, old_vec,
2476 sizeof (struct df_mw_hardreg*) * (count + 1));
2477 return new_vec;
2478 }
2479 else
2480 return df_null_mw_rec;
2481 }
2482
2483
2484 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2485 chains and update other necessary information. */
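/* Note that when INSN is null, the collected refs are installed as
BB's artificial defs and uses rather than being attached to an insn;
see the call from df_bb_refs_record below. */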
2486
2487 static void
2488 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2489 basic_block bb, rtx insn)
2490 {
2491 if (insn)
2492 {
2493 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2494 /* If there is a vector in the collection rec, add it to the
2495 insn. A null rec is a signal that the caller will handle the
2496 chain specially. */
2497 if (collection_rec->def_vec)
2498 {
2499 if (insn_rec->defs && *insn_rec->defs)
2500 free (insn_rec->defs);
2501 insn_rec->defs
2502 = df_install_refs (bb, collection_rec->def_vec,
2503 collection_rec->next_def,
2504 df->def_regs,
2505 &df->def_info, false);
2506 }
2507 if (collection_rec->use_vec)
2508 {
2509 if (insn_rec->uses && *insn_rec->uses)
2510 free (insn_rec->uses);
2511 insn_rec->uses
2512 = df_install_refs (bb, collection_rec->use_vec,
2513 collection_rec->next_use,
2514 df->use_regs,
2515 &df->use_info, false);
2516 }
2517 if (collection_rec->eq_use_vec)
2518 {
2519 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2520 free (insn_rec->eq_uses);
2521 insn_rec->eq_uses
2522 = df_install_refs (bb, collection_rec->eq_use_vec,
2523 collection_rec->next_eq_use,
2524 df->eq_use_regs,
2525 &df->use_info, true);
2526 }
2527 if (collection_rec->mw_vec)
2528 {
2529 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2530 free (insn_rec->mw_hardregs);
2531 insn_rec->mw_hardregs
2532 = df_install_mws (collection_rec->mw_vec,
2533 collection_rec->next_mw);
2534 }
2535 }
2536 else
2537 {
2538 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2539
2540 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2541 free (bb_info->artificial_defs);
2542 bb_info->artificial_defs
2543 = df_install_refs (bb, collection_rec->def_vec,
2544 collection_rec->next_def,
2545 df->def_regs,
2546 &df->def_info, false);
2547 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2548 free (bb_info->artificial_uses);
2549 bb_info->artificial_uses
2550 = df_install_refs (bb, collection_rec->use_vec,
2551 collection_rec->next_use,
2552 df->use_regs,
2553 &df->use_info, false);
2554 }
2555 }
2556
2557
2558 /* Allocate a ref and initialize its fields. */
2559
2560 static struct df_ref *
2561 df_ref_create_structure (struct df_collection_rec *collection_rec,
2562 rtx reg, rtx *loc,
2563 basic_block bb, rtx insn,
2564 enum df_ref_type ref_type,
2565 enum df_ref_flags ref_flags)
2566 {
2567 struct df_ref *this_ref;
2568 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2569 struct df_scan_problem_data *problem_data
2570 = (struct df_scan_problem_data *) df_scan->problem_data;
2571
2572 this_ref = pool_alloc (problem_data->ref_pool);
2573 DF_REF_ID (this_ref) = -1;
2574 DF_REF_REG (this_ref) = reg;
2575 DF_REF_REGNO (this_ref) = regno;
2576 DF_REF_LOC (this_ref) = loc;
2577 DF_REF_INSN (this_ref) = insn;
2578 DF_REF_CHAIN (this_ref) = NULL;
2579 DF_REF_TYPE (this_ref) = ref_type;
2580 DF_REF_FLAGS (this_ref) = ref_flags;
2581 DF_REF_BB (this_ref) = bb;
2582 DF_REF_NEXT_REG (this_ref) = NULL;
2583 DF_REF_PREV_REG (this_ref) = NULL;
2584 DF_REF_ORDER (this_ref) = df->ref_order++;
2585
/* We need to clear this bit because fwprop, and in the future
possibly other optimizations, sometimes create new refs using old
refs as the model. */
2589 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2590
2591 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2592 if ((regno < FIRST_PSEUDO_REGISTER)
2593 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2594 {
2595 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2596 {
2597 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2598 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2599 }
2600 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2601 && (regno == FRAME_POINTER_REGNUM
2602 || regno == ARG_POINTER_REGNUM)))
2603 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2604 }
2605
2606 if (collection_rec)
2607 {
2608 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2609 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2610 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2611 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2612 else
2613 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2614 }
2615
2616 return this_ref;
2617 }
2618
2619
2620 /* Create new references of type DF_REF_TYPE for each part of register REG
2621 at address LOC within INSN of BB. */
2622
2623 static void
2624 df_ref_record (struct df_collection_rec *collection_rec,
2625 rtx reg, rtx *loc,
2626 basic_block bb, rtx insn,
2627 enum df_ref_type ref_type,
2628 enum df_ref_flags ref_flags)
2629 {
2630 rtx oldreg = reg;
2631 unsigned int regno;
2632
2633 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2634
2635 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2636 if (regno < FIRST_PSEUDO_REGISTER)
2637 {
2638 struct df_mw_hardreg *hardreg = NULL;
2639 struct df_scan_problem_data *problem_data
2640 = (struct df_scan_problem_data *) df_scan->problem_data;
2641 unsigned int i;
2642 unsigned int endregno;
2643 struct df_ref *ref;
2644
2645 if (GET_CODE (reg) == SUBREG)
2646 {
2647 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2648 SUBREG_BYTE (reg), GET_MODE (reg));
2649 endregno = regno + subreg_nregs (reg);
2650 }
2651 else
2652 endregno = END_HARD_REGNO (reg);
2653
/* If this is a multiword hardreg, we create some extra
data structures that will enable us to easily build REG_DEAD
and REG_UNUSED notes. */
2657 if ((endregno != regno + 1) && insn)
2658 {
2659 /* Sets to a subreg of a multiword register are partial.
2660 Sets to a non-subreg of a multiword register are not. */
2661 if (GET_CODE (oldreg) == SUBREG)
2662 ref_flags |= DF_REF_PARTIAL;
2663 ref_flags |= DF_REF_MW_HARDREG;
2664
2665 hardreg = pool_alloc (problem_data->mw_reg_pool);
2666 hardreg->type = ref_type;
2667 hardreg->flags = ref_flags;
2668 hardreg->mw_reg = reg;
2669 hardreg->loc = loc;
2670 hardreg->start_regno = regno;
2671 hardreg->end_regno = endregno - 1;
2672 hardreg->mw_order = df->ref_order++;
2673 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2674 }
2675
2676 for (i = regno; i < endregno; i++)
2677 {
2678 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2679 bb, insn, ref_type, ref_flags);
2680
2681 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2682 }
2683 }
else
df_ref_create_structure (collection_rec, reg, loc, bb, insn,
ref_type, ref_flags);
2690 }
2691
2692
/* A set to a non-paradoxical SUBREG for which the number of word_mode units
covered by the outer mode is smaller than that covered by the inner mode
is a read-modify-write operation.
This function returns true iff the SUBREG X is such a SUBREG. */
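/* Illustrative example (assuming a target whose REGMODE_NATURAL_SIZE
is 4 bytes): a set of (subreg:SI (reg:DI r) 0) writes only the low
natural-size unit of R, so it is a read-modify-write and this
function returns true; a set of (subreg:QI (reg:SI r) 0) is not,
because SImode already fits in a single natural-size unit. */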
2697
2698 bool
2699 df_read_modify_subreg_p (rtx x)
2700 {
2701 unsigned int isize, osize;
2702 if (GET_CODE (x) != SUBREG)
2703 return false;
2704 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2705 osize = GET_MODE_SIZE (GET_MODE (x));
2706 return isize > osize
2707 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2708 }
2709
2710
2711 /* Process all the registers defined in the rtx, X.
2712 Autoincrement/decrement definitions will be picked up by
2713 df_uses_record. */
2714
2715 static void
2716 df_def_record_1 (struct df_collection_rec *collection_rec,
2717 rtx x, basic_block bb, rtx insn,
2718 enum df_ref_flags flags)
2719 {
2720 rtx *loc;
2721 rtx dst;
2722
/* We may recursively call ourselves on EXPR_LIST when dealing with a
PARALLEL construct. */
2725 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2726 loc = &XEXP (x, 0);
2727 else
2728 loc = &SET_DEST (x);
2729 dst = *loc;
2730
2731 /* It is legal to have a set destination be a parallel. */
2732 if (GET_CODE (dst) == PARALLEL)
2733 {
2734 int i;
2735
2736 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2737 {
2738 rtx temp = XVECEXP (dst, 0, i);
2739 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2740 || GET_CODE (temp) == SET)
2741 df_def_record_1 (collection_rec,
2742 temp, bb, insn,
2743 GET_CODE (temp) == CLOBBER
2744 ? flags | DF_REF_MUST_CLOBBER : flags);
2745 }
2746 return;
2747 }
2748
/* Maybe we should flag the use of STRICT_LOW_PART somehow. It might
2750 be handy for the reg allocator. */
2751 while (GET_CODE (dst) == STRICT_LOW_PART
2752 || GET_CODE (dst) == ZERO_EXTRACT)
2753 {
2754 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2755 loc = &XEXP (dst, 0);
2756 dst = *loc;
2757 }
2758
2759 if (df_read_modify_subreg_p (dst))
2760 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2761
2762 if (REG_P (dst)
2763 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
2764 df_ref_record (collection_rec,
2765 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2766 }
2767
2768
2769 /* Process all the registers defined in the pattern rtx, X. */
2770
2771 static void
2772 df_defs_record (struct df_collection_rec *collection_rec,
2773 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2774 {
2775 RTX_CODE code = GET_CODE (x);
2776
2777 if (code == SET || code == CLOBBER)
2778 {
2779 /* Mark the single def within the pattern. */
2780 enum df_ref_flags clobber_flags = flags;
2781 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2782 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2783 }
2784 else if (code == COND_EXEC)
2785 {
2786 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2787 bb, insn, DF_REF_CONDITIONAL);
2788 }
2789 else if (code == PARALLEL)
2790 {
2791 int i;
2792
2793 /* Mark the multiple defs within the pattern. */
2794 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2795 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2796 }
2797 }
2798
2799
2800 /* Process all the registers used in the rtx at address LOC. */
2801
2802 static void
2803 df_uses_record (struct df_collection_rec *collection_rec,
2804 rtx *loc, enum df_ref_type ref_type,
2805 basic_block bb, rtx insn, enum df_ref_flags flags)
2806 {
2807 RTX_CODE code;
2808 rtx x;
2809
2810 retry:
2811 x = *loc;
2812 if (!x)
2813 return;
2814 code = GET_CODE (x);
2815 switch (code)
2816 {
2817 case LABEL_REF:
2818 case SYMBOL_REF:
2819 case CONST_INT:
2820 case CONST:
2821 case CONST_DOUBLE:
2822 case CONST_VECTOR:
2823 case PC:
2824 case CC0:
2825 case ADDR_VEC:
2826 case ADDR_DIFF_VEC:
2827 return;
2828
2829 case CLOBBER:
2830 /* If we are clobbering a MEM, mark any registers inside the address
2831 as being used. */
2832 if (MEM_P (XEXP (x, 0)))
2833 df_uses_record (collection_rec,
2834 &XEXP (XEXP (x, 0), 0),
2835 DF_REF_REG_MEM_STORE, bb, insn, flags);
2836
2837 /* If we're clobbering a REG then we have a def so ignore. */
2838 return;
2839
2840 case MEM:
2841 df_uses_record (collection_rec,
2842 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2843 bb, insn, flags & DF_REF_IN_NOTE);
2844 return;
2845
2846 case SUBREG:
2847 /* While we're here, optimize this case. */
2848 flags |= DF_REF_PARTIAL;
2849 /* In case the SUBREG is not of a REG, do not optimize. */
2850 if (!REG_P (SUBREG_REG (x)))
2851 {
2852 loc = &SUBREG_REG (x);
2853 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
2854 return;
2855 }
2856 /* ... Fall through ... */
2857
2858 case REG:
2859 df_ref_record (collection_rec,
2860 x, loc, bb, insn, ref_type, flags);
2861 return;
2862
2863 case SET:
2864 {
2865 rtx dst = SET_DEST (x);
2866 gcc_assert (!(flags & DF_REF_IN_NOTE));
2867 df_uses_record (collection_rec,
2868 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
2869
2870 switch (GET_CODE (dst))
2871 {
2872 case SUBREG:
2873 if (df_read_modify_subreg_p (dst))
2874 {
2875 df_uses_record (collection_rec, &SUBREG_REG (dst),
2876 DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
2877 break;
2878 }
2879 /* Fall through. */
2880 case REG:
2881 case PARALLEL:
2882 case SCRATCH:
2883 case PC:
2884 case CC0:
2885 break;
2886 case MEM:
2887 df_uses_record (collection_rec, &XEXP (dst, 0),
2888 DF_REF_REG_MEM_STORE, bb, insn, flags);
2889 break;
2890 case STRICT_LOW_PART:
2891 {
2892 rtx *temp = &XEXP (dst, 0);
2893 /* A strict_low_part uses the whole REG and not just the
2894 SUBREG. */
2895 dst = XEXP (dst, 0);
2896 df_uses_record (collection_rec,
2897 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2898 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2899 }
2900 break;
2901 case ZERO_EXTRACT:
2902 case SIGN_EXTRACT:
2903 df_uses_record (collection_rec, &XEXP (dst, 0),
2904 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2905 df_uses_record (collection_rec, &XEXP (dst, 1),
2906 DF_REF_REG_USE, bb, insn, flags);
2907 df_uses_record (collection_rec, &XEXP (dst, 2),
2908 DF_REF_REG_USE, bb, insn, flags);
2909 dst = XEXP (dst, 0);
2910 break;
2911 default:
2912 gcc_unreachable ();
2913 }
2914 return;
2915 }
2916
2917 case RETURN:
2918 break;
2919
2920 case ASM_OPERANDS:
2921 case UNSPEC_VOLATILE:
2922 case TRAP_IF:
2923 case ASM_INPUT:
2924 {
2925 /* Traditional and volatile asm instructions must be
2926 considered to use and clobber all hard registers, all
2927 pseudo-registers and all of memory. So must TRAP_IF and
2928 UNSPEC_VOLATILE operations.
2929
2930 Consider for instance a volatile asm that changes the fpu
2931 rounding mode. An insn should not be moved across this
2932 even if it only uses pseudo-regs because it might give an
2933 incorrectly rounded result.
2934
2935 However, flow.c's liveness computation did *not* do this,
2936 giving the reasoning as " ?!? Unfortunately, marking all
2937 hard registers as live causes massive problems for the
2938 register allocator and marking all pseudos as live creates
2939 mountains of uninitialized variable warnings."
2940
In order to maintain the status quo with regard to liveness
and uses, we do what flow.c did and just mark any regs we
can find in ASM_OPERANDS as used. In global.c, asm insns are
scanned and regs_asm_clobbered is filled out.

For all ASM_OPERANDS, we must traverse the vector of input
operands. We cannot just fall through here since then we
would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
which does not indicate a traditional asm, unlike its normal
usage. */
2951 if (code == ASM_OPERANDS)
2952 {
2953 int j;
2954
2955 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
2956 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
2957 DF_REF_REG_USE, bb, insn, flags);
2958 return;
2959 }
2960 break;
2961 }
2962
2963 case PRE_DEC:
2964 case POST_DEC:
2965 case PRE_INC:
2966 case POST_INC:
2967 case PRE_MODIFY:
2968 case POST_MODIFY:
2969 /* Catch the def of the register being modified. */
2970 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
2971 DF_REF_REG_DEF,
2972 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
2973
2974 /* ... Fall through to handle uses ... */
2975
2976 default:
2977 break;
2978 }
2979
2980 /* Recursively scan the operands of this expression. */
2981 {
2982 const char *fmt = GET_RTX_FORMAT (code);
2983 int i;
2984
2985 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2986 {
2987 if (fmt[i] == 'e')
2988 {
2989 /* Tail recursive case: save a function call level. */
2990 if (i == 0)
2991 {
2992 loc = &XEXP (x, 0);
2993 goto retry;
2994 }
2995 df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
2996 }
2997 else if (fmt[i] == 'E')
2998 {
2999 int j;
3000 for (j = 0; j < XVECLEN (x, i); j++)
3001 df_uses_record (collection_rec,
3002 &XVECEXP (x, i, j), ref_type, bb, insn, flags);
3003 }
3004 }
3005 }
3006
3007 return;
3008 }
3009
3010
/* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3012
3013 static void
3014 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3015 {
3016 unsigned int i;
3017 for (i = 0; i < collection_rec->next_def; i++)
3018 {
3019 struct df_ref *ref = collection_rec->def_vec[i];
3020 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3021 {
3022 struct df_ref *use
3023 = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3024 DF_REF_LOC (ref), DF_REF_BB (ref),
3025 DF_REF_INSN (ref), DF_REF_REG_USE,
3026 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3027 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3028 }
3029 }
3030 }
3031
3032
3033 /* Get call's extra defs and uses. */
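/* Illustrative summary of what is added beyond the refs found in the
call's own PATTERN: a use of the stack pointer, uses of any global
registers, uses and clobbers taken from CALL_INSN_FUNCTION_USAGE, and
DF_REF_MAY_CLOBBER defs for the registers in df_invalidated_by_call
that the call does not already set. */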
3034
3035 static void
3036 df_get_call_refs (struct df_collection_rec * collection_rec,
3037 basic_block bb,
3038 rtx insn,
3039 enum df_ref_flags flags)
3040 {
3041 rtx note;
3042 bitmap_iterator bi;
3043 unsigned int ui;
3044 bool is_sibling_call;
3045 unsigned int i;
3046 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3047
3048 /* Do not generate clobbers for registers that are the result of the
3049 call. This causes ordering problems in the chain building code
3050 depending on which def is seen first. */
for (i = 0; i < collection_rec->next_def; i++)
3052 {
3053 struct df_ref *def = collection_rec->def_vec[i];
3054 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3055 }
3056
/* Record the registers used to pass arguments and the registers
explicitly noted as clobbered. */
3059 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3060 note = XEXP (note, 1))
3061 {
3062 if (GET_CODE (XEXP (note, 0)) == USE)
3063 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3064 DF_REF_REG_USE, bb, insn, flags);
3065 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3066 {
3067 if (REG_P (XEXP (XEXP (note, 0), 0)))
3068 {
3069 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3070 if (!bitmap_bit_p (defs_generated, regno))
3071 df_defs_record (collection_rec, XEXP (note, 0), bb,
3072 insn, flags);
3073 }
3074 else
3075 df_uses_record (collection_rec, &XEXP (note, 0),
3076 DF_REF_REG_USE, bb, insn, flags);
3077 }
3078 }
3079
3080 /* The stack ptr is used (honorarily) by a CALL insn. */
3081 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3082 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
3083
3084 /* Calls may also reference any of the global registers,
3085 so they are recorded as used. */
3086 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3087 if (global_regs[i])
3088 df_ref_record (collection_rec, regno_reg_rtx[i],
3089 NULL, bb, insn, DF_REF_REG_USE, flags);
3090
3091 is_sibling_call = SIBLING_CALL_P (insn);
3092 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3093 {
3094 if ((!bitmap_bit_p (defs_generated, ui))
3095 && (!is_sibling_call
3096 || !bitmap_bit_p (df->exit_block_uses, ui)
3097 || refers_to_regno_p (ui, ui+1,
3098 current_function_return_rtx, NULL)))
3099
3100 df_ref_record (collection_rec, regno_reg_rtx[ui],
3101 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
3102 }
3103
3104 BITMAP_FREE (defs_generated);
3105 return;
3106 }
3107
/* Collect all refs in the INSN. This function is free of side
effects - it will create and return a list of df_refs in the
COLLECTION_REC without putting those refs into the existing ref
chains and reg chains. */
3112
3113 static void
3114 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3115 basic_block bb, rtx insn)
3116 {
3117 rtx note;
3118 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3119
3120 /* Clear out the collection record. */
3121 collection_rec->next_def = 0;
3122 collection_rec->next_use = 0;
3123 collection_rec->next_eq_use = 0;
3124 collection_rec->next_mw = 0;
3125
3126 /* Record register defs. */
3127 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3128
/* Process REG_EQUIV/REG_EQUAL notes. */
3130 for (note = REG_NOTES (insn); note;
3131 note = XEXP (note, 1))
3132 {
3133 switch (REG_NOTE_KIND (note))
3134 {
3135 case REG_EQUIV:
3136 case REG_EQUAL:
3137 df_uses_record (collection_rec,
3138 &XEXP (note, 0), DF_REF_REG_USE,
3139 bb, insn, DF_REF_IN_NOTE);
3140 break;
3141 case REG_NON_LOCAL_GOTO:
3142 /* The frame ptr is used by a non-local goto. */
3143 df_ref_record (collection_rec,
3144 regno_reg_rtx[FRAME_POINTER_REGNUM],
3145 NULL,
3146 bb, insn,
3147 DF_REF_REG_USE, 0);
3148 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3149 df_ref_record (collection_rec,
3150 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3151 NULL,
3152 bb, insn,
3153 DF_REF_REG_USE, 0);
3154 #endif
3155 break;
3156 default:
3157 break;
3158 }
3159 }
3160
3161 if (CALL_P (insn))
3162 df_get_call_refs (collection_rec, bb, insn,
3163 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3164
3165 /* Record the register uses. */
3166 df_uses_record (collection_rec,
3167 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
3168
3169 /* DF_REF_CONDITIONAL needs corresponding USES. */
3170 if (is_cond_exec)
3171 df_get_conditional_uses (collection_rec);
3172
3173 df_canonize_collection_rec (collection_rec);
3174 }
3175
/* Return true if any pred of BB is an EH edge. */
3177
3178 bool
3179 df_has_eh_preds (basic_block bb)
3180 {
3181 edge e;
3182 edge_iterator ei;
3183
3184 FOR_EACH_EDGE (e, ei, bb->preds)
3185 {
3186 if (e->flags & EDGE_EH)
3187 return true;
3188 }
3189 return false;
3190 }
3191
3192
3193 /* Recompute the luids for the insns in BB. */
3194
3195 void
3196 df_recompute_luids (basic_block bb)
3197 {
3198 rtx insn;
3199 int luid = 0;
3200
3201 df_grow_insn_info ();
3202
3203 /* Scan the block an insn at a time from beginning to end. */
3204 FOR_BB_INSNS (bb, insn)
3205 {
3206 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3207 /* Inserting labels does not always trigger the incremental
3208 rescanning. */
3209 if (!insn_info)
3210 {
3211 gcc_assert (!INSN_P (insn));
3212 df_insn_create_insn_record (insn);
3213 }
3214
3215 DF_INSN_LUID (insn) = luid;
3216 if (INSN_P (insn))
3217 luid++;
3218 }
3219 }
3220
3221
3222 /* Returns true if the function entry needs to
3223 define the static chain register. */
3224
3225 static bool
3226 df_need_static_chain_reg (struct function *fun)
3227 {
3228 tree fun_context = decl_function_context (fun->decl);
return fun_context
&& !DECL_NO_STATIC_CHAIN (fun_context);
3231 }
3232
3233
3234 /* Collect all artificial refs at the block level for BB and add them
3235 to COLLECTION_REC. */
3236
3237 static void
3238 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3239 {
3240 collection_rec->next_def = 0;
3241 collection_rec->next_use = 0;
3242 collection_rec->next_eq_use = 0;
3243 collection_rec->next_mw = 0;
3244
3245 if (bb->index == ENTRY_BLOCK)
3246 {
3247 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3248 return;
3249 }
3250 else if (bb->index == EXIT_BLOCK)
3251 {
3252 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3253 return;
3254 }
3255
3256 #ifdef EH_RETURN_DATA_REGNO
3257 if (df_has_eh_preds (bb))
3258 {
3259 unsigned int i;
3260 /* Mark the registers that will contain data for the handler. */
3261 for (i = 0; ; ++i)
3262 {
3263 unsigned regno = EH_RETURN_DATA_REGNO (i);
3264 if (regno == INVALID_REGNUM)
3265 break;
3266 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3267 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3268 }
3269 }
3270 #endif
3271
3272
3273 #ifdef EH_USES
3274 if (df_has_eh_preds (bb))
3275 {
3276 unsigned int i;
3277 /* This code is putting in an artificial ref for the use at the
3278 TOP of the block that receives the exception. It is too
3279 cumbersome to actually put the ref on the edge. We could
3280 either model this at the top of the receiver block or the
3281 bottom of the sender block.
3282
The bottom of the sender block is problematic because not all
out-edges of a block are eh-edges. However, it is true
3285 that all edges into a block are either eh-edges or none of
3286 them are eh-edges. Thus, we can model this at the top of the
3287 eh-receiver for all of the edges at once. */
3288 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3289 if (EH_USES (i))
3290 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3291 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
3292 }
3293 #endif
3294
3295 /* Add the hard_frame_pointer if this block is the target of a
3296 non-local goto. */
3297 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3298 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3299 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3300
3301 /* Add the artificial uses. */
3302 if (bb->index >= NUM_FIXED_BLOCKS)
3303 {
3304 bitmap_iterator bi;
3305 unsigned int regno;
3306 bitmap au = df_has_eh_preds (bb)
3307 ? df->eh_block_artificial_uses
3308 : df->regular_block_artificial_uses;
3309
3310 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3311 {
3312 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3313 bb, NULL, DF_REF_REG_USE, 0);
3314 }
3315 }
3316
3317 df_canonize_collection_rec (collection_rec);
3318 }
3319
3320
/* Record all the refs within the basic block BB_INDEX and scan the
instructions if SCAN_INSNS. */
3322
3323 void
3324 df_bb_refs_record (int bb_index, bool scan_insns)
3325 {
3326 basic_block bb = BASIC_BLOCK (bb_index);
3327 rtx insn;
3328 int luid = 0;
3329 struct df_scan_bb_info *bb_info;
3330 struct df_collection_rec collection_rec;
3331 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3332 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3333 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3334 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
3335
3336 if (!df)
3337 return;
3338
3339 bb_info = df_scan_get_bb_info (bb_index);
3340
3341 /* Need to make sure that there is a record in the basic block info. */
3342 if (!bb_info)
3343 {
3344 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3345 df_scan_set_bb_info (bb_index, bb_info);
3346 bb_info->artificial_defs = NULL;
3347 bb_info->artificial_uses = NULL;
3348 }
3349
3350 if (scan_insns)
3351 /* Scan the block an insn at a time from beginning to end. */
3352 FOR_BB_INSNS (bb, insn)
3353 {
3354 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3355 gcc_assert (!insn_info);
3356
3357 df_insn_create_insn_record (insn);
3358 if (INSN_P (insn))
3359 {
3360 /* Record refs within INSN. */
3361 DF_INSN_LUID (insn) = luid++;
3362 df_insn_refs_collect (&collection_rec, bb, insn);
3363 df_refs_add_to_chains (&collection_rec, bb, insn);
3364 }
3365 DF_INSN_LUID (insn) = luid;
3366 }
3367
/* Other block-level artificial refs. */
3369 df_bb_refs_collect (&collection_rec, bb);
3370 df_refs_add_to_chains (&collection_rec, bb, NULL);
3371
3372 /* Now that the block has been processed, set the block as dirty so
3373 lr and ur will get it processed. */
3374 df_set_bb_dirty (bb);
3375 }
3376
3377
3378 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3379 block. */
3380
3381 static void
3382 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3383 {
3384 bitmap_clear (regular_block_artificial_uses);
3385
3386 if (reload_completed)
3387 {
3388 if (frame_pointer_needed)
3389 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3390 }
3391 else
3392 /* Before reload, there are a few registers that must be forced
3393 live everywhere -- which might not already be the case for
3394 blocks within infinite loops. */
3395 {
3396 /* Any reference to any pseudo before reload is a potential
3397 reference of the frame pointer. */
3398 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3399
3400 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3401 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3402 #endif
3403
3404 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3405 /* Pseudos with argument area equivalences may require
3406 reloading via the argument pointer. */
3407 if (fixed_regs[ARG_POINTER_REGNUM])
3408 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3409 #endif
3410
3411 /* Any constant, or pseudo with constant equivalences, may
3412 require reloading from memory using the pic register. */
3413 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3414 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3415 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3416 }
3417 /* The all-important stack pointer must always be live. */
3418 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3419 }
3420
3421
3422 /* Get the artificial use set for an eh block. */
3423
3424 static void
3425 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3426 {
3427 bitmap_clear (eh_block_artificial_uses);
3428
/* The following code (down through the arg_pointer setting) APPEARS
to be necessary because there is nothing that actually describes
what the exception handling code may need to keep alive. */
3433 if (reload_completed)
3434 {
3435 if (frame_pointer_needed)
3436 {
3437 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3438 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3439 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3440 #endif
3441 }
3442 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3443 if (fixed_regs[ARG_POINTER_REGNUM])
3444 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3445 #endif
3446 }
3447 }
3448
3449
3450 \f
3451 /*----------------------------------------------------------------------------
3452 Specialized hard register scanning functions.
3453 ----------------------------------------------------------------------------*/
3454
3455
3456 /* Mark a register in SET. Hard registers in large modes get all
3457 of their component registers set as well. */
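/* For example (assuming a 32-bit target where hard_regno_nregs gives
2 for a DImode value), marking (reg:DI 0) sets bits 0 and 1 in SET,
as computed from hard_regno_nregs below. */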
3458
3459 static void
3460 df_mark_reg (rtx reg, void *vset)
3461 {
3462 bitmap set = (bitmap) vset;
3463 int regno = REGNO (reg);
3464
3465 gcc_assert (GET_MODE (reg) != BLKmode);
3466
3467 bitmap_set_bit (set, regno);
3468 if (regno < FIRST_PSEUDO_REGISTER)
3469 {
3470 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3471 while (--n > 0)
3472 bitmap_set_bit (set, regno + n);
3473 }
3474 }
3475
3476
3477
/* Set the bit for regs that are considered to be defined at the entry. */
3480
3481 static void
3482 df_get_entry_block_def_set (bitmap entry_block_defs)
3483 {
3484 rtx r;
3485 int i;
3486
3487 bitmap_clear (entry_block_defs);
3488
3489 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3490 {
3491 if (FUNCTION_ARG_REGNO_P (i))
3492 #ifdef INCOMING_REGNO
3493 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3494 #else
3495 bitmap_set_bit (entry_block_defs, i);
3496 #endif
3497 }
3498
3499 /* Once the prologue has been generated, all of these registers
3500 should just show up in the first regular block. */
3501 if (HAVE_prologue && epilogue_completed)
3502 {
3503 /* Defs for the callee saved registers are inserted so that the
3504 pushes have some defining location. */
3505 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3506 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3507 bitmap_set_bit (entry_block_defs, i);
3508 }
3509 else
3510 {
3511 /* The always important stack pointer. */
3512 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3513
3514 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3515 only STATIC_CHAIN_REGNUM is defined. If they are different,
3516 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3517 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3518 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3519 #else
3520 #ifdef STATIC_CHAIN_REGNUM
3521 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3522 #endif
3523 #endif
3524
3525 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3526 if (r && REG_P (r))
3527 bitmap_set_bit (entry_block_defs, REGNO (r));
3528 }
3529
3530 if ((!reload_completed) || frame_pointer_needed)
3531 {
3532 /* Any reference to any pseudo before reload is a potential
3533 reference of the frame pointer. */
3534 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3535 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3536 /* If they are different, also mark the hard frame pointer as live. */
3537 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3538 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3539 #endif
3540 }
3541
3542 /* These registers are live everywhere. */
3543 if (!reload_completed)
3544 {
3545 #ifdef EH_USES
3546 /* The ia-64, the only machine that uses this, does not define these
3547 until after reload. */
3548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3549 if (EH_USES (i))
3550 {
3551 bitmap_set_bit (entry_block_defs, i);
3552 }
3553 #endif
3554
3555 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3556 /* Pseudos with argument area equivalences may require
3557 reloading via the argument pointer. */
3558 if (fixed_regs[ARG_POINTER_REGNUM])
3559 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3560 #endif
3561
3562 #ifdef PIC_OFFSET_TABLE_REGNUM
3563 /* Any constant, or pseudo with constant equivalences, may
3564 require reloading from memory using the pic register. */
3565 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3566 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3567 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3568 #endif
3569 }
3570
3571 #ifdef INCOMING_RETURN_ADDR_RTX
3572 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3573 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3574 #endif
3575
3576 targetm.live_on_entry (entry_block_defs);
3577
3578 /* If the function has an incoming STATIC_CHAIN,
3579 it has to show up in the entry def set. */
3580 if (df_need_static_chain_reg (cfun))
3581 {
3582 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3583 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3584 #else
3585 #ifdef STATIC_CHAIN_REGNUM
3586 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3587 #endif
3588 #endif
3589 }
3590 }
3591
3592
3593 /* Return the (conservative) set of hard registers that are defined on
3594 entry to the function.
It uses df->entry_block_defs to determine which register
references to include. */
3597
3598 static void
3599 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3600 bitmap entry_block_defs)
3601 {
3602 unsigned int i;
3603 bitmap_iterator bi;
3604
3605 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3606 {
3607 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3608 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3609 }
3610
3611 df_canonize_collection_rec (collection_rec);
3612 }
3613
3614
3615 /* Record the (conservative) set of hard registers that are defined on
3616 entry to the function. */
3617
3618 static void
3619 df_record_entry_block_defs (bitmap entry_block_defs)
3620 {
3621 struct df_collection_rec collection_rec;
3622 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3623 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3624
3625 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3626
/* Process bb_refs chain. */
3628 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3629 }
3630
3631
/* Update the defs in the entry block. */
3633
3634 void
3635 df_update_entry_block_defs (void)
3636 {
3637 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3638 bool changed = false;
3639
3640 df_get_entry_block_def_set (refs);
3641 if (df->entry_block_defs)
3642 {
3643 if (!bitmap_equal_p (df->entry_block_defs, refs))
3644 {
3645 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3646 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3647 df_ref_chain_delete (bb_info->artificial_defs);
3648 bb_info->artificial_defs = NULL;
3649 changed = true;
3650 }
3651 }
3652 else
3653 {
3654 struct df_scan_problem_data *problem_data
3655 = (struct df_scan_problem_data *) df_scan->problem_data;
3656 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3657 changed = true;
3658 }
3659
3660 if (changed)
3661 {
3662 df_record_entry_block_defs (refs);
3663 bitmap_copy (df->entry_block_defs, refs);
3664 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3665 }
3666 BITMAP_FREE (refs);
3667 }
3668
3669
3670 /* Set the bit for regs that are considered being used at the exit. */
3671
3672 static void
3673 df_get_exit_block_use_set (bitmap exit_block_uses)
3674 {
3675 unsigned int i;
3676
3677 bitmap_clear (exit_block_uses);
3678
3679 /* Stack pointer is always live at the exit. */
3680 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3681
3682 /* Mark the frame pointer if needed at the end of the function.
3683 If we end up eliminating it, it will be removed from the live
3684 list of each basic block by reload. */
3685
3686 if ((!reload_completed) || frame_pointer_needed)
3687 {
3688 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3689 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3690 /* If they are different, also mark the hard frame pointer as live. */
3691 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3692 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3693 #endif
3694 }
3695
3696 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3697 /* Many architectures have a GP register even without flag_pic.
3698 Assume the pic register is not in use, or will be handled by
3699 other means, if it is not fixed. */
3700 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3701 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3702 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3703 #endif
3704
3705 /* Mark all global registers, and all registers used by the
3706 epilogue as being live at the end of the function since they
3707 may be referenced by our caller. */
3708 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3709 if (global_regs[i] || EPILOGUE_USES (i))
3710 bitmap_set_bit (exit_block_uses, i);
3711
3712 if (HAVE_epilogue && epilogue_completed)
3713 {
3714 /* Mark all call-saved registers that we actually used. */
3715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3716 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3717 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3718 bitmap_set_bit (exit_block_uses, i);
3719 }
3720
3721 #ifdef EH_RETURN_DATA_REGNO
3722 /* Mark the registers that will contain data for the handler. */
3723 if (reload_completed && current_function_calls_eh_return)
3724 for (i = 0; ; ++i)
3725 {
3726 unsigned regno = EH_RETURN_DATA_REGNO (i);
3727 if (regno == INVALID_REGNUM)
3728 break;
3729 bitmap_set_bit (exit_block_uses, regno);
3730 }
3731 #endif
3732
3733 #ifdef EH_RETURN_STACKADJ_RTX
3734 if ((!HAVE_epilogue || ! epilogue_completed)
3735 && current_function_calls_eh_return)
3736 {
3737 rtx tmp = EH_RETURN_STACKADJ_RTX;
3738 if (tmp && REG_P (tmp))
3739 df_mark_reg (tmp, exit_block_uses);
3740 }
3741 #endif
3742
3743 #ifdef EH_RETURN_HANDLER_RTX
3744 if ((!HAVE_epilogue || ! epilogue_completed)
3745 && current_function_calls_eh_return)
3746 {
3747 rtx tmp = EH_RETURN_HANDLER_RTX;
3748 if (tmp && REG_P (tmp))
3749 df_mark_reg (tmp, exit_block_uses);
3750 }
3751 #endif
3752
3753 /* Mark function return value. */
3754 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3755 }
3756
3757
/* Return the refs of hard registers that are used in the exit block.
It uses df->exit_block_uses to determine which registers to include. */
3760
3761 static void
3762 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3763 {
3764 unsigned int i;
3765 bitmap_iterator bi;
3766
3767 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3768 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3769 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3770
3771 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3772 /* It is deliberate that this is not put in the exit block uses but
3773 I do not know why. */
3774 if (reload_completed
3775 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3776 && df_has_eh_preds (EXIT_BLOCK_PTR)
3777 && fixed_regs[ARG_POINTER_REGNUM])
3778 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3779 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3780 #endif
3781
3782 df_canonize_collection_rec (collection_rec);
3783 }
3784
3785
3786 /* Record the set of hard registers that are used in the exit block.
3787 It uses df->exit_block_uses to determine which bit to include. */
3788
3789 static void
3790 df_record_exit_block_uses (bitmap exit_block_uses)
3791 {
3792 struct df_collection_rec collection_rec;
3793 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3794 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3795
3796 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3797
/* Process bb_refs chain. */
3799 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3800 }


/* Update the uses in the exit block.  */

void
df_update_exit_block_uses (void)
{
  bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool changed = false;

  df_get_exit_block_use_set (refs);
  if (df->exit_block_uses)
    {
      if (!bitmap_equal_p (df->exit_block_uses, refs))
	{
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_uses = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_exit_block_uses (refs);
      bitmap_copy (df->exit_block_uses, refs);
      df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
    }
  BITMAP_FREE (refs);
}

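/* A hedged usage sketch (hypothetical caller, not part of this file;
   REGNO stands for any hard register number): a pass that changes
   regs_ever_live can refresh the artificial exit uses either by
   calling this directly or via df_update_entry_exit_and_calls below,
   which also refreshes the entry block defs and rescans call insns:

     df_set_regs_ever_live (regno, true);
     df_update_entry_exit_and_calls ();
*/
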
static bool initialized = false;


/* Initialize some platform-specific structures.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}

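/* A hedged usage sketch (hypothetical code, not part of this file)
   showing why keeping the bitmap copy is convenient: unlike the
   HARD_REG_SET original, df_invalidated_by_call can be fed straight
   to the bitmap iterators and set operations:

     bitmap_iterator bi;
     unsigned int regno;
     EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
       fprintf (stderr, "call clobbers hard reg %u\n", regno);
*/
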

/* Recompute the parts of scanning that are based on regs_ever_live
   because something changed in that array.  */

void
df_update_entry_exit_and_calls (void)
{
  basic_block bb;

  df_update_entry_block_defs ();
  df_update_exit_block_uses ();

  /* The call insns need to be rescanned because there may be changes
     in the set of registers clobbered across the call.  */
  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn) && CALL_P (insn))
	    df_insn_rescan (insn);
	}
    }
}


/* Return true if hard REG is actually used in some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions under which this array is set.  */

bool
df_hard_reg_used_p (unsigned int reg)
{
  gcc_assert (df);
  return df->hard_regs_live_count[reg] != 0;
}


/* A count of the number of times REG is actually used in some
   instruction.  There are a fair number of conditions that affect
   the setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions under which this array
   is set.  */

unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  gcc_assert (df);
  return df->hard_regs_live_count[reg];
}

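/* A hedged usage sketch (hypothetical pass code, not part of this
   file): a pass hunting for a free scratch register might reject any
   hard register the function actually touches:

     if (call_used_regs[regno] && !df_hard_reg_used_p (regno))
       ... regno is a candidate scratch register ...
*/
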

/* Get the value of regs_ever_live[REGNO].  */

bool
df_regs_ever_live_p (unsigned int regno)
{
  return regs_ever_live[regno];
}


/* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
   to change, schedule that change for the next update.  */

void
df_set_regs_ever_live (unsigned int regno, bool value)
{
  if (regs_ever_live[regno] == value)
    return;

  regs_ever_live[regno] = value;
  if (df)
    df->redo_entry_and_exit = true;
}


/* Compute "regs_ever_live" information from the underlying df
   information.  Set the vector to all false if RESET.  */

void
df_compute_regs_ever_live (bool reset)
{
  unsigned int i;
  bool changed = df->redo_entry_and_exit;

  if (reset)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
      {
	regs_ever_live[i] = true;
	changed = true;
      }
  if (changed)
    df_update_entry_exit_and_calls ();
  df->redo_entry_and_exit = false;
}

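/* A hedged usage sketch (hypothetical caller, not part of this file):
   df_set_regs_ever_live above only schedules the entry/exit
   recomputation via df->redo_entry_and_exit; it is a later call to
   df_compute_regs_ever_live that actually performs it:

     df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);
     ...
     df_compute_regs_ever_live (false);
*/
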
\f
/*----------------------------------------------------------------------------
   Dataflow ref information verification functions.

   df_reg_chain_mark (refs, regno, is_def, is_eq_use)
   df_reg_chain_verify_unmarked (refs)
   df_refs_verify (ref*, ref*, bool)
   df_mws_verify (mw*, mw*, bool)
   df_insn_refs_verify (collection_rec, bb, insn, bool)
   df_bb_refs_verify (bb, refs, bool)
   df_bb_verify (bb)
   df_exit_block_bitmap_verify (bool)
   df_entry_block_bitmap_verify (bool)
   df_scan_verify ()
----------------------------------------------------------------------------*/

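/* A hedged usage sketch (hypothetical, not part of this file): a
   checking build might run the whole battery after a pass that has
   rewritten insns, e.g.:

     #ifdef ENABLE_CHECKING
       df_scan_verify ();
     #endif
*/
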

/* Mark all refs in the reg chain.  Verify that all of the registers
   are in the correct chain.  */

static unsigned int
df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  struct df_ref *ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
      else
	gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      if (ref->next_reg)
	gcc_assert (ref->next_reg->prev_reg == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}


/* Verify that all of the registers in the chain are unmarked.  */

static void
df_reg_chain_verify_unmarked (struct df_ref *refs)
{
  struct df_ref *ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
		bool abort_if_fail)
{
  while ((*new_rec) && (*old_rec))
    {
      if (!df_ref_equal_p (*new_rec, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* ABORT_IF_FAIL is set when we are called from the
	 function-level verifier.  In that context, record that this
	 ref has been seen by clearing its mark.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
	  DF_REF_REG_UNMARK (*old_rec);
	}

      new_rec++;
      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
  else
    return ((*new_rec == NULL) && (*old_rec == NULL));
  return false;
}


/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
	       bool abort_if_fail)
{
  while ((*new_rec) && (*old_rec))
    {
      if (!df_mw_equal_p (*new_rec, *old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      new_rec++;
      old_rec++;
    }

  if (abort_if_fail)
    gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
  else
    return ((*new_rec == NULL) && (*old_rec == NULL));
  return false;
}


/* Return true if the existing insn refs information is complete and
   correct.  Otherwise (i.e. if there are any missing or extra refs),
   return false; the correct set of refs is left in COLLECTION_REC.

   If ABORT_IF_FAIL is set, leave the refs that are verified (already
   in the ref chain) as DF_REF_MARKED().  If it is false, this is the
   per-insn verification mode rather than the whole-function mode, so
   unmark everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);

  df_insn_refs_collect (collection_rec, bb, insn);

  if (!DF_INSN_UID_DEFS (uid))
    {
      /* The insn_rec was created but it was never filled out.  */
      if (abort_if_fail)
	gcc_assert (0);
      else
	return false;
    }

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
			abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}


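/* A hedged usage sketch (hypothetical caller, not part of this file):
   with ABORT_IF_FAIL false, df_insn_refs_verify can probe whether a
   single insn's refs are still up to date, mirroring the scratch
   vectors that df_bb_verify below allocates:

     struct df_collection_rec rec;
     memset (&rec, 0, sizeof (rec));
     rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
     rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
     rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
     rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
     if (!df_insn_refs_verify (&rec, bb, insn, false))
       df_insn_rescan (insn);
     df_free_collection_rec (&rec);
*/
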
/* Return true if all refs in the basic block are correct and complete.
   As a side effect of df_refs_verify, every ref that is verified has
   its DF_REF_MARK bit cleared.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);

  gcc_assert (bb_info);

  /* Scan the block, an insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}


/* Returns true if the entry block has a correct and complete df_ref
   set.  If not, it either aborts (if ABORT_IF_FAIL is true) or
   returns false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  df_get_entry_block_def_set (entry_block_defs);

  is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  BITMAP_FREE (entry_block_defs);

  return is_eq;
}


/* Returns true if the exit block has a correct and complete df_ref
   set.  If not, it either aborts (if ABORT_IF_FAIL is true) or
   returns false.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  bool is_eq;

  df_get_exit_block_use_set (exit_block_uses);

  is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      print_current_pass (stderr);
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  BITMAP_FREE (exit_block_uses);

  return is_eq;
}


/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap regular_block_artificial_uses;
  bitmap eh_block_artificial_uses;

  if (!df)
    return;

  /* This is a hack, but a necessary one.  If you do not do this,
     insn_attrtab can never be compiled in a bootstrap.  This
     verification is just too expensive.  */
  if (n_basic_blocks > 250)
    return;

  /* Verification is a four-step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT(i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT(i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
  eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);

  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (eh_block_artificial_uses);

  bitmap_ior_into (eh_block_artificial_uses,
		   regular_block_artificial_uses);

  /* Check that the artificial_uses bitmaps did not change.  */
  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
			      df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
			      df->eh_block_artificial_uses));

  BITMAP_FREE (regular_block_artificial_uses);
  BITMAP_FREE (eh_block_artificial_uses);

  /* Verify the entry block and exit block.  These only verify the
     bitmaps; the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB (bb)
    df_bb_verify (bb);

  /* (4) All reg chains are traversed a second time.  This time a
     check is made that the marks are clear.  A set mark would be
     from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}