1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 02110-1301, USA.
25 */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "rtl.h"
32 #include "tm_p.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "regs.h"
37 #include "output.h"
38 #include "alloc-pool.h"
39 #include "flags.h"
40 #include "hard-reg-set.h"
41 #include "basic-block.h"
42 #include "sbitmap.h"
43 #include "bitmap.h"
44 #include "timevar.h"
45 #include "tree.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "df.h"
49 #include "tree-pass.h"
50
51 #ifndef HAVE_epilogue
52 #define HAVE_epilogue 0
53 #endif
54 #ifndef HAVE_prologue
55 #define HAVE_prologue 0
56 #endif
57 #ifndef HAVE_sibcall_epilogue
58 #define HAVE_sibcall_epilogue 0
59 #endif
60
61 #ifndef EPILOGUE_USES
62 #define EPILOGUE_USES(REGNO) 0
63 #endif
64
65 /* This bitmap obstack holds bitmaps, referenced by static variables,
66 that must not be reset after each function is compiled. */
67
68 static bitmap_obstack persistent_obstack;
69
70 /* The set of hard registers in eliminables[i].from. */
71
72 static HARD_REG_SET elim_reg_set;
73
74 /* This is a bitmap copy of regs_invalidated_by_call so that we can
75 easily add it into bitmaps, etc. */
76
77 bitmap df_invalidated_by_call = NULL;
78
79 /* A collection of the refs and multiword hardregs gathered while
80 scanning, before they are installed into an insn or basic block. */
81
82 struct df_collection_rec
83 {
84 struct df_ref ** def_vec;
85 unsigned int next_def;
86 struct df_ref ** use_vec;
87 unsigned int next_use;
88 struct df_ref ** eq_use_vec;
89 unsigned int next_eq_use;
90 struct df_mw_hardreg **mw_vec;
91 unsigned int next_mw;
92 };
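
/* The four vectors above, like the installed ref chains elsewhere in
   this file, are NULL-terminated arrays of pointers.  A minimal sketch
   of walking such a vector (the helper is hypothetical, not part of
   the df API):  */

static unsigned int ATTRIBUTE_UNUSED
example_ref_vec_length (struct df_ref **vec)
{
  unsigned int count = 0;
  /* Walk until the NULL sentinel that terminates every ref vector.  */
  while (*vec)
    {
      count++;
      vec++;
    }
  return count;
}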
93
94 static struct df_ref * df_null_ref_rec[1];
95 static struct df_mw_hardreg * df_null_mw_rec[1];
96
97 static void df_ref_record (struct df_collection_rec *,
98 rtx, rtx *,
99 basic_block, rtx, enum df_ref_type,
100 enum df_ref_flags);
101 static void df_def_record_1 (struct df_collection_rec *,
102 rtx, basic_block, rtx,
103 enum df_ref_flags);
104 static void df_defs_record (struct df_collection_rec *,
105 rtx, basic_block, rtx,
106 enum df_ref_flags);
107 static void df_uses_record (struct df_collection_rec *,
108 rtx *, enum df_ref_type,
109 basic_block, rtx, enum df_ref_flags);
110
111 static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
112 basic_block, rtx, enum df_ref_type,
113 enum df_ref_flags);
114
115 static void df_insn_refs_collect (struct df_collection_rec*,
116 basic_block, rtx);
117 static void df_canonize_collection_rec (struct df_collection_rec *);
118
119 static void df_get_regular_block_artificial_uses (bitmap);
120 static void df_get_eh_block_artificial_uses (bitmap);
121
122 static void df_record_entry_block_defs (bitmap);
123 static void df_record_exit_block_uses (bitmap);
124 static void df_get_exit_block_use_set (bitmap);
125 static void df_get_entry_block_def_set (bitmap);
126 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
127 static void df_ref_chain_delete_du_chain (struct df_ref **);
128 static void df_ref_chain_delete (struct df_ref **);
129
130 static void df_refs_add_to_chains (struct df_collection_rec *,
131 basic_block, rtx);
132
133 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
134 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
135 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
136 static void df_install_ref (struct df_ref *, struct df_reg_info *,
137 struct df_ref_info *, bool);
138
139 static int df_ref_compare (const void *, const void *);
140 static int df_mw_compare (const void *, const void *);
141
142 /* Indexed by hardware reg number, is true if that register is ever
143 used in the current function.
144
145 In df-scan.c, this is set up to record the hard regs used
146 explicitly. Reload adds in the hard regs used for holding pseudo
147 regs. Final uses it to generate the code in the function prologue
148 and epilogue to save and restore registers as needed. */
149
150 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
151 \f
152 /*----------------------------------------------------------------------------
153 SCANNING DATAFLOW PROBLEM
154
155 There are several ways in which scanning looks just like the other
156 dataflow problems. It shares all of the mechanisms for local info
157 as well as basic block info. Where it differs is when and how often
158 it gets run. It also has no need for the iterative solver.
159 ----------------------------------------------------------------------------*/
160
161 /* Problem data for the scanning dataflow function. */
162 struct df_scan_problem_data
163 {
164 alloc_pool ref_pool;
165 alloc_pool insn_pool;
166 alloc_pool reg_pool;
167 alloc_pool mw_reg_pool;
168 alloc_pool mw_link_pool;
169 bitmap_obstack reg_bitmaps;
170 bitmap_obstack insn_bitmaps;
171 };
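
/* Each pool above is a fixed-size object allocator.  An illustrative
   sketch of the alloc-pool lifecycle used throughout this file (this
   helper is hypothetical and only demonstrates the calls):  */

static void ATTRIBUTE_UNUSED
example_pool_lifecycle (void)
{
  /* Create a pool handing out df_reg_info-sized objects, grouped into
     blocks of 100.  */
  alloc_pool pool = create_alloc_pool ("example pool",
				       sizeof (struct df_reg_info), 100);
  struct df_reg_info *reg_info = pool_alloc (pool);
  memset (reg_info, 0, sizeof (struct df_reg_info));

  /* Objects are returned to the pool individually...  */
  pool_free (pool, reg_info);
  /* ...and the whole pool is released in one call.  */
  free_alloc_pool (pool);
}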
172
173 typedef struct df_scan_bb_info *df_scan_bb_info_t;
174
175 static void
176 df_scan_free_internal (void)
177 {
178 struct df_scan_problem_data *problem_data
179 = (struct df_scan_problem_data *) df_scan->problem_data;
180
181 free (df->def_info.refs);
182 free (df->def_info.begin);
183 free (df->def_info.count);
184 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
185
186 free (df->use_info.refs);
187 free (df->use_info.begin);
188 free (df->use_info.count);
189 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
190
191 free (df->def_regs);
192 df->def_regs = NULL;
193 free (df->use_regs);
194 df->use_regs = NULL;
195 free (df->eq_use_regs);
196 df->eq_use_regs = NULL;
197 df->regs_size = 0;
198 DF_REG_SIZE(df) = 0;
199
200 free (df->insns);
201 df->insns = NULL;
202 DF_INSN_SIZE () = 0;
203
204 free (df_scan->block_info);
205 df_scan->block_info = NULL;
206 df_scan->block_info_size = 0;
207
208 BITMAP_FREE (df->hardware_regs_used);
209 BITMAP_FREE (df->regular_block_artificial_uses);
210 BITMAP_FREE (df->eh_block_artificial_uses);
211 BITMAP_FREE (df->entry_block_defs);
212 BITMAP_FREE (df->exit_block_uses);
213 BITMAP_FREE (df->insns_to_delete);
214 BITMAP_FREE (df->insns_to_rescan);
215 BITMAP_FREE (df->insns_to_notes_rescan);
216
217 free_alloc_pool (df_scan->block_pool);
218 free_alloc_pool (problem_data->ref_pool);
219 free_alloc_pool (problem_data->insn_pool);
220 free_alloc_pool (problem_data->reg_pool);
221 free_alloc_pool (problem_data->mw_reg_pool);
222 free_alloc_pool (problem_data->mw_link_pool);
223 bitmap_obstack_release (&problem_data->reg_bitmaps);
224 bitmap_obstack_release (&problem_data->insn_bitmaps);
225 free (df_scan->problem_data);
226 }
227
228
229 /* Set basic block info. */
230
231 static void
232 df_scan_set_bb_info (unsigned int index,
233 struct df_scan_bb_info *bb_info)
234 {
235 gcc_assert (df_scan);
236 df_grow_bb_info (df_scan);
237 df_scan->block_info[index] = (void *) bb_info;
238 }
239
240
241 /* Free basic block info. */
242
243 static void
244 df_scan_free_bb_info (basic_block bb, void *vbb_info)
245 {
246 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
247 unsigned int bb_index = bb->index;
248 if (bb_info)
249 {
250 rtx insn;
251 FOR_BB_INSNS (bb, insn)
252 {
253 if (INSN_P (insn))
254 /* Delete the refs recorded for INSN. */
255 df_insn_delete (bb, INSN_UID (insn));
256 }
257
258 if (bb_index < df_scan->block_info_size)
259 bb_info = df_scan_get_bb_info (bb_index);
260
261 /* Get rid of any artificial uses or defs. */
262 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
263 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
264 df_ref_chain_delete (bb_info->artificial_defs);
265 df_ref_chain_delete (bb_info->artificial_uses);
266 bb_info->artificial_defs = NULL;
267 bb_info->artificial_uses = NULL;
268 pool_free (df_scan->block_pool, bb_info);
269 }
270 }
271
272
273 /* Allocate the problem data for the scanning problem. This should be
274 called when the problem is created or when the entire function is to
275 be rescanned. */
276 void
277 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
278 {
279 struct df_scan_problem_data *problem_data;
280 unsigned int insn_num = get_max_uid () + 1;
281 unsigned int block_size = 400;
282 basic_block bb;
283
284 /* Given the number of pools, this is really faster than tearing
285 everything apart. */
286 if (df_scan->problem_data)
287 df_scan_free_internal ();
288
289 df_scan->block_pool
290 = create_alloc_pool ("df_scan_block pool",
291 sizeof (struct df_scan_bb_info),
292 block_size);
293
294 problem_data = XNEW (struct df_scan_problem_data);
295 df_scan->problem_data = problem_data;
296 df_scan->computed = true;
297
298 problem_data->ref_pool
299 = create_alloc_pool ("df_scan_ref pool",
300 sizeof (struct df_ref), block_size);
301 problem_data->insn_pool
302 = create_alloc_pool ("df_scan_insn pool",
303 sizeof (struct df_insn_info), block_size);
304 problem_data->reg_pool
305 = create_alloc_pool ("df_scan_reg pool",
306 sizeof (struct df_reg_info), block_size);
307 problem_data->mw_reg_pool
308 = create_alloc_pool ("df_scan_mw_reg pool",
309 sizeof (struct df_mw_hardreg), block_size);
310 problem_data->mw_link_pool
311 = create_alloc_pool ("df_scan_mw_link pool",
312 sizeof (struct df_link), block_size);
313
314 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
315 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
316
317 insn_num += insn_num / 4;
318 df_grow_reg_info ();
319
320 df_grow_insn_info ();
321 df_grow_bb_info (df_scan);
322
323 FOR_ALL_BB (bb)
324 {
325 unsigned int bb_index = bb->index;
326 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
327 if (!bb_info)
328 {
329 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
330 df_scan_set_bb_info (bb_index, bb_info);
331 }
332 bb_info->artificial_defs = NULL;
333 bb_info->artificial_uses = NULL;
334 }
335
336 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
337 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
338 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
339 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
340 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
341 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
342 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
343 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
344 }
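
/* The bitmaps allocated above live on problem-private obstacks so that
   df_scan_free_internal can release them wholesale.  An illustrative
   sketch of that bitmap-obstack lifecycle (hypothetical helper):  */

static void ATTRIBUTE_UNUSED
example_bitmap_obstack_lifecycle (void)
{
  bitmap_obstack obstack;
  bitmap b;

  bitmap_obstack_initialize (&obstack);
  b = BITMAP_ALLOC (&obstack);
  bitmap_set_bit (b, 42);

  /* Individual bitmaps may be freed early...  */
  BITMAP_FREE (b);
  /* ...or everything on the obstack released at once.  */
  bitmap_obstack_release (&obstack);
}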
345
346
347 /* Free all of the data associated with the scan problem. */
348
349 static void
350 df_scan_free (void)
351 {
352 if (df_scan->problem_data)
353 df_scan_free_internal ();
354
355 if (df->blocks_to_analyze)
356 {
357 BITMAP_FREE (df->blocks_to_analyze);
358 df->blocks_to_analyze = NULL;
359 }
360
361 free (df_scan);
362 }
363
364 /* Dump the preamble for DF_SCAN dump. */
365 static void
366 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
367 {
368 int i;
369
370 fprintf (file, ";; invalidated by call \t");
371 df_print_regset (file, df_invalidated_by_call);
372 fprintf (file, ";; hardware regs used \t");
373 df_print_regset (file, df->hardware_regs_used);
374 fprintf (file, ";; regular block artificial uses \t");
375 df_print_regset (file, df->regular_block_artificial_uses);
376 fprintf (file, ";; eh block artificial uses \t");
377 df_print_regset (file, df->eh_block_artificial_uses);
378 fprintf (file, ";; entry block defs \t");
379 df_print_regset (file, df->entry_block_defs);
380 fprintf (file, ";; exit block uses \t");
381 df_print_regset (file, df->exit_block_uses);
382 fprintf (file, ";; regs ever live \t");
383 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
384 if (df_regs_ever_live_p (i))
385 fprintf (file, " %d[%s]", i, reg_names[i]);
386
387 fprintf (file, "\n");
388 }
389
390 /* Dump the bb_info for a given basic block. */
391 static void
392 df_scan_start_block (basic_block bb, FILE *file)
393 {
394 struct df_scan_bb_info *bb_info
395 = df_scan_get_bb_info (bb->index);
396
397 if (bb_info)
398 {
399 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
400 df_refs_chain_dump (bb_info->artificial_defs, true, file);
401 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
402 df_refs_chain_dump (bb_info->artificial_uses, true, file);
403 fprintf (file, "\n");
404 }
405 #if 0
406 {
407 rtx insn;
408 FOR_BB_INSNS (bb, insn)
409 if (INSN_P (insn))
410 df_insn_debug (insn, false, file);
411 }
412 #endif
413 }
414
415 static struct df_problem problem_SCAN =
416 {
417 DF_SCAN, /* Problem id. */
418 DF_NONE, /* Direction. */
419 df_scan_alloc, /* Allocate the problem specific data. */
420 NULL, /* Reset global information. */
421 df_scan_free_bb_info, /* Free basic block info. */
422 NULL, /* Local compute function. */
423 NULL, /* Init the solution specific data. */
424 NULL, /* Iterative solver. */
425 NULL, /* Confluence operator 0. */
426 NULL, /* Confluence operator n. */
427 NULL, /* Transfer function. */
428 NULL, /* Finalize function. */
429 df_scan_free, /* Free all of the problem information. */
430 NULL, /* Remove this problem from the stack of dataflow problems. */
431 df_scan_start_dump, /* Debugging. */
432 df_scan_start_block, /* Debugging start block. */
433 NULL, /* Debugging end block. */
434 NULL, /* Incremental solution verify start. */
435 NULL, /* Incremental solution verify end. */
436 NULL, /* Dependent problem. */
437 TV_DF_SCAN /* Timing variable. */
438 };
439
440
441 /* Create a new DATAFLOW instance and add it to an existing instance
442 of DF. The returned structure is what is used to get at the
443 solution. */
444
445 void
446 df_scan_add_problem (void)
447 {
448 df_add_problem (&problem_SCAN);
449 }
450
451 \f
452 /*----------------------------------------------------------------------------
453 Storage Allocation Utilities
454 ----------------------------------------------------------------------------*/
455
456
457 /* First, grow the reg_info information. If the current size is less than
458 the number of pseudos, grow to 25% more than the number of
459 pseudos.
460
461 Second, ensure that all of the slots up to max_reg_num have been
462 filled with reg_info structures. */
463
464 void
465 df_grow_reg_info (void)
466 {
467 unsigned int max_reg = max_reg_num ();
468 unsigned int new_size = max_reg;
469 struct df_scan_problem_data *problem_data
470 = (struct df_scan_problem_data *) df_scan->problem_data;
471 unsigned int i;
472
473 if (df->regs_size < new_size)
474 {
475 new_size += new_size / 4;
476 df->def_regs = xrealloc (df->def_regs,
477 new_size *sizeof (struct df_reg_info*));
478 df->use_regs = xrealloc (df->use_regs,
479 new_size *sizeof (struct df_reg_info*));
480 df->eq_use_regs = xrealloc (df->eq_use_regs,
481 new_size *sizeof (struct df_reg_info*));
482 df->def_info.begin = xrealloc (df->def_info.begin,
483 new_size *sizeof (int));
484 df->def_info.count = xrealloc (df->def_info.count,
485 new_size *sizeof (int));
486 df->use_info.begin = xrealloc (df->use_info.begin,
487 new_size *sizeof (int));
488 df->use_info.count = xrealloc (df->use_info.count,
489 new_size *sizeof (int));
490 df->regs_size = new_size;
491 }
492
493 for (i = df->regs_inited; i < max_reg; i++)
494 {
495 struct df_reg_info *reg_info;
496
497 reg_info = pool_alloc (problem_data->reg_pool);
498 memset (reg_info, 0, sizeof (struct df_reg_info));
499 df->def_regs[i] = reg_info;
500 reg_info = pool_alloc (problem_data->reg_pool);
501 memset (reg_info, 0, sizeof (struct df_reg_info));
502 df->use_regs[i] = reg_info;
503 reg_info = pool_alloc (problem_data->reg_pool);
504 memset (reg_info, 0, sizeof (struct df_reg_info));
505 df->eq_use_regs[i] = reg_info;
506 df->def_info.begin[i] = 0;
507 df->def_info.count[i] = 0;
508 df->use_info.begin[i] = 0;
509 df->use_info.count[i] = 0;
510 }
511
512 df->regs_inited = max_reg;
513 }
514
515
516 /* Grow the ref information. */
517
518 static void
519 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
520 {
521 if (ref_info->refs_size < new_size)
522 {
523 ref_info->refs = xrealloc (ref_info->refs,
524 new_size *sizeof (struct df_ref *));
525 memset (ref_info->refs + ref_info->refs_size, 0,
526 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
527 ref_info->refs_size = new_size;
528 }
529 }
530
531
532 /* Check and grow the ref information if necessary. This routine
533 guarantees at least total_size + BITMAP_ADDEND entries in the refs
534 array. It updates ref_info->refs_size only and does not change
535 ref_info->total_size. */
536
537 static void
538 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
539 unsigned bitmap_addend)
540 {
541 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
542 {
543 int new_size = ref_info->total_size + bitmap_addend;
544 new_size += ref_info->total_size / 4;
545 df_grow_ref_info (ref_info, new_size);
546 }
547 }
548
549
550 /* Grow the insn information. If the current size is less than the
551 number of instructions, grow to 25% more than the number of
552 instructions. */
553
554 void
555 df_grow_insn_info (void)
556 {
557 unsigned int new_size = get_max_uid () + 1;
558 if (DF_INSN_SIZE () < new_size)
559 {
560 new_size += new_size / 4;
561 df->insns = xrealloc (df->insns,
562 new_size *sizeof (struct df_insn_info *));
563 memset (df->insns + df->insns_size, 0,
564 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
565 DF_INSN_SIZE () = new_size;
566 }
567 }
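
/* The grow routines above share one policy: when an array is too
   small, realloc to the needed size plus 25% headroom so that repeated
   one-element growth stays amortized-linear.  A generic sketch of the
   policy (the helper and its parameters are hypothetical):  */

static void * ATTRIBUTE_UNUSED
example_grow (void *array, unsigned int *size, unsigned int needed,
	      unsigned int elt_size)
{
  if (*size < needed)
    {
      /* Allocate 25% headroom beyond what is needed right now.  */
      unsigned int new_size = needed + needed / 4;
      array = xrealloc (array, new_size * elt_size);
      *size = new_size;
    }
  return array;
}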
568
569
570
571 \f
572 /*----------------------------------------------------------------------------
573 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
574 ----------------------------------------------------------------------------*/
575
576 /* Rescan all of the blocks_to_analyze or, if blocks_to_analyze is
577 NULL, all of the blocks in the function. */
578
579 void
580 df_scan_blocks (void)
581 {
582 basic_block bb;
583
584 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
585 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
586
587 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
588 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
589
590 bitmap_ior_into (df->eh_block_artificial_uses,
591 df->regular_block_artificial_uses);
592
593 /* ENTRY and EXIT blocks have special defs/uses. */
594 df_get_entry_block_def_set (df->entry_block_defs);
595 df_record_entry_block_defs (df->entry_block_defs);
596 df_get_exit_block_use_set (df->exit_block_uses);
597 df_record_exit_block_uses (df->exit_block_uses);
598 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
599 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
600
601 /* Regular blocks */
602 FOR_EACH_BB (bb)
603 {
604 unsigned int bb_index = bb->index;
605 df_bb_refs_record (bb_index, true);
606 }
607 }
608
609
610 /* Create a new ref of type DF_REF_TYPE for register REG at address
611 LOC within INSN of BB. */
612
613 struct df_ref *
614 df_ref_create (rtx reg, rtx *loc, rtx insn,
615 basic_block bb,
616 enum df_ref_type ref_type,
617 enum df_ref_flags ref_flags)
618 {
619 struct df_ref *ref;
620 struct df_reg_info **reg_info;
621 struct df_ref_info *ref_info;
622 struct df_ref **ref_rec;
623 struct df_ref ***ref_rec_ptr;
624 unsigned int count = 0;
625 bool add_to_table;
626
627 df_grow_reg_info ();
628
629 /* You cannot hack artificial refs. */
630 gcc_assert (insn);
631 ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
632 ref_type, ref_flags);
633
634 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
635 {
636 reg_info = df->def_regs;
637 ref_info = &df->def_info;
638 ref_rec_ptr = &DF_INSN_DEFS (insn);
639 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
640 }
641 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
642 {
643 reg_info = df->eq_use_regs;
644 ref_info = &df->use_info;
645 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
646 switch (ref_info->ref_order)
647 {
648 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
649 case DF_REF_ORDER_BY_REG_WITH_NOTES:
650 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
651 add_to_table = true;
652 break;
653 default:
654 add_to_table = false;
655 break;
656 }
657 }
658 else
659 {
660 reg_info = df->use_regs;
661 ref_info = &df->use_info;
662 ref_rec_ptr = &DF_INSN_USES (insn);
663 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
664 }
665
666 /* Do not add if ref is not in the right blocks. */
667 if (add_to_table && df->analyze_subset)
668 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
669
670 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
671
672 if (add_to_table)
673 switch (ref_info->ref_order)
674 {
675 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
676 case DF_REF_ORDER_BY_REG_WITH_NOTES:
677 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
678 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
679 break;
680 default:
681 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
682 break;
683 }
684
685 ref_rec = *ref_rec_ptr;
686 while (*ref_rec)
687 {
688 count++;
689 ref_rec++;
690 }
691
692 ref_rec = *ref_rec_ptr;
693 if (count)
694 {
695 ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
696 *ref_rec_ptr = ref_rec;
697 ref_rec[count] = ref;
698 ref_rec[count+1] = NULL;
699 qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
700 }
701 else
702 {
703 struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
704 ref_rec[0] = ref;
705 ref_rec[1] = NULL;
706 *ref_rec_ptr = ref_rec;
707 }
708
709 #if 0
710 if (dump_file)
711 {
712 fprintf (dump_file, "adding ref ");
713 df_ref_debug (ref, dump_file);
714 }
715 #endif
716 /* By adding the ref directly, df_insn_rescan may not find any
717 differences even though the block will have changed. So we need
718 to mark the block dirty ourselves. */
719 df_set_bb_dirty (bb);
720
721 return ref;
722 }
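
/* A usage sketch for df_ref_create (the caller below is hypothetical
   and assumes INSN has already been scanned and that its pattern is a
   SET): record an extra use of REG that a transformation exposed in
   the SET_SRC of INSN.  df_ref_create keeps the insn's ref vector
   sorted and marks the block dirty itself.  */

static struct df_ref * ATTRIBUTE_UNUSED
example_record_use (rtx reg, rtx insn)
{
  /* LOC must point at the slot in the rtl that holds the use.  */
  return df_ref_create (reg, &SET_SRC (PATTERN (insn)), insn,
			BLOCK_FOR_INSN (insn), DF_REF_REG_USE, 0);
}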
723
724
725 \f
726 /*----------------------------------------------------------------------------
727 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
728 ----------------------------------------------------------------------------*/
729
730
731 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
732 Also delete the def-use or use-def chain if it exists. */
733
734 static void
735 df_reg_chain_unlink (struct df_ref *ref)
736 {
737 struct df_ref *next = DF_REF_NEXT_REG (ref);
738 struct df_ref *prev = DF_REF_PREV_REG (ref);
739 struct df_scan_problem_data *problem_data
740 = (struct df_scan_problem_data *) df_scan->problem_data;
741 int id = DF_REF_ID (ref);
742 struct df_reg_info *reg_info;
743 struct df_ref **refs = NULL;
744
745 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
746 {
747 reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
748 refs = df->def_info.refs;
749 }
750 else
751 {
752 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
753 {
754 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
755 switch (df->use_info.ref_order)
756 {
757 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
758 case DF_REF_ORDER_BY_REG_WITH_NOTES:
759 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
760 refs = df->use_info.refs;
761 break;
762 default:
763 break;
764 }
765 }
766 else
767 {
768 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
769 refs = df->use_info.refs;
770 }
771 }
772
773 if (refs)
774 {
775 if (df->analyze_subset)
776 {
777 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
778 refs[id] = NULL;
779 }
780 else
781 refs[id] = NULL;
782 }
783
784 /* Delete any def-use or use-def chains that start here. It is
785 possible that there is trash in this field. This happens for
786 insns that have been deleted when rescanning has been deferred
787 and the chain problem has also been deleted. The chain tear down
788 code skips deleted insns. */
789 if (df_chain && DF_REF_CHAIN (ref))
790 df_chain_unlink (ref);
791
792 reg_info->n_refs--;
793 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
794 {
795 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
796 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
797 }
798
799 /* Unlink from the reg chain. If there is no prev, this is the
800 first of the list. If not, just join the next and prev. */
801 if (prev)
802 DF_REF_NEXT_REG (prev) = next;
803 else
804 {
805 gcc_assert (reg_info->reg_chain == ref);
806 reg_info->reg_chain = next;
807 }
808 if (next)
809 DF_REF_PREV_REG (next) = prev;
810
811 pool_free (problem_data->ref_pool, ref);
812 }
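
/* The reg chain maintained above is a doubly-linked list threaded
   through all refs of a single register.  A minimal sketch of walking
   the defs of REGNO through it (hypothetical helper):  */

static unsigned int ATTRIBUTE_UNUSED
example_count_reg_defs (unsigned int regno)
{
  unsigned int count = 0;
  struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
  while (ref)
    {
      count++;
      ref = DF_REF_NEXT_REG (ref);
    }
  return count;
}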
813
814
815 /* Remove REF from VEC. */
816
817 static void
818 df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
819 {
820 struct df_ref **vec = *vec_ptr;
821
822 if (vec[1])
823 {
824 while (*vec && *vec != ref)
825 vec++;
826
827 while (*vec)
828 {
829 *vec = *(vec+1);
830 vec++;
831 }
832 }
833 else
834 {
835 free (vec);
836 *vec_ptr = df_null_ref_rec;
837 }
838 }
839
840
841 /* Unlink REF from all def-use/use-def chains, etc. */
842
843 void
844 df_ref_remove (struct df_ref *ref)
845 {
846 #if 0
847 if (dump_file)
848 {
849 fprintf (dump_file, "removing ref ");
850 df_ref_debug (ref, dump_file);
851 }
852 #endif
853
854 if (DF_REF_REG_DEF_P (ref))
855 {
856 if (DF_REF_IS_ARTIFICIAL (ref))
857 {
858 struct df_scan_bb_info *bb_info
859 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
860 df_ref_compress_rec (&bb_info->artificial_defs, ref);
861 }
862 else
863 {
864 unsigned int uid = DF_REF_INSN_UID (ref);
865 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
866 df_ref_compress_rec (&insn_rec->defs, ref);
867 }
868 }
869 else
870 {
871 if (DF_REF_IS_ARTIFICIAL (ref))
872 {
873 struct df_scan_bb_info *bb_info
874 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
875 df_ref_compress_rec (&bb_info->artificial_uses, ref);
876 }
877 else
878 {
879 unsigned int uid = DF_REF_INSN_UID (ref);
880 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
881
882 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
883 df_ref_compress_rec (&insn_rec->eq_uses, ref);
884 else
885 df_ref_compress_rec (&insn_rec->uses, ref);
886 }
887 }
888
889 /* By deleting the ref directly, df_insn_rescan may not find any
890 differences even though the block will have changed. So we need
891 to mark the block dirty ourselves. */
892 df_set_bb_dirty (DF_REF_BB (ref));
893 df_reg_chain_unlink (ref);
894 }
895
896
897 /* Create the insn record for INSN. If there was one there, zero it
898 out. */
899
900 struct df_insn_info *
901 df_insn_create_insn_record (rtx insn)
902 {
903 struct df_scan_problem_data *problem_data
904 = (struct df_scan_problem_data *) df_scan->problem_data;
905 struct df_insn_info *insn_rec;
906
907 df_grow_insn_info ();
908 insn_rec = DF_INSN_GET (insn);
909 if (!insn_rec)
910 {
911 insn_rec = pool_alloc (problem_data->insn_pool);
912 DF_INSN_SET (insn, insn_rec);
913 }
914 memset (insn_rec, 0, sizeof (struct df_insn_info));
915 insn_rec->insn = insn;
916 return insn_rec;
917 }
918
919
920 /* Delete the du chains (DF_REF_CHAIN ()) of all refs in the ref chain. */
921
922 static void
923 df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
924 {
925 while (*ref_rec)
926 {
927 struct df_ref *ref = *ref_rec;
928 /* CHAIN is allocated by DF_CHAIN. So make sure to
929 pass the df_scan instance for the problem. */
930 if (DF_REF_CHAIN (ref))
931 df_chain_unlink (ref);
932 ref_rec++;
933 }
934 }
935
936
937 /* Delete all refs in the ref chain. */
938
939 static void
940 df_ref_chain_delete (struct df_ref **ref_rec)
941 {
942 struct df_ref **start = ref_rec;
943 while (*ref_rec)
944 {
945 df_reg_chain_unlink (*ref_rec);
946 ref_rec++;
947 }
948
949 /* An empty list points to a special shared element that must not
950 be freed; only free the vector if it held any refs. */
951 if (*start)
952 free (start);
953 }
954
955
956 /* Delete the hardreg chain. */
957
958 static void
959 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
960 {
961 struct df_scan_problem_data *problem_data;
962
963 if (!hardregs)
964 return;
965
966 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
967
968 while (*hardregs)
969 {
970 pool_free (problem_data->mw_reg_pool, *hardregs);
971 hardregs++;
972 }
973 }
974
975
976 /* Delete all of the refs information from INSN. BB must be passed
977 in (except when called from df_process_deferred_rescans) so that
978 the block can be marked as dirty. */
979
980 void
981 df_insn_delete (basic_block bb, unsigned int uid)
982 {
983 struct df_insn_info *insn_info = NULL;
984 if (!df)
985 return;
986
987 df_grow_bb_info (df_scan);
988 df_grow_reg_info ();
989
990 /* The block must be marked as dirty now, rather than later as in
991 df_insn_rescan and df_notes_rescan because it may not be there at
992 rescanning time and the mark would blow up. */
993 if (bb)
994 df_set_bb_dirty (bb);
995
996 insn_info = DF_INSN_UID_SAFE_GET (uid);
997
998 /* The client has deferred rescanning. */
999 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1000 {
1001 if (insn_info)
1002 {
1003 bitmap_clear_bit (df->insns_to_rescan, uid);
1004 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1005 bitmap_set_bit (df->insns_to_delete, uid);
1006 }
1007 if (dump_file)
1008 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1009 return;
1010 }
1011
1012 if (dump_file)
1013 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1014
1015 bitmap_clear_bit (df->insns_to_delete, uid);
1016 bitmap_clear_bit (df->insns_to_rescan, uid);
1017 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1018 if (insn_info)
1019 {
1020 struct df_scan_problem_data *problem_data
1021 = (struct df_scan_problem_data *) df_scan->problem_data;
1022
1023 /* In general, notes do not have the insn_info fields
1024 initialized. However, combine deletes insns by changing them
1025 to notes. How clever. So we cannot just check if it is a
1026 valid insn before short-circuiting this code, we need to see
1027 if we actually initialized it. */
1028 if (insn_info->defs)
1029 {
1030 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1031
1032 if (df_chain)
1033 {
1034 df_ref_chain_delete_du_chain (insn_info->defs);
1035 df_ref_chain_delete_du_chain (insn_info->uses);
1036 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1037 }
1038
1039 df_ref_chain_delete (insn_info->defs);
1040 df_ref_chain_delete (insn_info->uses);
1041 df_ref_chain_delete (insn_info->eq_uses);
1042 }
1043 pool_free (problem_data->insn_pool, insn_info);
1044 DF_INSN_UID_SET (uid, NULL);
1045 }
1046 }
1047
1048
1049 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1050
1051 static void
1052 df_free_collection_rec (struct df_collection_rec *collection_rec)
1053 {
1054 struct df_scan_problem_data *problem_data
1055 = (struct df_scan_problem_data *) df_scan->problem_data;
1056 struct df_ref **ref;
1057 struct df_mw_hardreg **mw;
1058
1059 if (collection_rec->def_vec)
1060 for (ref = collection_rec->def_vec; *ref; ref++)
1061 pool_free (problem_data->ref_pool, *ref);
1062 if (collection_rec->use_vec)
1063 for (ref = collection_rec->use_vec; *ref; ref++)
1064 pool_free (problem_data->ref_pool, *ref);
1065 if (collection_rec->eq_use_vec)
1066 for (ref = collection_rec->eq_use_vec; *ref; ref++)
1067 pool_free (problem_data->ref_pool, *ref);
1068 if (collection_rec->mw_vec)
1069 for (mw = collection_rec->mw_vec; *mw; mw++)
1070 pool_free (problem_data->mw_reg_pool, *mw);
1071 }
1072
1073
1074 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1075
1076 bool
1077 df_insn_rescan (rtx insn)
1078 {
1079 unsigned int uid = INSN_UID (insn);
1080 struct df_insn_info *insn_info = NULL;
1081 basic_block bb = BLOCK_FOR_INSN (insn);
1082 struct df_collection_rec collection_rec;
1083 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
1084 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
1085 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
1086 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
1087
1088 if ((!df) || (!INSN_P (insn)))
1089 return false;
1090
1091 if (!bb)
1092 {
1093 if (dump_file)
1094 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1095 return false;
1096 }
1097
1098 /* The client has disabled rescanning and plans to do it itself. */
1099 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1100 return false;
1101
1102 df_grow_bb_info (df_scan);
1103 df_grow_reg_info ();
1104
1105 insn_info = DF_INSN_UID_SAFE_GET (uid);
1106
1107 /* The client has deferred rescanning. */
1108 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1109 {
1110 if (!insn_info)
1111 {
1112 insn_info = df_insn_create_insn_record (insn);
1113 insn_info->defs = df_null_ref_rec;
1114 insn_info->uses = df_null_ref_rec;
1115 insn_info->eq_uses = df_null_ref_rec;
1116 insn_info->mw_hardregs = df_null_mw_rec;
1117 }
1118 if (dump_file)
1119 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1120
1121 bitmap_clear_bit (df->insns_to_delete, uid);
1122 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1123 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1124 return false;
1125 }
1126
1127 bitmap_clear_bit (df->insns_to_delete, uid);
1128 bitmap_clear_bit (df->insns_to_rescan, uid);
1129 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1130 if (insn_info)
1131 {
1132 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1133 /* If there's no change, return false. */
1134 if (the_same)
1135 {
1136 df_free_collection_rec (&collection_rec);
1137 if (dump_file)
1138 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1139 return false;
1140 }
1141 if (dump_file)
1142 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1143
1144 /* There's a change - we need to delete the existing info. */
1145 df_insn_delete (NULL, uid);
1146 df_insn_create_insn_record (insn);
1147 }
1148 else
1149 {
1150 df_insn_create_insn_record (insn);
1151 df_insn_refs_collect (&collection_rec, bb, insn);
1152 if (dump_file)
1153 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1154 }
1155
1156 df_refs_add_to_chains (&collection_rec, bb, insn);
1157 df_set_bb_dirty (bb);
1158 return true;
1159 }
1160
1161
1162 /* Rescan all of the insns in the function. Note that the artificial
1163 uses and defs are not touched. This function will destroy def-use
1164 or use-def chains. */
1165
1166 void
1167 df_insn_rescan_all (void)
1168 {
1169 bool no_insn_rescan = false;
1170 bool defer_insn_rescan = false;
1171 basic_block bb;
1172 bitmap_iterator bi;
1173 unsigned int uid;
1174 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1175
1176 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1177 {
1178 df_clear_flags (DF_NO_INSN_RESCAN);
1179 no_insn_rescan = true;
1180 }
1181
1182 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1183 {
1184 df_clear_flags (DF_DEFER_INSN_RESCAN);
1185 defer_insn_rescan = true;
1186 }
1187
1188 bitmap_copy (tmp, df->insns_to_delete);
1189 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1190 {
1191 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1192 if (insn_info)
1193 df_insn_delete (NULL, uid);
1194 }
1195
1196 BITMAP_FREE (tmp);
1197 bitmap_clear (df->insns_to_delete);
1198 bitmap_clear (df->insns_to_rescan);
1199 bitmap_clear (df->insns_to_notes_rescan);
1200
1201 FOR_EACH_BB (bb)
1202 {
1203 rtx insn;
1204 FOR_BB_INSNS (bb, insn)
1205 {
1206 df_insn_rescan (insn);
1207 }
1208 }
1209
1210 if (no_insn_rescan)
1211 df_set_flags (DF_NO_INSN_RESCAN);
1212 if (defer_insn_rescan)
1213 df_set_flags (DF_DEFER_INSN_RESCAN);
1214 }
1215
1216
1217 /* Process all of the deferred rescans or deletions. */
1218
1219 void
1220 df_process_deferred_rescans (void)
1221 {
1222 bool no_insn_rescan = false;
1223 bool defer_insn_rescan = false;
1224 bitmap_iterator bi;
1225 unsigned int uid;
1226 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1227
1228 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1229 {
1230 df_clear_flags (DF_NO_INSN_RESCAN);
1231 no_insn_rescan = true;
1232 }
1233
1234 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1235 {
1236 df_clear_flags (DF_DEFER_INSN_RESCAN);
1237 defer_insn_rescan = true;
1238 }
1239
1240 if (dump_file)
1241 fprintf (dump_file, "starting the processing of deferred insns\n");
1242
1243 bitmap_copy (tmp, df->insns_to_delete);
1244 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1245 {
1246 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1247 if (insn_info)
1248 df_insn_delete (NULL, uid);
1249 }
1250
1251 bitmap_copy (tmp, df->insns_to_rescan);
1252 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1253 {
1254 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1255 if (insn_info)
1256 df_insn_rescan (insn_info->insn);
1257 }
1258
1259 bitmap_copy (tmp, df->insns_to_notes_rescan);
1260 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1261 {
1262 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1263 if (insn_info)
1264 df_notes_rescan (insn_info->insn);
1265 }
1266
1267 if (dump_file)
1268 fprintf (dump_file, "ending the processing of deferred insns\n");
1269
1270 BITMAP_FREE (tmp);
1271 bitmap_clear (df->insns_to_delete);
1272 bitmap_clear (df->insns_to_rescan);
1273 bitmap_clear (df->insns_to_notes_rescan);
1274
1275 if (no_insn_rescan)
1276 df_set_flags (DF_NO_INSN_RESCAN);
1277 if (defer_insn_rescan)
1278 df_set_flags (DF_DEFER_INSN_RESCAN);
1279
1280 /* If someone changed regs_ever_live during this pass, fix up the
1281 entry and exit blocks. */
1282 if (df->redo_entry_and_exit)
1283 {
1284 df_update_entry_exit_and_calls ();
1285 df->redo_entry_and_exit = false;
1286 }
1287 }
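
/* A sketch of the deferred-rescan protocol implemented above (the pass
   body is hypothetical): while DF_DEFER_INSN_RESCAN is set,
   df_insn_rescan and df_insn_delete only queue their insns; the queues
   are drained by df_process_deferred_rescans.  */

static void ATTRIBUTE_UNUSED
example_deferred_rescan_pass (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* ... transform insns here; rescans and deletions requested during
     this window are queued rather than processed immediately ... */

  df_process_deferred_rescans ();
  df_clear_flags (DF_DEFER_INSN_RESCAN);
}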
1288
1289
1290 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1291 the uses if INCLUDE_USES. Include the eq_uses if
1292 INCLUDE_EQ_USES. */
1293
1294 static unsigned int
1295 df_count_refs (bool include_defs, bool include_uses,
1296 bool include_eq_uses)
1297 {
1298 unsigned int regno;
1299 int size = 0;
1300 unsigned int m = df->regs_inited;
1301
1302 for (regno = 0; regno < m; regno++)
1303 {
1304 if (include_defs)
1305 size += DF_REG_DEF_COUNT (regno);
1306 if (include_uses)
1307 size += DF_REG_USE_COUNT (regno);
1308 if (include_eq_uses)
1309 size += DF_REG_EQ_USE_COUNT (regno);
1310 }
1311 return size;
1312 }
1313
1314
1315 /* Build the ref table for either the uses or defs from the reg-use
1316 or reg-def chains. This version processes the refs in reg order
1317 which is likely to be best if processing the whole function. */
1318
1319 static void
1320 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1321 bool include_defs,
1322 bool include_uses,
1323 bool include_eq_uses)
1324 {
1325 unsigned int m = df->regs_inited;
1326 unsigned int regno;
1327 unsigned int offset = 0;
1328 unsigned int start;
1329
1330 if (df->changeable_flags & DF_NO_HARD_REGS)
1331 {
1332 start = FIRST_PSEUDO_REGISTER;
1333 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1334 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1335 }
1336 else
1337 start = 0;
1338
1339 ref_info->total_size
1340 = df_count_refs (include_defs, include_uses, include_eq_uses);
1341
1342 df_check_and_grow_ref_info (ref_info, 1);
1343
1344 for (regno = start; regno < m; regno++)
1345 {
1346 int count = 0;
1347 ref_info->begin[regno] = offset;
1348 if (include_defs)
1349 {
1350 struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
1351 while (ref)
1352 {
1353 ref_info->refs[offset] = ref;
1354 DF_REF_ID (ref) = offset++;
1355 count++;
1356 ref = DF_REF_NEXT_REG (ref);
1357 gcc_assert (offset < ref_info->refs_size);
1358 }
1359 }
1360 if (include_uses)
1361 {
1362 struct df_ref *ref = DF_REG_USE_CHAIN (regno);
1363 while (ref)
1364 {
1365 ref_info->refs[offset] = ref;
1366 DF_REF_ID (ref) = offset++;
1367 count++;
1368 ref = DF_REF_NEXT_REG (ref);
1369 gcc_assert (offset < ref_info->refs_size);
1370 }
1371 }
1372 if (include_eq_uses)
1373 {
1374 struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
1375 while (ref)
1376 {
1377 ref_info->refs[offset] = ref;
1378 DF_REF_ID (ref) = offset++;
1379 count++;
1380 ref = DF_REF_NEXT_REG (ref);
1381 gcc_assert (offset < ref_info->refs_size);
1382 }
1383 }
1384 ref_info->count[regno] = count;
1385 }
1386
1387 /* The table size is not decremented when refs are deleted. So
1388 reset it now that we have squished out all of the empty
1389 slots. */
1390 ref_info->table_size = offset;
1391 }
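
/* After the reorganization above, the refs of register REGNO occupy
   the contiguous slice refs[begin[REGNO]] through
   refs[begin[REGNO] + count[REGNO] - 1] of the table.  A sketch of
   scanning one register's slice (hypothetical helper):  */

static void ATTRIBUTE_UNUSED
example_scan_reg_slice (struct df_ref_info *ref_info, unsigned int regno)
{
  unsigned int i;
  for (i = ref_info->begin[regno];
       i < ref_info->begin[regno] + ref_info->count[regno];
       i++)
    {
      struct df_ref *ref = ref_info->refs[i];
      /* Process REF here; every ref in this slice is for REGNO.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
    }
}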
1392
1393
1394 /* Build the ref table for either the uses or defs from the reg-use
1395 or reg-def chains. This version processes the refs in insn order
1396 which is likely to be best if processing some segment of the
1397 function. */
1398
1399 static void
1400 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1401 bool include_defs,
1402 bool include_uses,
1403 bool include_eq_uses)
1404 {
1405 bitmap_iterator bi;
1406 unsigned int bb_index;
1407 unsigned int m = df->regs_inited;
1408 unsigned int offset = 0;
1409 unsigned int r;
1410 unsigned int start
1411 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1412
1413 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1414 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1415
1416 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1417 df_check_and_grow_ref_info (ref_info, 1);
1418
1419 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1420 {
1421 basic_block bb = BASIC_BLOCK (bb_index);
1422 rtx insn;
1423 struct df_ref **ref_rec;
1424
1425 if (include_defs)
1426 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1427 {
1428 unsigned int regno = DF_REF_REGNO (*ref_rec);
1429 ref_info->count[regno]++;
1430 }
1431 if (include_uses)
1432 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1433 {
1434 unsigned int regno = DF_REF_REGNO (*ref_rec);
1435 ref_info->count[regno]++;
1436 }
1437
1438 FOR_BB_INSNS (bb, insn)
1439 {
1440 if (INSN_P (insn))
1441 {
1442 unsigned int uid = INSN_UID (insn);
1443
1444 if (include_defs)
1445 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1446 {
1447 unsigned int regno = DF_REF_REGNO (*ref_rec);
1448 ref_info->count[regno]++;
1449 }
1450 if (include_uses)
1451 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1452 {
1453 unsigned int regno = DF_REF_REGNO (*ref_rec);
1454 ref_info->count[regno]++;
1455 }
1456 if (include_eq_uses)
1457 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1458 {
1459 unsigned int regno = DF_REF_REGNO (*ref_rec);
1460 ref_info->count[regno]++;
1461 }
1462 }
1463 }
1464 }
1465
1466 for (r = start; r < m; r++)
1467 {
1468 ref_info->begin[r] = offset;
1469 offset += ref_info->count[r];
1470 ref_info->count[r] = 0;
1471 }
1472
1473 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1474 {
1475 basic_block bb = BASIC_BLOCK (bb_index);
1476 rtx insn;
1477 struct df_ref **ref_rec;
1478
1479 if (include_defs)
1480 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1481 {
1482 struct df_ref *ref = *ref_rec;
1483 unsigned int regno = DF_REF_REGNO (ref);
1484 if (regno >= start)
1485 {
1486 unsigned int id
1487 = ref_info->begin[regno] + ref_info->count[regno]++;
1488 DF_REF_ID (ref) = id;
1489 ref_info->refs[id] = ref;
1490 }
1491 }
1492 if (include_uses)
1493 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1494 {
1495 struct df_ref *ref = *ref_rec;
1496 unsigned int regno = DF_REF_REGNO (ref);
1497 if (regno >= start)
1498 {
1499 unsigned int id
1500 = ref_info->begin[regno] + ref_info->count[regno]++;
1501 DF_REF_ID (ref) = id;
1502 ref_info->refs[id] = ref;
1503 }
1504 }
1505
1506 FOR_BB_INSNS (bb, insn)
1507 {
1508 if (INSN_P (insn))
1509 {
1510 unsigned int uid = INSN_UID (insn);
1511
1512 if (include_defs)
1513 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1514 {
1515 struct df_ref *ref = *ref_rec;
1516 unsigned int regno = DF_REF_REGNO (ref);
1517 if (regno >= start)
1518 {
1519 unsigned int id
1520 = ref_info->begin[regno] + ref_info->count[regno]++;
1521 DF_REF_ID (ref) = id;
1522 ref_info->refs[id] = ref;
1523 }
1524 }
1525 if (include_uses)
1526 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1527 {
1528 struct df_ref *ref = *ref_rec;
1529 unsigned int regno = DF_REF_REGNO (ref);
1530 if (regno >= start)
1531 {
1532 unsigned int id
1533 = ref_info->begin[regno] + ref_info->count[regno]++;
1534 DF_REF_ID (ref) = id;
1535 ref_info->refs[id] = ref;
1536 }
1537 }
1538 if (include_eq_uses)
1539 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1540 {
1541 struct df_ref *ref = *ref_rec;
1542 unsigned int regno = DF_REF_REGNO (ref);
1543 if (regno >= start)
1544 {
1545 unsigned int id
1546 = ref_info->begin[regno] + ref_info->count[regno]++;
1547 DF_REF_ID (ref) = id;
1548 ref_info->refs[id] = ref;
1549 }
1550 }
1551 }
1552 }
1553 }
1554
1555 /* The table size is not decremented when refs are deleted. So
1556 reset it now that we have squished out all of the empty
1557 slots. */
1558
1559 ref_info->table_size = offset;
1560 }
1561
1562 /* Build the ref table for either the uses or defs from the reg-use
1563 or reg-def chains. */
1564
1565 static void
1566 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1567 bool include_defs,
1568 bool include_uses,
1569 bool include_eq_uses)
1570 {
1571 if (df->analyze_subset)
1572 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1573 include_uses, include_eq_uses);
1574 else
1575 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1576 include_uses, include_eq_uses);
1577 }
1578
1579
1580 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1581 static unsigned int
1582 df_add_refs_to_table (unsigned int offset,
1583 struct df_ref_info *ref_info,
1584 struct df_ref **ref_vec)
1585 {
1586 while (*ref_vec)
1587 {
1588 struct df_ref *ref = *ref_vec;
1589 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1590 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1591 {
1592 ref_info->refs[offset] = ref;
1593 DF_REF_ID (*ref_vec) = offset++;
1594 }
1595 ref_vec++;
1596 }
1597 return offset;
1598 }
1599
1600
1601 /* Count the number of refs in all of the insns of BB. Include the
1602 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1603 eq_uses if INCLUDE_EQ_USES. */
1604
1605 static unsigned int
1606 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1607 struct df_ref_info *ref_info,
1608 bool include_defs, bool include_uses,
1609 bool include_eq_uses)
1610 {
1611 rtx insn;
1612
1613 if (include_defs)
1614 offset = df_add_refs_to_table (offset, ref_info,
1615 df_get_artificial_defs (bb->index));
1616 if (include_uses)
1617 offset = df_add_refs_to_table (offset, ref_info,
1618 df_get_artificial_uses (bb->index));
1619
1620 FOR_BB_INSNS (bb, insn)
1621 if (INSN_P (insn))
1622 {
1623 unsigned int uid = INSN_UID (insn);
1624 if (include_defs)
1625 offset = df_add_refs_to_table (offset, ref_info,
1626 DF_INSN_UID_DEFS (uid));
1627 if (include_uses)
1628 offset = df_add_refs_to_table (offset, ref_info,
1629 DF_INSN_UID_USES (uid));
1630 if (include_eq_uses)
1631 offset = df_add_refs_to_table (offset, ref_info,
1632 DF_INSN_UID_EQ_USES (uid));
1633 }
1634 return offset;
1635 }
1636
1637
1638 /* Organize the refs by insn into the table in REF_INFO. If
1639 blocks_to_analyze is defined, use that set, otherwise the entire
1640 program. Include the defs if INCLUDE_DEFS. Include the uses if
1641 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1642
1643 static void
1644 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1645 bool include_defs, bool include_uses,
1646 bool include_eq_uses)
1647 {
1648 basic_block bb;
1649 unsigned int offset = 0;
1650
1651 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1652 df_check_and_grow_ref_info (ref_info, 1);
1653 if (df->blocks_to_analyze)
1654 {
1655 bitmap_iterator bi;
1656 unsigned int index;
1657
1658 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1659 {
1660 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1661 include_defs, include_uses,
1662 include_eq_uses);
1663 }
1664
1665 ref_info->table_size = offset;
1666 }
1667 else
1668 {
1669 FOR_ALL_BB (bb)
1670 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1671 include_defs, include_uses,
1672 include_eq_uses);
1673 ref_info->table_size = offset;
1674 }
1675 }
1676
1677
1678 /* If the use refs in DF are not already organized in ORDER, reorganize them. */
1679
1680 void
1681 df_maybe_reorganize_use_refs (enum df_ref_order order)
1682 {
1683 if (order == df->use_info.ref_order)
1684 return;
1685
1686 switch (order)
1687 {
1688 case DF_REF_ORDER_BY_REG:
1689 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1690 break;
1691
1692 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1693 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1694 break;
1695
1696 case DF_REF_ORDER_BY_INSN:
1697 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1698 break;
1699
1700 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1701 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1702 break;
1703
1704 case DF_REF_ORDER_NO_TABLE:
1705 free (df->use_info.refs);
1706 df->use_info.refs = NULL;
1707 df->use_info.refs_size = 0;
1708 break;
1709
1710 case DF_REF_ORDER_UNORDERED:
1711 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1712 gcc_unreachable ();
1713 break;
1714 }
1715
1716 df->use_info.ref_order = order;
1717 }
1718
1719
1720 /* If the def refs in DF are not already organized in ORDER, reorganize them. */
1721
1722 void
1723 df_maybe_reorganize_def_refs (enum df_ref_order order)
1724 {
1725 if (order == df->def_info.ref_order)
1726 return;
1727
1728 switch (order)
1729 {
1730 case DF_REF_ORDER_BY_REG:
1731 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1732 break;
1733
1734 case DF_REF_ORDER_BY_INSN:
1735 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1736 break;
1737
1738 case DF_REF_ORDER_NO_TABLE:
1739 free (df->def_info.refs);
1740 df->def_info.refs = NULL;
1741 df->def_info.refs_size = 0;
1742 break;
1743
1744 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1745 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1746 case DF_REF_ORDER_UNORDERED:
1747 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1748 gcc_unreachable ();
1749 break;
1750 }
1751
1752 df->def_info.ref_order = order;
1753 }
1754
1755
1756 /* Change the BB of all refs in the ref chain to NEW_BB.
1757 Assumes that all refs in the chain have the same BB.
1758 If changed, return the original bb the chain belonged to
1759 (or OLD_BB if the chain was empty).
1760 If no change, return NEW_BB.
1761 If something's wrong, it will return NULL. */
1762
1763 static basic_block
1764 df_ref_chain_change_bb (struct df_ref **ref_rec,
1765 basic_block old_bb,
1766 basic_block new_bb)
1767 {
1768 while (*ref_rec)
1769 {
1770 struct df_ref *ref = *ref_rec;
1771
1772 if (DF_REF_BB (ref) == new_bb)
1773 return new_bb;
1774 else
1775 {
1776 gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
1777 old_bb = DF_REF_BB (ref);
1778 DF_REF_BB (ref) = new_bb;
1779 }
1780 ref_rec++;
1781 }
1782
1783 return old_bb;
1784 }
1785
1786
1787 /* Change all of the basic block references in INSN to use the insn's
1788 current basic block. This function is called from routines that move
1789 instructions from one block to another. */
1790
1791 void
1792 df_insn_change_bb (rtx insn)
1793 {
1794 basic_block new_bb = BLOCK_FOR_INSN (insn);
1795 basic_block old_bb = NULL;
1796 struct df_insn_info *insn_info;
1797 unsigned int uid = INSN_UID (insn);
1798
1799 if (!df)
1800 return;
1801
1802 if (dump_file)
1803 fprintf (dump_file, "changing bb of uid %d\n", uid);
1804
1805 insn_info = DF_INSN_UID_SAFE_GET (uid);
1806 if (insn_info == NULL)
1807 {
1808 if (dump_file)
1809 fprintf (dump_file, " unscanned insn\n");
1810 df_insn_rescan (insn);
1811 return;
1812 }
1813
1814 if (!INSN_P (insn))
1815 return;
1816
1817 old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
1818 if (old_bb == new_bb)
1819 return;
1820
1821 old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
1822 if (old_bb == new_bb)
1823 return;
1824
1825 old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
1826 if (old_bb == new_bb)
1827 return;
1828
1829 df_set_bb_dirty (new_bb);
1830 if (old_bb)
1831 {
1832 if (dump_file)
1833 fprintf (dump_file, " from %d to %d\n",
1834 old_bb->index, new_bb->index);
1835 df_set_bb_dirty (old_bb);
1836 }
1837 else
1838 if (dump_file)
1839 fprintf (dump_file, " to %d\n", new_bb->index);
1840 }
1841
1842
1843 /* Helper function for df_ref_change_reg_with_loc. */
1844
1845 static void
1846 df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
1847 int new_regno, rtx loc)
1848 {
1849 struct df_ref *the_ref = old->reg_chain;
1850
1851 while (the_ref)
1852 {
1853 if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
1854 {
1855 struct df_ref *next_ref = the_ref->next_reg;
1856 struct df_ref *prev_ref = the_ref->prev_reg;
1857 struct df_ref **ref_vec, **ref_vec_t;
1858 unsigned int count = 0;
1859
1860 DF_REF_REGNO (the_ref) = new_regno;
1861 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1862
1863 /* Pull the_ref out of the old regno chain. */
1864 if (prev_ref)
1865 prev_ref->next_reg = next_ref;
1866 else
1867 old->reg_chain = next_ref;
1868 if (next_ref)
1869 next_ref->prev_reg = prev_ref;
1870 old->n_refs--;
1871
1872 /* Put the ref into the new regno chain. */
1873 the_ref->prev_reg = NULL;
1874 the_ref->next_reg = new->reg_chain;
1875 if (new->reg_chain)
1876 new->reg_chain->prev_reg = the_ref;
1877 new->reg_chain = the_ref;
1878 new->n_refs++;
1879 df_set_bb_dirty (DF_REF_BB (the_ref));
1880
1881 /* Need to re-sort the record that the ref was in because the
1882 regno is a sorting key. First, find the right record. */
1883 if (DF_REF_IS_ARTIFICIAL (the_ref))
1884 {
1885 unsigned int bb_index = DF_REF_BB (the_ref)->index;
1886 if (DF_REF_REG_DEF_P (the_ref))
1887 ref_vec = df_get_artificial_defs (bb_index);
1888 else
1889 ref_vec = df_get_artificial_uses (bb_index);
1890 }
1891 else
1892 {
1893 struct df_insn_info *insn_info
1894 = DF_INSN_GET (DF_REF_INSN (the_ref));
1895 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1896 ref_vec = insn_info->eq_uses;
1897 else
1898 ref_vec = insn_info->uses;
1899 if (dump_file)
1900 fprintf (dump_file, "changing reg in insn %d\n",
1901 INSN_UID (DF_REF_INSN (the_ref)));
1902 }
1903 ref_vec_t = ref_vec;
1904
1905 /* Find the length. */
1906 while (*ref_vec_t)
1907 {
1908 count++;
1909 ref_vec_t++;
1910 }
1911 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
1912
1913 the_ref = next_ref;
1914 }
1915 else
1916 the_ref = the_ref->next_reg;
1917 }
1918 }
1919
1920
1921 /* Change the regno of all refs that contained LOC from OLD_REGNO to
1922 NEW_REGNO. Refs that do not match LOC are not changed. This call
1923 is to support the SET_REGNO macro. */
1924
1925 void
1926 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
1927 {
1928 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
1929 return;
1930
1931 df_grow_reg_info ();
1932
1933 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1934 DF_REG_DEF_GET (new_regno), new_regno, loc);
1935 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1936 DF_REG_USE_GET (new_regno), new_regno, loc);
1937 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1938 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
1939 }
1940
1941
1942 /* Delete the mw_hardregs that point into the eq_notes. */
1943
1944 static unsigned int
1945 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1946 {
1947 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1948 unsigned int deleted = 0;
1949 unsigned int count = 0;
1950 struct df_scan_problem_data *problem_data
1951 = (struct df_scan_problem_data *) df_scan->problem_data;
1952
1953 if (!*mw_vec)
1954 return 0;
1955
1956 while (*mw_vec)
1957 {
1958 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1959 {
1960 	      struct df_mw_hardreg **temp_vec = mw_vec;
1961 
1962 	      pool_free (problem_data->mw_reg_pool, *mw_vec);
1964 	      /* Shove the remaining ones down one to fill the gap.  While
1965 		 this looks O(n**2), it is highly unusual to have any mw regs
1966 		 in eq_notes and the chances of more than one are almost
1967 		 nonexistent.  */
1968 while (*temp_vec)
1969 {
1970 *temp_vec = *(temp_vec + 1);
1971 temp_vec++;
1972 }
1973 deleted++;
1974 }
1975 else
1976 {
1977 mw_vec++;
1978 count++;
1979 }
1980 }
1981
1982 if (count == 0)
1983 {
1984 free (insn_info->mw_hardregs);
1985 insn_info->mw_hardregs = df_null_mw_rec;
1986 return 0;
1987 }
1988 return deleted;
1989 }
1990
1991
1992 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1993
1994 void
1995 df_notes_rescan (rtx insn)
1996 {
1997 struct df_insn_info *insn_info;
1998 unsigned int uid = INSN_UID (insn);
1999
2000 if (!df)
2001 return;
2002
2003 /* The client has disabled rescanning and plans to do it itself. */
2004 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2005 return;
2006
2007 df_grow_bb_info (df_scan);
2008 df_grow_reg_info ();
2009
2010 	  insn_info = DF_INSN_UID_SAFE_GET (uid);
2011
2012 /* The client has deferred rescanning. */
2013 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2014 {
2015 if (!insn_info)
2016 {
2017 insn_info = df_insn_create_insn_record (insn);
2018 insn_info->defs = df_null_ref_rec;
2019 insn_info->uses = df_null_ref_rec;
2020 insn_info->eq_uses = df_null_ref_rec;
2021 insn_info->mw_hardregs = df_null_mw_rec;
2022 }
2023
2024 bitmap_clear_bit (df->insns_to_delete, uid);
2025 	      /* If the insn is set to be rescanned, its notes do not also
2026 		 need to be rescanned.  */
2027 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2028 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2029 return;
2030 }
2031
2032 bitmap_clear_bit (df->insns_to_delete, uid);
2033 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2034
2035 if (insn_info)
2036 {
2037 basic_block bb = BLOCK_FOR_INSN (insn);
2038 rtx note;
2039 struct df_collection_rec collection_rec;
2040 unsigned int num_deleted;
2041
2042 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2043 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2044 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2045
2046 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2047 df_ref_chain_delete (insn_info->eq_uses);
2048 insn_info->eq_uses = NULL;
2049
2050 	      /* Process REG_EQUIV/REG_EQUAL notes.  */
2051 for (note = REG_NOTES (insn); note;
2052 note = XEXP (note, 1))
2053 {
2054 switch (REG_NOTE_KIND (note))
2055 {
2056 case REG_EQUIV:
2057 case REG_EQUAL:
2058 df_uses_record (&collection_rec,
2059 &XEXP (note, 0), DF_REF_REG_USE,
2060 bb, insn, DF_REF_IN_NOTE);
2061 default:
2062 break;
2063 }
2064 }
2065
2066 /* Find some place to put any new mw_hardregs. */
2067 df_canonize_collection_rec (&collection_rec);
2068 if (collection_rec.next_mw)
2069 {
2070 unsigned int count = 0;
2071 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2072 while (*mw_rec)
2073 {
2074 count++;
2075 mw_rec++;
2076 }
2077
2078 if (count)
2079 {
2080 /* Append to the end of the existing record after
2081 expanding it if necessary. */
2082 if (collection_rec.next_mw > num_deleted)
2083 {
2084 insn_info->mw_hardregs =
2085 xrealloc (insn_info->mw_hardregs,
2086 (count + 1 + collection_rec.next_mw)
2087 				  * sizeof (struct df_mw_hardreg *));
2088 }
2089 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2090 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2091 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2092 sizeof (struct df_mw_hardreg *), df_mw_compare);
2093 }
2094 else
2095 {
2096 /* No vector there. */
2097 insn_info->mw_hardregs
2098 = XNEWVEC (struct df_mw_hardreg*,
2099 count + 1 + collection_rec.next_mw);
2100 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2101 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2102 }
2103 }
2104 	  /* Clear the mw_vec so that df_refs_add_to_chains will
2105 	     ignore it.  */
2106 collection_rec.mw_vec = NULL;
2107 collection_rec.next_mw = 0;
2108 df_refs_add_to_chains (&collection_rec, bb, insn);
2109 }
2110 else
2111 df_insn_rescan (insn);
2112
2113 }
2114
2115 \f
2116 /*----------------------------------------------------------------------------
2117 Hard core instruction scanning code. No external interfaces here,
2118 just a lot of routines that look inside insns.
2119 ----------------------------------------------------------------------------*/
2120
2121
2122 /* Return true if the contents of two df_refs are identical.
2123    It ignores the DF_REF_REG_MARKER and DF_REF_MW_HARDREG flags.  */
2124
2125 static bool
2126 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2127 {
2128 if (!ref2)
2129 return false;
2130 return (ref1 == ref2) ||
2131 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2132 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2133 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2134 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2135 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2136 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2137 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2138 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2139 }
2140
2141
2142 /* Compare REF1 and REF2 for sorting. This is only called from places
2143 where all of the refs are of the same type, in the same insn, and
2144 have the same bb. So these fields are not checked. */
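/* (The effective sort key is (regno, type, creation order when the
   REG or LOC rtxes differ, flags), with the twist handled below that
   a ref coming from a multiword hardreg sorts ahead of an otherwise
   identical ref that does not.)  */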
2145
2146 static int
2147 df_ref_compare (const void *r1, const void *r2)
2148 {
2149 const struct df_ref *ref1 = *(struct df_ref **)r1;
2150 const struct df_ref *ref2 = *(struct df_ref **)r2;
2151
2152 if (ref1 == ref2)
2153 return 0;
2154
2155 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2156 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2157
2158 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2159 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2160
2161 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2162 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2163 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2164
2165 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2166 {
2167 	      /* If two refs are identical except that one of them is from
2168 		 a mw and one is not, we need to have the one with the mw
2169 		 first.  */
2170 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2171 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2172 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2173 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2174 return -1;
2175 else
2176 return 1;
2177 }
2178 return 0;
2179 }
2180
2181 static void
2182 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2183 {
2184 struct df_ref *tmp = ref_vec[i];
2185 ref_vec[i] = ref_vec[j];
2186 ref_vec[j] = tmp;
2187 }
2188
2189 /* Sort and compress a set of refs. */
2190
2191 static unsigned int
2192 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2193 {
2194 struct df_scan_problem_data *problem_data
2195 = (struct df_scan_problem_data *) df_scan->problem_data;
2196 unsigned int i;
2197 unsigned int dist = 0;
2198
2199 ref_vec[count] = NULL;
2200 /* If there are 1 or 0 elements, there is nothing to do. */
2201 if (count < 2)
2202 return count;
2203 else if (count == 2)
2204 {
2205 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2206 df_swap_refs (ref_vec, 0, 1);
2207 }
2208 else
2209 {
2210 for (i = 0; i < count - 1; i++)
2211 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2212 break;
2213 	  /* If the array is already strictly ordered, which is the most
2214 	     common case for a large COUNT (as happens for CALL insns),
2215 	     there is no need to sort and filter out duplicates; simply
2216 	     return the count.  Make sure DF_GET_ADD_REFS adds refs in
2217 	     increasing order of DF_REF_COMPARE.  */
2220 if (i == count - 1)
2221 return count;
2222 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2223 }
2224
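	  /* Compact equal refs in place: DIST counts how many duplicates
	     have been freed so far, so the next candidate is always
	     DIST + 1 slots ahead of position I.  E.g. for A A B B C the
	     loop frees one A and one B, slides the survivors left, and
	     COUNT - 2 is returned with the vector holding A B C.  */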
2225 	  for (i = 0; i < count - dist; i++)
2226 {
2227 /* Find the next ref that is not equal to the current ref. */
2228 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2229 {
2230 pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
2231 dist++;
2232 }
2233 /* Copy it down to the next position. */
2234 if (dist)
2235 ref_vec[i+1] = ref_vec[i + dist + 1];
2236 }
2237
2238 count -= dist;
2239 ref_vec[count] = NULL;
2240 return count;
2241 }
2242
2243
2244 /* Return true if the contents of two df_mw_hardregs are
2245    identical.  */
2246
2247 static bool
2248 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2249 {
2250 if (!mw2)
2251 return false;
2252 return (mw1 == mw2) ||
2253 (mw1->mw_reg == mw2->mw_reg
2254 && mw1->type == mw2->type
2255 && mw1->flags == mw2->flags
2256 && mw1->start_regno == mw2->start_regno
2257 && mw1->end_regno == mw2->end_regno);
2258 }
2259
2260
2261 /* Compare MW1 and MW2 for sorting. */
2262
2263 static int
2264 df_mw_compare (const void *m1, const void *m2)
2265 {
2266 const struct df_mw_hardreg *mw1 = *(struct df_mw_hardreg **)m1;
2267 const struct df_mw_hardreg *mw2 = *(struct df_mw_hardreg **)m2;
2268
2269 if (mw1 == mw2)
2270 return 0;
2271
2272 if (mw1->type != mw2->type)
2273 return mw1->type - mw2->type;
2274
2275 if (mw1->flags != mw2->flags)
2276 return mw1->flags - mw2->flags;
2277
2278 if (mw1->start_regno != mw2->start_regno)
2279 return mw1->start_regno - mw2->start_regno;
2280
2281 if (mw1->end_regno != mw2->end_regno)
2282 return mw1->end_regno - mw2->end_regno;
2283
2284 if (mw1->mw_reg != mw2->mw_reg)
2285 return mw1->mw_order - mw2->mw_order;
2286
2287 return 0;
2288 }
2289
2290
2291 /* Sort and compress a set of mws.  */
2292
2293 static unsigned int
2294 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2295 {
2296 struct df_scan_problem_data *problem_data
2297 = (struct df_scan_problem_data *) df_scan->problem_data;
2298 unsigned int i;
2299 unsigned int dist = 0;
2300 mw_vec[count] = NULL;
2301
2302 if (count < 2)
2303 return count;
2304 else if (count == 2)
2305 {
2306 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2307 {
2308 struct df_mw_hardreg *tmp = mw_vec[0];
2309 mw_vec[0] = mw_vec[1];
2310 mw_vec[1] = tmp;
2311 }
2312 }
2313 else
2314 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2315
2316 for (i=0; i<count-dist; i++)
2317 {
2318 /* Find the next ref that is not equal to the current ref. */
2319 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2320 {
2321 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2322 dist++;
2323 }
2324 /* Copy it down to the next position. */
2325 if (dist)
2326 mw_vec[i+1] = mw_vec[i + dist + 1];
2327 }
2328
2329 count -= dist;
2330 mw_vec[count] = NULL;
2331 return count;
2332 }
2333
2334
2335 /* Sort and remove duplicates from the COLLECTION_REC. */
2336
2337 static void
2338 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2339 {
2340 if (collection_rec->def_vec)
2341 collection_rec->next_def
2342 = df_sort_and_compress_refs (collection_rec->def_vec,
2343 collection_rec->next_def);
2344 if (collection_rec->use_vec)
2345 collection_rec->next_use
2346 = df_sort_and_compress_refs (collection_rec->use_vec,
2347 collection_rec->next_use);
2348 if (collection_rec->eq_use_vec)
2349 collection_rec->next_eq_use
2350 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2351 collection_rec->next_eq_use);
2352 if (collection_rec->mw_vec)
2353 collection_rec->next_mw
2354 = df_sort_and_compress_mws (collection_rec->mw_vec,
2355 collection_rec->next_mw);
2356 }
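
/* A typical scanning sequence, as a sketch: a collector routine fills
   a df_collection_rec, the record is canonized, and the refs are then
   committed to the insn and reg chains, e.g.

     df_insn_refs_collect (&collection_rec, bb, insn);
     df_refs_add_to_chains (&collection_rec, bb, insn);

   df_insn_refs_collect and df_bb_refs_collect already call
   df_canonize_collection_rec themselves, so a caller only canonizes
   again after editing the vectors by hand, as df_notes_rescan does
   above.  */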
2357
2358
2359 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2360
2361 static void
2362 df_install_ref (struct df_ref *this_ref,
2363 struct df_reg_info *reg_info,
2364 struct df_ref_info *ref_info,
2365 bool add_to_table)
2366 {
2367 unsigned int regno = DF_REF_REGNO (this_ref);
2368 /* Add the ref to the reg_{def,use,eq_use} chain. */
2369 struct df_ref *head = reg_info->reg_chain;
2370
2371 reg_info->reg_chain = this_ref;
2372 reg_info->n_refs++;
2373
2374 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2375 {
2376 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2377 df->hard_regs_live_count[regno]++;
2378 }
2379
2380 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2381 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2382
2383 DF_REF_NEXT_REG (this_ref) = head;
2384
2385 	  /* The chain head is REG_INFO itself, not a ref, so PREV_REG is NULL.  */
2386 DF_REF_PREV_REG (this_ref) = NULL;
2387
2388 if (head)
2389 DF_REF_PREV_REG (head) = this_ref;
2390
2391 if (add_to_table)
2392 {
2393 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2394 df_check_and_grow_ref_info (ref_info, 1);
2395 DF_REF_ID (this_ref) = ref_info->table_size;
2396 	      /* Add the ref to the big array of refs.  */
2397 ref_info->refs[ref_info->table_size] = this_ref;
2398 ref_info->table_size++;
2399 }
2400 else
2401 DF_REF_ID (this_ref) = -1;
2402
2403 ref_info->total_size++;
2404 }
2405
2406
2407 /* This function takes one of the groups of refs (defs, uses or
2408 eq_uses) and installs the entire group into the insn. It also adds
2409 each of these refs into the appropriate chains. */
2410
2411 static struct df_ref **
2412 df_install_refs (basic_block bb,
2413 struct df_ref **old_vec, unsigned int count,
2414 struct df_reg_info **reg_info,
2415 struct df_ref_info *ref_info,
2416 bool is_notes)
2417 {
2418 if (count)
2419 {
2420 unsigned int i;
2421 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2422 bool add_to_table;
2423
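      /* Installing refs one insn at a time invalidates any by-reg or
	 by-insn sorting of the big ref table, so the recorded order is
	 downgraded to the matching unordered kind; a client that needs
	 a sorted table must reorganize it afterwards.  */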
2424 switch (ref_info->ref_order)
2425 {
2426 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2427 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2428 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2429 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2430 add_to_table = true;
2431 break;
2432 case DF_REF_ORDER_UNORDERED:
2433 case DF_REF_ORDER_BY_REG:
2434 case DF_REF_ORDER_BY_INSN:
2435 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2436 add_to_table = !is_notes;
2437 break;
2438 default:
2439 add_to_table = false;
2440 break;
2441 }
2442
2443 /* Do not add if ref is not in the right blocks. */
2444 if (add_to_table && df->analyze_subset)
2445 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2446
2447 for (i = 0; i < count; i++)
2448 {
2449 struct df_ref *this_ref = old_vec[i];
2450 new_vec[i] = this_ref;
2451 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2452 ref_info, add_to_table);
2453 }
2454
2455 new_vec[count] = NULL;
2456 return new_vec;
2457 }
2458 else
2459 return df_null_ref_rec;
2460 }
2461
2462
2463 /* This function takes the mws and installs the entire group into
2464    the insn.  */
2465
2466 static struct df_mw_hardreg **
2467 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2468 {
2469 if (count)
2470 {
2471 struct df_mw_hardreg **new_vec
2472 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2473 memcpy (new_vec, old_vec,
2474 sizeof (struct df_mw_hardreg*) * (count + 1));
2475 return new_vec;
2476 }
2477 else
2478 return df_null_mw_rec;
2479 }
2480
2481
2482 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2483 chains and update other necessary information. */
2484
2485 static void
2486 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2487 basic_block bb, rtx insn)
2488 {
2489 if (insn)
2490 {
2491 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2492 /* If there is a vector in the collection rec, add it to the
2493 insn. A null rec is a signal that the caller will handle the
2494 chain specially. */
2495 if (collection_rec->def_vec)
2496 {
2497 if (insn_rec->defs && *insn_rec->defs)
2498 free (insn_rec->defs);
2499 insn_rec->defs
2500 = df_install_refs (bb, collection_rec->def_vec,
2501 collection_rec->next_def,
2502 df->def_regs,
2503 &df->def_info, false);
2504 }
2505 if (collection_rec->use_vec)
2506 {
2507 if (insn_rec->uses && *insn_rec->uses)
2508 free (insn_rec->uses);
2509 insn_rec->uses
2510 = df_install_refs (bb, collection_rec->use_vec,
2511 collection_rec->next_use,
2512 df->use_regs,
2513 &df->use_info, false);
2514 }
2515 if (collection_rec->eq_use_vec)
2516 {
2517 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2518 free (insn_rec->eq_uses);
2519 insn_rec->eq_uses
2520 = df_install_refs (bb, collection_rec->eq_use_vec,
2521 collection_rec->next_eq_use,
2522 df->eq_use_regs,
2523 &df->use_info, true);
2524 }
2525 if (collection_rec->mw_vec)
2526 {
2527 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2528 free (insn_rec->mw_hardregs);
2529 insn_rec->mw_hardregs
2530 = df_install_mws (collection_rec->mw_vec,
2531 collection_rec->next_mw);
2532 }
2533 }
2534 else
2535 {
2536 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2537
2538 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2539 free (bb_info->artificial_defs);
2540 bb_info->artificial_defs
2541 = df_install_refs (bb, collection_rec->def_vec,
2542 collection_rec->next_def,
2543 df->def_regs,
2544 &df->def_info, false);
2545 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2546 free (bb_info->artificial_uses);
2547 bb_info->artificial_uses
2548 = df_install_refs (bb, collection_rec->use_vec,
2549 collection_rec->next_use,
2550 df->use_regs,
2551 &df->use_info, false);
2552 }
2553 }
2554
2555
2556 /* Allocate a ref and initialize its fields. */
2557
2558 static struct df_ref *
2559 df_ref_create_structure (struct df_collection_rec *collection_rec,
2560 rtx reg, rtx *loc,
2561 basic_block bb, rtx insn,
2562 enum df_ref_type ref_type,
2563 enum df_ref_flags ref_flags)
2564 {
2565 struct df_ref *this_ref;
2566 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2567 struct df_scan_problem_data *problem_data
2568 = (struct df_scan_problem_data *) df_scan->problem_data;
2569
2570 this_ref = pool_alloc (problem_data->ref_pool);
2571 DF_REF_ID (this_ref) = -1;
2572 DF_REF_REG (this_ref) = reg;
2573 DF_REF_REGNO (this_ref) = regno;
2574 DF_REF_LOC (this_ref) = loc;
2575 DF_REF_INSN (this_ref) = insn;
2576 DF_REF_CHAIN (this_ref) = NULL;
2577 DF_REF_TYPE (this_ref) = ref_type;
2578 DF_REF_FLAGS (this_ref) = ref_flags;
2579 DF_REF_BB (this_ref) = bb;
2580 DF_REF_NEXT_REG (this_ref) = NULL;
2581 DF_REF_PREV_REG (this_ref) = NULL;
2582 DF_REF_ORDER (this_ref) = df->ref_order++;
2583
2584   /* We need to clear this bit because fwprop, and in the future
2585      possibly other optimizations, sometimes create new refs using old
2586      refs as the model.  */
2587 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2588
2589 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2590 if ((regno < FIRST_PSEUDO_REGISTER)
2591 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2592 {
2593 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2594 {
2595 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2596 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2597 }
2598 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2599 && (regno == FRAME_POINTER_REGNUM
2600 || regno == ARG_POINTER_REGNUM)))
2601 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2602 }
2603
2604 if (collection_rec)
2605 {
2606 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2607 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2608 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2609 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2610 else
2611 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2612 }
2613
2614 return this_ref;
2615 }
2616
2617
2618 /* Create new references of type DF_REF_TYPE for each part of register REG
2619 at address LOC within INSN of BB. */
2620
2621 static void
2622 df_ref_record (struct df_collection_rec *collection_rec,
2623 rtx reg, rtx *loc,
2624 basic_block bb, rtx insn,
2625 enum df_ref_type ref_type,
2626 enum df_ref_flags ref_flags)
2627 {
2628 rtx oldreg = reg;
2629 unsigned int regno;
2630
2631 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2632
2633 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2634 if (regno < FIRST_PSEUDO_REGISTER)
2635 {
2636 struct df_mw_hardreg *hardreg = NULL;
2637 struct df_scan_problem_data *problem_data
2638 = (struct df_scan_problem_data *) df_scan->problem_data;
2639 unsigned int i;
2640 unsigned int endregno;
2641 struct df_ref *ref;
2642
2643 if (GET_CODE (reg) == SUBREG)
2644 {
2645 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2646 SUBREG_BYTE (reg), GET_MODE (reg));
2647 endregno = regno + subreg_nregs (reg);
2648 }
2649 else
2650 endregno = END_HARD_REGNO (reg);
2651
2652 /* If this is a multiword hardreg, we create some extra
2653 datastructures that will enable us to easily build REG_DEAD
2654 and REG_UNUSED notes. */
2655 if ((endregno != regno + 1) && insn)
2656 {
2657 /* Sets to a subreg of a multiword register are partial.
2658 Sets to a non-subreg of a multiword register are not. */
2659 if (GET_CODE (oldreg) == SUBREG)
2660 ref_flags |= DF_REF_PARTIAL;
2661 ref_flags |= DF_REF_MW_HARDREG;
2662
2663 hardreg = pool_alloc (problem_data->mw_reg_pool);
2664 hardreg->type = ref_type;
2665 hardreg->flags = ref_flags;
2666 hardreg->mw_reg = reg;
2667 hardreg->loc = loc;
2668 hardreg->start_regno = regno;
2669 hardreg->end_regno = endregno - 1;
2670 hardreg->mw_order = df->ref_order++;
2671 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2672 }
2673
2674 for (i = regno; i < endregno; i++)
2675 {
2676 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2677 bb, insn, ref_type, ref_flags);
2678
2679 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2680 }
2681 }
2682 	  else
2683 	    df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2684 				     ref_type, ref_flags);
2688 }
2689
2690
2691 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2692 covered by the outer mode is smaller than that covered by the inner mode,
2693 is a read-modify-write operation.
2694 This function returns true iff the SUBREG X is such a SUBREG. */
2695
2696 bool
2697 df_read_modify_subreg_p (rtx x)
2698 {
2699 unsigned int isize, osize;
2700 if (GET_CODE (x) != SUBREG)
2701 return false;
2702 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2703 osize = GET_MODE_SIZE (GET_MODE (x));
2704 return (isize > osize && isize > UNITS_PER_WORD);
2705 }
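
/* As an illustration (assuming a 32-bit target where UNITS_PER_WORD
   is 4): writing (subreg:SI (reg:DI X) 0) is read-modify-write, since
   ISIZE (8) exceeds both OSIZE (4) and the word size, so the other
   word of X survives the store.  Writing (subreg:QI (reg:SI X) 0) is
   not, because ISIZE (4) is not larger than a word.  */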
2706
2707
2708 /* Process all the registers defined in the rtx, X.
2709 Autoincrement/decrement definitions will be picked up by
2710 df_uses_record. */
2711
2712 static void
2713 df_def_record_1 (struct df_collection_rec *collection_rec,
2714 rtx x, basic_block bb, rtx insn,
2715 enum df_ref_flags flags)
2716 {
2717 rtx *loc;
2718 rtx dst;
2719 bool dst_in_strict_lowpart = false;
2720
2721 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2722 construct. */
2723 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2724 loc = &XEXP (x, 0);
2725 else
2726 loc = &SET_DEST (x);
2727 dst = *loc;
2728
2729 /* It is legal to have a set destination be a parallel. */
2730 if (GET_CODE (dst) == PARALLEL)
2731 {
2732 int i;
2733
2734 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2735 {
2736 rtx temp = XVECEXP (dst, 0, i);
2737 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2738 || GET_CODE (temp) == SET)
2739 df_def_record_1 (collection_rec,
2740 temp, bb, insn,
2741 GET_CODE (temp) == CLOBBER
2742 ? flags | DF_REF_MUST_CLOBBER : flags);
2743 }
2744 return;
2745 }
2746
2747   /* Maybe we should flag the use of STRICT_LOW_PART somehow.  It might
2748      be handy for the reg allocator.  */
2749 while (GET_CODE (dst) == STRICT_LOW_PART
2750 || GET_CODE (dst) == ZERO_EXTRACT
2751 || df_read_modify_subreg_p (dst))
2752 {
2753 #if 0
2754 /* Strict low part always contains SUBREG, but we do not want to make
2755 it appear outside, as whole register is always considered. */
2756 if (GET_CODE (dst) == STRICT_LOW_PART)
2757 {
2758 loc = &XEXP (dst, 0);
2759 dst = *loc;
2760 }
2761 #endif
2762 loc = &XEXP (dst, 0);
2763 if (GET_CODE (dst) == STRICT_LOW_PART)
2764 dst_in_strict_lowpart = true;
2765 dst = *loc;
2766 flags |= DF_REF_READ_WRITE;
2768 }
2769
2770 	  /* Sets to a subreg of a single word register are partial sets if
2771 	     they are wrapped in a strict lowpart, and not partial otherwise.  */
2773 if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
2774 && dst_in_strict_lowpart)
2775 flags |= DF_REF_PARTIAL;
2776
2777 if (REG_P (dst)
2778 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
2779 df_ref_record (collection_rec,
2780 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2781 }
2782
2783
2784 /* Process all the registers defined in the pattern rtx, X. */
2785
2786 static void
2787 df_defs_record (struct df_collection_rec *collection_rec,
2788 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2789 {
2790 RTX_CODE code = GET_CODE (x);
2791
2792 if (code == SET || code == CLOBBER)
2793 {
2794 /* Mark the single def within the pattern. */
2795 enum df_ref_flags clobber_flags = flags;
2796 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2797 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2798 }
2799 else if (code == COND_EXEC)
2800 {
2801 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2802 bb, insn, DF_REF_CONDITIONAL);
2803 }
2804 else if (code == PARALLEL)
2805 {
2806 int i;
2807
2808 /* Mark the multiple defs within the pattern. */
2809 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2810 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2811 }
2812 }
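
/* For example, a pattern such as
     (parallel [(set (reg A) ...) (clobber (reg B))])
   arrives here as a PARALLEL: the loop above recurses into each
   element, recording A as a plain def and B as a must-clobber def.  */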
2813
2814
2815 /* Process all the registers used in the rtx at address LOC. */
2816
2817 static void
2818 df_uses_record (struct df_collection_rec *collection_rec,
2819 rtx *loc, enum df_ref_type ref_type,
2820 basic_block bb, rtx insn, enum df_ref_flags flags)
2821 {
2822 RTX_CODE code;
2823 rtx x;
2824
2825 retry:
2826 x = *loc;
2827 if (!x)
2828 return;
2829 code = GET_CODE (x);
2830 switch (code)
2831 {
2832 case LABEL_REF:
2833 case SYMBOL_REF:
2834 case CONST_INT:
2835 case CONST:
2836 case CONST_DOUBLE:
2837 case CONST_VECTOR:
2838 case PC:
2839 case CC0:
2840 case ADDR_VEC:
2841 case ADDR_DIFF_VEC:
2842 return;
2843
2844 case CLOBBER:
2845 /* If we are clobbering a MEM, mark any registers inside the address
2846 as being used. */
2847 if (MEM_P (XEXP (x, 0)))
2848 df_uses_record (collection_rec,
2849 &XEXP (XEXP (x, 0), 0),
2850 DF_REF_REG_MEM_STORE, bb, insn, flags);
2851
2852 /* If we're clobbering a REG then we have a def so ignore. */
2853 return;
2854
2855 case MEM:
2856 df_uses_record (collection_rec,
2857 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2858 bb, insn, flags & DF_REF_IN_NOTE);
2859 return;
2860
2861 case SUBREG:
2862 /* While we're here, optimize this case. */
2863 flags |= DF_REF_PARTIAL;
2864 /* In case the SUBREG is not of a REG, do not optimize. */
2865 if (!REG_P (SUBREG_REG (x)))
2866 {
2867 loc = &SUBREG_REG (x);
2868 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
2869 return;
2870 }
2871 /* ... Fall through ... */
2872
2873 case REG:
2874 df_ref_record (collection_rec,
2875 x, loc, bb, insn, ref_type, flags);
2876 return;
2877
2878 case SET:
2879 {
2880 rtx dst = SET_DEST (x);
2881 gcc_assert (!(flags & DF_REF_IN_NOTE));
2882 df_uses_record (collection_rec,
2883 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
2884
2885 switch (GET_CODE (dst))
2886 {
2887 case SUBREG:
2888 if (df_read_modify_subreg_p (dst))
2889 {
2890 df_uses_record (collection_rec, &SUBREG_REG (dst),
2891 DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
2892 break;
2893 }
2894 /* Fall through. */
2895 case REG:
2896 case PARALLEL:
2897 case SCRATCH:
2898 case PC:
2899 case CC0:
2900 break;
2901 case MEM:
2902 df_uses_record (collection_rec, &XEXP (dst, 0),
2903 DF_REF_REG_MEM_STORE, bb, insn, flags);
2904 break;
2905 case STRICT_LOW_PART:
2906 {
2907 rtx *temp = &XEXP (dst, 0);
2908 /* A strict_low_part uses the whole REG and not just the
2909 SUBREG. */
2910 dst = XEXP (dst, 0);
2911 df_uses_record (collection_rec,
2912 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2913 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2914 }
2915 break;
2916 case ZERO_EXTRACT:
2917 case SIGN_EXTRACT:
2918 df_uses_record (collection_rec, &XEXP (dst, 0),
2919 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2920 df_uses_record (collection_rec, &XEXP (dst, 1),
2921 DF_REF_REG_USE, bb, insn, flags);
2922 df_uses_record (collection_rec, &XEXP (dst, 2),
2923 DF_REF_REG_USE, bb, insn, flags);
2924 dst = XEXP (dst, 0);
2925 break;
2926 default:
2927 gcc_unreachable ();
2928 }
2929 return;
2930 }
2931
2932 case RETURN:
2933 break;
2934
2935 case ASM_OPERANDS:
2936 case UNSPEC_VOLATILE:
2937 case TRAP_IF:
2938 case ASM_INPUT:
2939 {
2940 /* Traditional and volatile asm instructions must be
2941 considered to use and clobber all hard registers, all
2942 pseudo-registers and all of memory. So must TRAP_IF and
2943 UNSPEC_VOLATILE operations.
2944
2945 Consider for instance a volatile asm that changes the fpu
2946 rounding mode. An insn should not be moved across this
2947 even if it only uses pseudo-regs because it might give an
2948 incorrectly rounded result.
2949
2950 However, flow.c's liveness computation did *not* do this,
2951 giving the reasoning as " ?!? Unfortunately, marking all
2952 hard registers as live causes massive problems for the
2953 register allocator and marking all pseudos as live creates
2954 mountains of uninitialized variable warnings."
2955
2956 	   In order to maintain the status quo with regard to liveness
2957 	   and uses, we do what flow.c did and just mark any regs we
2958 	   can find in ASM_OPERANDS as used.  In global.c, asm insns are
2959 	   scanned and regs_asm_clobbered is filled out.
2960
2961 	   For all ASM_OPERANDS, we must traverse the vector of input
2962 	   operands.  We cannot just fall through here since then we
2963 	   would be confused by the ASM_INPUT rtxes inside ASM_OPERANDS,
2964 	   which do not indicate traditional asms unlike their normal
2965 	   usage.  */
2966 if (code == ASM_OPERANDS)
2967 {
2968 int j;
2969
2970 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
2971 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
2972 DF_REF_REG_USE, bb, insn, flags);
2973 return;
2974 }
2975 break;
2976 }
2977
2978 case PRE_DEC:
2979 case POST_DEC:
2980 case PRE_INC:
2981 case POST_INC:
2982 case PRE_MODIFY:
2983 case POST_MODIFY:
2984 /* Catch the def of the register being modified. */
2985 flags |= DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY;
2986 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
2987 DF_REF_REG_DEF, flags);
2988
2989 /* ... Fall through to handle uses ... */
2990
2991 default:
2992 break;
2993 }
2994
2995 /* Recursively scan the operands of this expression. */
2996 {
2997 const char *fmt = GET_RTX_FORMAT (code);
2998 int i;
2999
3000 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3001 {
3002 if (fmt[i] == 'e')
3003 {
3004 /* Tail recursive case: save a function call level. */
3005 if (i == 0)
3006 {
3007 loc = &XEXP (x, 0);
3008 goto retry;
3009 }
3010 df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
3011 }
3012 else if (fmt[i] == 'E')
3013 {
3014 int j;
3015 for (j = 0; j < XVECLEN (x, i); j++)
3016 df_uses_record (collection_rec,
3017 &XVECEXP (x, i, j), ref_type, bb, insn, flags);
3018 }
3019 }
3020 }
3021
3022 return;
3023 }
3024
3025
3026 /* For all DF_REF_CONDITIONAL defs, add a corresponding use.  */
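/* A conditional def does not unconditionally kill the register: e.g.
   (cond_exec (ne (reg CC) (const_int 0)) (set (reg R) ...)) leaves
   R's old value in place when the predicate is false, so a matching
   use of R is added to keep that value live.  */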
3027
3028 static void
3029 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3030 {
3031 unsigned int i;
3032 for (i = 0; i < collection_rec->next_def; i++)
3033 {
3034 struct df_ref *ref = collection_rec->def_vec[i];
3035 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3036 {
3037 struct df_ref *use
3038 = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3039 DF_REF_LOC (ref), DF_REF_BB (ref),
3040 DF_REF_INSN (ref), DF_REF_REG_USE,
3041 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3042 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3043 }
3044 }
3045 }
3046
3047
3048 /* Get call's extra defs and uses. */
3049
3050 static void
3051 df_get_call_refs (struct df_collection_rec * collection_rec,
3052 basic_block bb,
3053 rtx insn,
3054 enum df_ref_flags flags)
3055 {
3056 rtx note;
3057 bitmap_iterator bi;
3058 unsigned int ui;
3059 bool is_sibling_call;
3060 unsigned int i;
3061 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3062
3063 /* Do not generate clobbers for registers that are the result of the
3064 call. This causes ordering problems in the chain building code
3065 depending on which def is seen first. */
3066 	  for (i = 0; i < collection_rec->next_def; i++)
3067 {
3068 struct df_ref *def = collection_rec->def_vec[i];
3069 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3070 }
3071
3072   /* Record the registers used to pass arguments and the registers
3073      explicitly noted as clobbered.  */
3074 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3075 note = XEXP (note, 1))
3076 {
3077 if (GET_CODE (XEXP (note, 0)) == USE)
3078 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3079 DF_REF_REG_USE, bb, insn, flags);
3080 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3081 {
3082 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3083 if (!bitmap_bit_p (defs_generated, regno))
3084 df_defs_record (collection_rec, XEXP (note, 0), bb, insn, flags);
3085 }
3086 }
3087
3088 /* The stack ptr is used (honorarily) by a CALL insn. */
3089 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3090 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
3091
3092 /* Calls may also reference any of the global registers,
3093 so they are recorded as used. */
3094 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3095 if (global_regs[i])
3096 df_ref_record (collection_rec, regno_reg_rtx[i],
3097 NULL, bb, insn, DF_REF_REG_USE, flags);
3098
3099 is_sibling_call = SIBLING_CALL_P (insn);
3100 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3101 {
3102 if ((!bitmap_bit_p (defs_generated, ui))
3103 && (!is_sibling_call
3104 || !bitmap_bit_p (df->exit_block_uses, ui)
3105 || refers_to_regno_p (ui, ui+1,
3106 current_function_return_rtx, NULL)))
3107
3108 df_ref_record (collection_rec, regno_reg_rtx[ui],
3109 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
3110 }
3111
3112 BITMAP_FREE (defs_generated);
3113 return;
3114 }
3115
3116 /* Collect all refs in the INSN.  This function is free of any
3117    side-effects - it will create and return a list of df_refs in the
3118    COLLECTION_REC without putting those refs into the existing ref
3119    chains and reg chains.  */
3120
3121 static void
3122 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3123 basic_block bb, rtx insn)
3124 {
3125 rtx note;
3126 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3127
3128 /* Clear out the collection record. */
3129 collection_rec->next_def = 0;
3130 collection_rec->next_use = 0;
3131 collection_rec->next_eq_use = 0;
3132 collection_rec->next_mw = 0;
3133
3134 /* Record register defs. */
3135 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3136
3137   /* Process REG_EQUIV/REG_EQUAL notes.  */
3138 for (note = REG_NOTES (insn); note;
3139 note = XEXP (note, 1))
3140 {
3141 switch (REG_NOTE_KIND (note))
3142 {
3143 case REG_EQUIV:
3144 case REG_EQUAL:
3145 df_uses_record (collection_rec,
3146 &XEXP (note, 0), DF_REF_REG_USE,
3147 bb, insn, DF_REF_IN_NOTE);
3148 break;
3149 case REG_NON_LOCAL_GOTO:
3150 /* The frame ptr is used by a non-local goto. */
3151 df_ref_record (collection_rec,
3152 regno_reg_rtx[FRAME_POINTER_REGNUM],
3153 NULL,
3154 bb, insn,
3155 DF_REF_REG_USE, 0);
3156 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3157 df_ref_record (collection_rec,
3158 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3159 NULL,
3160 bb, insn,
3161 DF_REF_REG_USE, 0);
3162 #endif
3163 break;
3164 default:
3165 break;
3166 }
3167 }
3168
3169 if (CALL_P (insn))
3170 df_get_call_refs (collection_rec, bb, insn,
3171 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3172
3173 /* Record the register uses. */
3174 df_uses_record (collection_rec,
3175 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
3176
3177 /* DF_REF_CONDITIONAL needs corresponding USES. */
3178 if (is_cond_exec)
3179 df_get_conditional_uses (collection_rec);
3180
3181 df_canonize_collection_rec (collection_rec);
3182 }
3183
3184 /* Return true if any incoming edge of BB is an EH edge.  */
3185
3186 bool
3187 df_has_eh_preds (basic_block bb)
3188 {
3189 edge e;
3190 edge_iterator ei;
3191
3192 FOR_EACH_EDGE (e, ei, bb->preds)
3193 {
3194 if (e->flags & EDGE_EH)
3195 return true;
3196 }
3197 return false;
3198 }
3199
3200
3201 /* Recompute the luids for the insns in BB. */
3202
3203 void
3204 df_recompute_luids (basic_block bb)
3205 {
3206 rtx insn;
3207 int luid = 0;
3208
3209 df_grow_insn_info ();
3210
3211 /* Scan the block an insn at a time from beginning to end. */
3212 FOR_BB_INSNS (bb, insn)
3213 {
3214 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3215 /* Inserting labels does not always trigger the incremental
3216 rescanning. */
3217 if (!insn_info)
3218 {
3219 gcc_assert (!INSN_P (insn));
3220 df_insn_create_insn_record (insn);
3221 }
3222
3223 DF_INSN_LUID (insn) = luid;
3224 if (INSN_P (insn))
3225 luid++;
3226 }
3227 }
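
/* LUIDs are therefore dense within a block: real insns get successive
   numbers, while notes and labels share the LUID of the following
   real insn.  */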
3228
3229
3230 /* Returns true if the function entry needs to
3231 define the static chain register. */
3232
3233 static bool
3234 df_need_static_chain_reg (struct function *fun)
3235 {
3236 tree fun_context = decl_function_context (fun->decl);
3237   return fun_context
3238 	 && !DECL_NO_STATIC_CHAIN (fun_context);
3239 }
3240
3241
3242 /* Collect all artificial refs at the block level for BB and add them
3243 to COLLECTION_REC. */
3244
3245 static void
3246 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3247 {
3248 collection_rec->next_def = 0;
3249 collection_rec->next_use = 0;
3250 collection_rec->next_eq_use = 0;
3251 collection_rec->next_mw = 0;
3252
3253 if (bb->index == ENTRY_BLOCK)
3254 {
3255 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3256 return;
3257 }
3258 else if (bb->index == EXIT_BLOCK)
3259 {
3260 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3261 return;
3262 }
3263
3264 #ifdef EH_RETURN_DATA_REGNO
3265 if (df_has_eh_preds (bb))
3266 {
3267 unsigned int i;
3268 /* Mark the registers that will contain data for the handler. */
3269 for (i = 0; ; ++i)
3270 {
3271 unsigned regno = EH_RETURN_DATA_REGNO (i);
3272 if (regno == INVALID_REGNUM)
3273 break;
3274 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3275 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3276 }
3277 }
3278 #endif
3279
3280
3281 #ifdef EH_USES
3282 if (df_has_eh_preds (bb))
3283 {
3284 unsigned int i;
3285 /* This code is putting in an artificial ref for the use at the
3286 TOP of the block that receives the exception. It is too
3287 cumbersome to actually put the ref on the edge. We could
3288 either model this at the top of the receiver block or the
3289 bottom of the sender block.
3290
3291 The bottom of the sender block is problematic because not all
3292 	 out-edges of a block are eh-edges.  However, it is true
3293 that all edges into a block are either eh-edges or none of
3294 them are eh-edges. Thus, we can model this at the top of the
3295 eh-receiver for all of the edges at once. */
3296 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3297 if (EH_USES (i))
3298 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3299 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
3300 }
3301 #endif
3302
3303 /* Add the hard_frame_pointer if this block is the target of a
3304 non-local goto. */
3305 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3306 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3307 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3308
3309 /* Add the artificial uses. */
3310 if (bb->index >= NUM_FIXED_BLOCKS)
3311 {
3312 bitmap_iterator bi;
3313 unsigned int regno;
3314 bitmap au = df_has_eh_preds (bb)
3315 ? df->eh_block_artificial_uses
3316 	  for (i = 0; i < count - dist; i++)
3317
3318 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3319 {
3320 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3321 bb, NULL, DF_REF_REG_USE, 0);
3322 }
3323 }
3324
3325 df_canonize_collection_rec (collection_rec);
3326 }
3327
3328
3329 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3330
3331 void
3332 df_bb_refs_record (int bb_index, bool scan_insns)
3333 {
3334 basic_block bb = BASIC_BLOCK (bb_index);
3335 rtx insn;
3336 int luid = 0;
3337 struct df_scan_bb_info *bb_info;
3338 struct df_collection_rec collection_rec;
3339 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3340 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3341 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3342 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
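  /* The scratch vectors above are fixed-size: the scanning code relies
     on no single collection producing more than 1000 refs of one kind
     or 100 mw hardregs.  */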
3343
3344 if (!df)
3345 return;
3346
3347 bb_info = df_scan_get_bb_info (bb_index);
3348
3349 /* Need to make sure that there is a record in the basic block info. */
3350 if (!bb_info)
3351 {
3352 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3353 df_scan_set_bb_info (bb_index, bb_info);
3354 bb_info->artificial_defs = NULL;
3355 bb_info->artificial_uses = NULL;
3356 }
3357
3358 if (scan_insns)
3359 /* Scan the block an insn at a time from beginning to end. */
3360 FOR_BB_INSNS (bb, insn)
3361 {
3362 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3363 gcc_assert (!insn_info);
3364
3365 df_insn_create_insn_record (insn);
3366 if (INSN_P (insn))
3367 {
3368 /* Record refs within INSN. */
3369 DF_INSN_LUID (insn) = luid++;
3370 df_insn_refs_collect (&collection_rec, bb, insn);
3371 df_refs_add_to_chains (&collection_rec, bb, insn);
3372 }
3373 DF_INSN_LUID (insn) = luid;
3374 }
3375
3376   /* Other block level artificial refs.  */
3377 df_bb_refs_collect (&collection_rec, bb);
3378 df_refs_add_to_chains (&collection_rec, bb, NULL);
3379
3380 /* Now that the block has been processed, set the block as dirty so
3381 lr and ur will get it processed. */
3382 df_set_bb_dirty (bb);
3383 }
3384
3385
3386 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3387 block. */
3388
3389 static void
3390 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3391 {
3392 bitmap_clear (regular_block_artificial_uses);
3393
3394 if (reload_completed)
3395 {
3396 if (frame_pointer_needed)
3397 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3398 }
3399 else
3400 /* Before reload, there are a few registers that must be forced
3401 live everywhere -- which might not already be the case for
3402 blocks within infinite loops. */
3403 {
3404 /* Any reference to any pseudo before reload is a potential
3405 reference of the frame pointer. */
3406 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3407
3408 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3409 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3410 #endif
3411
3412 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3413 /* Pseudos with argument area equivalences may require
3414 reloading via the argument pointer. */
3415 if (fixed_regs[ARG_POINTER_REGNUM])
3416 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3417 #endif
3418
3419 /* Any constant, or pseudo with constant equivalences, may
3420 require reloading from memory using the pic register. */
3421 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3422 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3423 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3424 }
3425 /* The all-important stack pointer must always be live. */
3426 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3427 }
3428
3429
3430 /* Get the artificial use set for an eh block. */
3431
3432 static void
3433 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3434 {
3435 bitmap_clear (eh_block_artificial_uses);
3436
3437   /* The following code (down through the arg_pointer setting) APPEARS
3438      to be necessary because there is nothing that actually
3439      describes what the exception handling code may actually need
3440      to keep alive.  */
3441 if (reload_completed)
3442 {
3443 if (frame_pointer_needed)
3444 {
3445 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3446 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3447 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3448 #endif
3449 }
3450 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3451 if (fixed_regs[ARG_POINTER_REGNUM])
3452 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3453 #endif
3454 }
3455 }
3456
3457
3458 \f
3459 /*----------------------------------------------------------------------------
3460 Specialized hard register scanning functions.
3461 ----------------------------------------------------------------------------*/
3462
3463
3464 /* Mark a register in SET. Hard registers in large modes get all
3465 of their component registers set as well. */
3466
3467 static void
3468 df_mark_reg (rtx reg, void *vset)
3469 {
3470 bitmap set = (bitmap) vset;
3471 int regno = REGNO (reg);
3472
3473 gcc_assert (GET_MODE (reg) != BLKmode);
3474
3475 bitmap_set_bit (set, regno);
3476 if (regno < FIRST_PSEUDO_REGISTER)
3477 {
3478 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3479 while (--n > 0)
3480 bitmap_set_bit (set, regno + n);
3481 }
3482 }
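
/* For instance, on a target where (reg:DI 0) occupies hard regs 0 and
   1, df_mark_reg sets both bits 0 and 1 in SET; a pseudo or a
   single-word hard reg gets only its own bit.  */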
3483
3484 
3487 /* Set the bit for regs that are considered to be defined at the entry.  */
3488
3489 static void
3490 df_get_entry_block_def_set (bitmap entry_block_defs)
3491 {
3492 rtx r;
3493 int i;
3494
3495 bitmap_clear (entry_block_defs);
3496
3497 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3498 {
3499 if (FUNCTION_ARG_REGNO_P (i))
3500 #ifdef INCOMING_REGNO
3501 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3502 #else
3503 bitmap_set_bit (entry_block_defs, i);
3504 #endif
3505 }
3506
3507 /* Once the prologue has been generated, all of these registers
3508 should just show up in the first regular block. */
3509 if (HAVE_prologue && epilogue_completed)
3510 {
3511 /* Defs for the callee saved registers are inserted so that the
3512 pushes have some defining location. */
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3514 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3515 bitmap_set_bit (entry_block_defs, i);
3516 }
3517 else
3518 {
3519 /* The always important stack pointer. */
3520 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3521
3522 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3523 only STATIC_CHAIN_REGNUM is defined. If they are different,
3524 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3525 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3526 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3527 #else
3528 #ifdef STATIC_CHAIN_REGNUM
3529 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3530 #endif
3531 #endif
3532
3533 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3534 if (r && REG_P (r))
3535 bitmap_set_bit (entry_block_defs, REGNO (r));
3536 }
3537
3538 if ((!reload_completed) || frame_pointer_needed)
3539 {
3540 /* Any reference to any pseudo before reload is a potential
3541 reference of the frame pointer. */
3542 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3543 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3544 /* If they are different, also mark the hard frame pointer as live. */
3545 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3546 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3547 #endif
3548 }
3549
3550 /* These registers are live everywhere. */
3551 if (!reload_completed)
3552 {
3553 #ifdef EH_USES
3554 /* The ia-64, the only machine that uses this, does not define these
3555 until after reload. */
3556 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3557 if (EH_USES (i))
3558 {
3559 bitmap_set_bit (entry_block_defs, i);
3560 }
3561 #endif
3562
3563 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3564 /* Pseudos with argument area equivalences may require
3565 reloading via the argument pointer. */
3566 if (fixed_regs[ARG_POINTER_REGNUM])
3567 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3568 #endif
3569
3570 #ifdef PIC_OFFSET_TABLE_REGNUM
3571 /* Any constant, or pseudo with constant equivalences, may
3572 require reloading from memory using the pic register. */
3573 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3574 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3575 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3576 #endif
3577 }
3578
3579 #ifdef INCOMING_RETURN_ADDR_RTX
3580 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3581 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3582 #endif
3583
3584 targetm.live_on_entry (entry_block_defs);
3585
3586 /* If the function has an incoming STATIC_CHAIN,
3587 it has to show up in the entry def set. */
3588 if (df_need_static_chain_reg (cfun))
3589 {
3590 #if !defined (STATIC_CHAIN_INCOMING_REGNUM) \
3591 || STATIC_CHAIN_REGNUM == STATIC_CHAIN_INCOMING_REGNUM
3592 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3593 #else
3594 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3595 #endif
3596 }
3597 }
3598
3599
3600 /* Return the (conservative) set of hard registers that are defined on
3601 entry to the function.
3602    It uses df->entry_block_defs to determine which register
3603    references to include.  */
3604
3605 static void
3606 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3607 bitmap entry_block_defs)
3608 {
3609 unsigned int i;
3610 bitmap_iterator bi;
3611
3612 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3613 {
3614 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3615 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3616 }
3617
3618 df_canonize_collection_rec (collection_rec);
3619 }
3620
3621
3622 /* Record the (conservative) set of hard registers that are defined on
3623 entry to the function. */
3624
3625 static void
3626 df_record_entry_block_defs (bitmap entry_block_defs)
3627 {
3628 struct df_collection_rec collection_rec;
3629 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3630 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3631
3632 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3633
3634   /* Process bb_refs chain.  */
3635 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3636 }
3637
3638
3639 /* Update the defs in the entry block.  */
3640
3641 void
3642 df_update_entry_block_defs (void)
3643 {
3644 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3645 bool changed = false;
3646
3647 df_get_entry_block_def_set (refs);
3648 if (df->entry_block_defs)
3649 {
3650 if (!bitmap_equal_p (df->entry_block_defs, refs))
3651 {
3652 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3653 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3654 df_ref_chain_delete (bb_info->artificial_defs);
3655 bb_info->artificial_defs = NULL;
3656 changed = true;
3657 }
3658 }
3659 else
3660 {
3661 struct df_scan_problem_data *problem_data
3662 = (struct df_scan_problem_data *) df_scan->problem_data;
3663 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3664 changed = true;
3665 }
3666
3667 if (changed)
3668 {
3669 df_record_entry_block_defs (refs);
3670 bitmap_copy (df->entry_block_defs, refs);
3671 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3672 }
3673 BITMAP_FREE (refs);
3674 }
3675
3676
3677 /* Set the bit for regs that are considered to be used at the exit.  */
3678
3679 static void
3680 df_get_exit_block_use_set (bitmap exit_block_uses)
3681 {
3682 unsigned int i;
3683
3684 bitmap_clear (exit_block_uses);
3685
3686 /* Stack pointer is always live at the exit. */
3687 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3688
3689 /* Mark the frame pointer if needed at the end of the function.
3690 If we end up eliminating it, it will be removed from the live
3691 list of each basic block by reload. */
3692
3693 if ((!reload_completed) || frame_pointer_needed)
3694 {
3695 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3696 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3697 /* If they are different, also mark the hard frame pointer as live. */
3698 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3699 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3700 #endif
3701 }
3702
3703 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3704 /* Many architectures have a GP register even without flag_pic.
3705 Assume the pic register is not in use, or will be handled by
3706 other means, if it is not fixed. */
3707 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3708 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3709 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3710 #endif
3711
3712 /* Mark all global registers, and all registers used by the
3713 epilogue as being live at the end of the function since they
3714 may be referenced by our caller. */
3715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3716 if (global_regs[i] || EPILOGUE_USES (i))
3717 bitmap_set_bit (exit_block_uses, i);
3718
3719 if (HAVE_epilogue && epilogue_completed)
3720 {
3721 /* Mark all call-saved registers that we actually used. */
3722 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3723 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3724 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3725 bitmap_set_bit (exit_block_uses, i);
3726 }
3727
3728 #ifdef EH_RETURN_DATA_REGNO
3729 /* Mark the registers that will contain data for the handler. */
3730 if (reload_completed && current_function_calls_eh_return)
3731 for (i = 0; ; ++i)
3732 {
3733 unsigned regno = EH_RETURN_DATA_REGNO (i);
3734 if (regno == INVALID_REGNUM)
3735 break;
3736 bitmap_set_bit (exit_block_uses, regno);
3737 }
3738 #endif
3739
3740 #ifdef EH_RETURN_STACKADJ_RTX
3741 if ((!HAVE_epilogue || ! epilogue_completed)
3742 && current_function_calls_eh_return)
3743 {
3744 rtx tmp = EH_RETURN_STACKADJ_RTX;
3745 if (tmp && REG_P (tmp))
3746 df_mark_reg (tmp, exit_block_uses);
3747 }
3748 #endif
3749
3750 #ifdef EH_RETURN_HANDLER_RTX
3751 if ((!HAVE_epilogue || ! epilogue_completed)
3752 && current_function_calls_eh_return)
3753 {
3754 rtx tmp = EH_RETURN_HANDLER_RTX;
3755 if (tmp && REG_P (tmp))
3756 df_mark_reg (tmp, exit_block_uses);
3757 }
3758 #endif
3759
3760 /* Mark function return value. */
3761 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3762 }
3763
3764
3765 /* Return the refs of hard registers that are used in the exit block.
3766    It uses df->exit_block_uses to determine which registers to include.  */
3767
3768 static void
3769 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3770 {
3771 unsigned int i;
3772 bitmap_iterator bi;
3773
3774 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3775 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3776 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3777
3778 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3779 /* It is deliberate that this is not put in the exit block uses but
3780 I do not know why. */
3781 if (reload_completed
3782 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3783 && df_has_eh_preds (EXIT_BLOCK_PTR)
3784 && fixed_regs[ARG_POINTER_REGNUM])
3785 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3786 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3787 #endif
3788
3789 df_canonize_collection_rec (collection_rec);
3790 }
3791
3792
3793 /* Record the set of hard registers that are used in the exit block.
3794 It uses df->exit_block_uses to determine which bit to include. */
3795
3796 static void
3797 df_record_exit_block_uses (bitmap exit_block_uses)
3798 {
3799 struct df_collection_rec collection_rec;
3800 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3801 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3802
3803 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3804
3805   /* Process bb_refs chain.  */
3806 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3807 }
3808
3809
3810 /* Update the uses in the exit block. */
3811
3812 void
3813 df_update_exit_block_uses (void)
3814 {
3815 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3816 bool changed = false;
3817
3818 df_get_exit_block_use_set (refs);
3819 if (df->exit_block_uses)
3820 {
3821 if (!bitmap_equal_p (df->exit_block_uses, refs))
3822 {
3823 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3824 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3825 df_ref_chain_delete (bb_info->artificial_uses);
3826 bb_info->artificial_uses = NULL;
3827 changed = true;
3828 }
3829 }
3830 else
3831 {
3832 struct df_scan_problem_data *problem_data
3833 = (struct df_scan_problem_data *) df_scan->problem_data;
3834 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3835 changed = true;
3836 }
3837
3838 if (changed)
3839 {
3840 df_record_exit_block_uses (refs);
3841 bitmap_copy (df->exit_block_uses, refs);
3842 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3843 }
3844 BITMAP_FREE (refs);
3845 }
3846
3847 static bool initialized = false;
3848
3849
3850 /* Initialize some platform-specific structures. */
3851
3852 void
3853 df_hard_reg_init (void)
3854 {
3855 int i;
3856 #ifdef ELIMINABLE_REGS
3857 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3858 #endif
3859 if (initialized)
3860 return;
3861
3862 bitmap_obstack_initialize (&persistent_obstack);
3863
3864 /* Record which registers will be eliminated. We use this in
3865 mark_used_regs. */
3866 CLEAR_HARD_REG_SET (elim_reg_set);
3867
3868 #ifdef ELIMINABLE_REGS
3869 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3870 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3871 #else
3872 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3873 #endif
3874
3875 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
3876
3877 /* Inconveniently, this is only readily available in hard reg set
3878 form. */
3879 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3880 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3881 bitmap_set_bit (df_invalidated_by_call, i);
3882
3883 initialized = true;
3884 }
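
/* A sketch of why the bitmap form is convenient (hypothetical
   consumer, not code from this file): dataflow problems can now
   combine the set with other register bitmaps directly, e.g.

     bitmap_ior_into (live_set, df_invalidated_by_call);

   where live_set is assumed to be some bitmap of register numbers;
   there is no such direct operation between a bitmap and a
   HARD_REG_SET. */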
3885
3886
3887 /* Recompute the parts of scanning that are based on regs_ever_live
3888 because something changed in that array. */
3889
3890 void
3891 df_update_entry_exit_and_calls (void)
3892 {
3893 basic_block bb;
3894
3895 df_update_entry_block_defs ();
3896 df_update_exit_block_uses ();
3897
3898 /* The call insns need to be rescanned because there may be changes
3899 in the set of registers clobbered across the call. */
3900 FOR_EACH_BB (bb)
3901 {
3902 rtx insn;
3903 FOR_BB_INSNS (bb, insn)
3904 {
3905 if (INSN_P (insn) && CALL_P (insn))
3906 df_insn_rescan (insn);
3907 }
3908 }
3909 }
3910
3911
3912 /* Return true if hard REG is actually used in some instruction.
3913 There are a fair number of conditions that affect the setting of
3914 this array. See the comment in df.h for df->hard_regs_live_count
3915 for the conditions under which this array is set. */
3916
3917 bool
3918 df_hard_reg_used_p (unsigned int reg)
3919 {
3920 gcc_assert (df);
3921 return df->hard_regs_live_count[reg] != 0;
3922 }
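
/* A minimal usage sketch (hypothetical caller, not part of this
   file): a pass deciding whether the hard frame pointer can be
   freed up might test

     if (!df_hard_reg_used_p (HARD_FRAME_POINTER_REGNUM))
       ...

   once dataflow information is available; the gcc_assert above
   guards against calling this with no active df instance. */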
3923
3924
3925 /* A count of the number of times REG is actually used in some
3926 instruction. There are a fair number of conditions that affect the
3927 setting of this array. See the comment in df.h for
3928 df->hard_regs_live_count for the conditions under which this array
3929 is set. */
3930
3931
3932 unsigned int
3933 df_hard_reg_used_count (unsigned int reg)
3934 {
3935 gcc_assert (df);
3936 return df->hard_regs_live_count[reg];
3937 }
3938
3939
3940 /* Get the value of regs_ever_live[REGNO]. */
3941
3942 bool
3943 df_regs_ever_live_p (unsigned int regno)
3944 {
3945 return regs_ever_live[regno];
3946 }
3947
3948
3949 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
3950 to change, schedule that change for the next update. */
3951
3952 void
3953 df_set_regs_ever_live (unsigned int regno, bool value)
3954 {
3955 if (regs_ever_live[regno] == value)
3956 return;
3957
3958 regs_ever_live[regno] = value;
3959 if (df)
3960 df->redo_entry_and_exit = true;
3961 }
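
/* A usage sketch (hypothetical caller): backend or pass code that
   starts using an additional hard register, say as a scratch, is
   expected to record that fact:

     df_set_regs_ever_live (REGNO (scratch_reg), true);

   where scratch_reg is assumed to be a hard REG rtx.  The expensive
   recomputation of the entry/exit information is deferred until
   df_compute_regs_ever_live below. */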
3962
3963
3964 /* Compute "regs_ever_live" information from the underlying df
3965 information. If RESET, first set the vector to all false. */
3966
3967 void
3968 df_compute_regs_ever_live (bool reset)
3969 {
3970 unsigned int i;
3971 bool changed = df->redo_entry_and_exit;
3972
3973 if (reset)
3974 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3975
3976 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3977 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3978 {
3979 regs_ever_live[i] = true;
3980 changed = true;
3981 }
3982 if (changed)
3983 df_update_entry_exit_and_calls ();
3984 df->redo_entry_and_exit = false;
3985 }
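
/* A usage sketch (hypothetical): a pass that may have changed which
   hard registers are referenced can rebuild regs_ever_live from
   scratch with

     df_compute_regs_ever_live (true);

   or pass false to only add newly used registers; with RESET false,
   bits that are already set in regs_ever_live are never cleared. */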
3986
3987 \f
3988 /*----------------------------------------------------------------------------
3989 Dataflow ref information verification functions.
3990
3991 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3992 df_reg_chain_verify_unmarked (refs)
3993 df_refs_verify (ref*, ref*, bool)
3994 df_mws_verify (mw*, mw*, bool)
3995 df_insn_refs_verify (collection_rec, bb, insn, bool)
3996 df_bb_refs_verify (bb, refs, bool)
3997 df_bb_verify (bb)
3998 df_exit_block_bitmap_verify (bool)
3999 df_entry_block_bitmap_verify (bool)
4000 df_scan_verify ()
4001 ----------------------------------------------------------------------------*/
4002
4003
4004 /* Mark all refs in the reg chain. Verify that all of the refs are
4005 in the correct chain. */
4006
4007 static unsigned int
4008 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4009 bool is_def, bool is_eq_use)
4010 {
4011 unsigned int count = 0;
4012 struct df_ref *ref;
4013 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4014 {
4015 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4016
4017 /* If there are no def-use or use-def chains, make sure that all
4018 of the chains are clear. */
4019 if (!df_chain)
4020 gcc_assert (!DF_REF_CHAIN (ref));
4021
4022 /* Check to make sure the ref is in the correct chain. */
4023 gcc_assert (DF_REF_REGNO (ref) == regno);
4024 if (is_def)
4025 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4026 else
4027 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4028
4029 if (is_eq_use)
4030 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4031 else
4032 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4033
4034 if (ref->next_reg)
4035 gcc_assert (ref->next_reg->prev_reg == ref);
4036 count++;
4037 DF_REF_REG_MARK (ref);
4038 }
4039 return count;
4040 }
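
/* The invariants checked above, pictured for a chain of three refs
   on the same register (illustrative sketch only):

     refs --> ref1 <--> ref2 <--> ref3 --> NULL

   next_reg links run left to right, prev_reg links run right to
   left, and every ref must agree with the chain on the regno, the
   def/use kind and the DF_REF_IN_NOTE flag. */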
4041
4042
4043 /* Verify that all of the registers in the chain are unmarked. */
4044
4045 static void
4046 df_reg_chain_verify_unmarked (struct df_ref *refs)
4047 {
4048 struct df_ref *ref;
4049 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4050 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4051 }
4052
4053
4054 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4055
4056 static bool
4057 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4058 bool abort_if_fail)
4059 {
4060 while ((*new_rec) && (*old_rec))
4061 {
4062 if (!df_ref_equal_p (*new_rec, *old_rec))
4063 {
4064 if (abort_if_fail)
4065 gcc_assert (0);
4066 else
4067 return false;
4068 }
4069
4070 /* ABORT_IF_FAIL is only set when called from the function level
4071 verifier. In that context, clear the mark to record this ref as seen. */
4072 if (abort_if_fail)
4073 {
4074 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4075 DF_REF_REG_UNMARK (*old_rec);
4076 }
4077
4078 new_rec++;
4079 old_rec++;
4080 }
4081
4082 if (abort_if_fail)
4083 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4084 else
4085 return ((*new_rec == NULL) && (*old_rec == NULL));
4086 return false;
4087 }
4088
4089
4090 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4091
4092 static bool
4093 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4094 bool abort_if_fail)
4095 {
4096 while ((*new_rec) && (*old_rec))
4097 {
4098 if (!df_mw_equal_p (*new_rec, *old_rec))
4099 {
4100 if (abort_if_fail)
4101 gcc_assert (0);
4102 else
4103 return false;
4104 }
4105 new_rec++;
4106 old_rec++;
4107 }
4108
4109 if (abort_if_fail)
4110 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4111 else
4112 return ((*new_rec == NULL) && (*old_rec == NULL));
4113 return false;
4114 }
4115
4116
4117 /* Return true if the existing insn refs information is complete and
4118 correct. Otherwise (i.e. if there are any missing or extra refs),
4119 return false; the correct refs will have been collected in
4120 COLLECTION_REC.
4121
4122 If ABORT_IF_FAIL, each verified ref has its DF_REF_MARK bit (set
4123 earlier by df_reg_chain_mark) cleared so that stray refs can be
4124 detected afterwards; this is the whole-function verification mode.
4125 If it's false, this is a per-insn verification and no marks are
4126 touched. If ABORT_IF_FAIL is set, this function never returns false. */
4127
4128 static bool
4129 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4130 basic_block bb,
4131 rtx insn,
4132 bool abort_if_fail)
4133 {
4134 bool ret1, ret2, ret3, ret4;
4135 unsigned int uid = INSN_UID (insn);
4136
4137 df_insn_refs_collect (collection_rec, bb, insn);
4138
4139 if (!DF_INSN_UID_DEFS (uid))
4140 {
4141 /* The insn_rec was created but it was never filled out. */
4142 if (abort_if_fail)
4143 gcc_assert (0);
4144 else
4145 return false;
4146 }
4147
4148 /* Unfortunately we cannot bail out early if one of these is not
4149 right, because the marks would not get cleared. */
4150 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4151 abort_if_fail);
4152 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4153 abort_if_fail);
4154 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4155 abort_if_fail);
4156 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4157 abort_if_fail);
4158 return (ret1 && ret2 && ret3 && ret4);
4159 }
4160
4161
4162 /* Return true if all refs in the basic block are correct and complete.
4163 Due to df_refs_verify, each ref that is verified will have its
4164 DF_REF_MARK bit cleared. */
4165
4166 static bool
4167 df_bb_verify (basic_block bb)
4168 {
4169 rtx insn;
4170 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4171 struct df_collection_rec collection_rec;
4172
4173 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4174 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
4175 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
4176 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
4177 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
4178
4179 gcc_assert (bb_info);
4180
4181 /* Scan the block, an insn at a time, from end to beginning. */
4182 FOR_BB_INSNS_REVERSE (bb, insn)
4183 {
4184 if (!INSN_P (insn))
4185 continue;
4186 df_insn_refs_verify (&collection_rec, bb, insn, true);
4187 df_free_collection_rec (&collection_rec);
4188 }
4189
4190 /* Do the artificial defs and uses. */
4191 df_bb_refs_collect (&collection_rec, bb);
4192 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4193 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4194 df_free_collection_rec (&collection_rec);
4195
4196 return true;
4197 }
4198
4199
4200 /* Returns true if the entry block has a correct and complete df_ref set.
4201 If not, it aborts if ABORT_IF_FAIL is true, otherwise returns false. */
4202
4203 static bool
4204 df_entry_block_bitmap_verify (bool abort_if_fail)
4205 {
4206 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4207 bool is_eq;
4208
4209 df_get_entry_block_def_set (entry_block_defs);
4210
4211 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4212
4213 if (!is_eq && abort_if_fail)
4214 {
4215 print_current_pass (stderr);
4216 fprintf (stderr, "entry_block_defs = ");
4217 df_print_regset (stderr, entry_block_defs);
4218 fprintf (stderr, "df->entry_block_defs = ");
4219 df_print_regset (stderr, df->entry_block_defs);
4220 gcc_assert (0);
4221 }
4222
4223 BITMAP_FREE (entry_block_defs);
4224
4225 return is_eq;
4226 }
4227
4228
4229 /* Returns true if the exit block has a correct and complete df_ref set.
4230 If not, it aborts if ABORT_IF_FAIL is true, otherwise returns false. */
4231
4232 static bool
4233 df_exit_block_bitmap_verify (bool abort_if_fail)
4234 {
4235 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4236 bool is_eq;
4237
4238 df_get_exit_block_use_set (exit_block_uses);
4239
4240 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4241
4242 if (!is_eq && abort_if_fail)
4243 {
4244 print_current_pass (stderr);
4245 fprintf (stderr, "exit_block_uses = ");
4246 df_print_regset (stderr, exit_block_uses);
4247 fprintf (stderr, "df->exit_block_uses = ");
4248 df_print_regset (stderr, df->exit_block_uses);
4249 gcc_assert (0);
4250 }
4251
4252 BITMAP_FREE (exit_block_uses);
4253
4254 return is_eq;
4255 }
4256
4257
4258 /* Verify that the df_ref information for all insns in all blocks is
4259 correct and complete. Aborts via gcc_assert on any
4260 inconsistency. */
4261
4262 void
4263 df_scan_verify (void)
4264 {
4265 unsigned int i;
4266 basic_block bb;
4267 bitmap regular_block_artificial_uses;
4268 bitmap eh_block_artificial_uses;
4269
4270 if (!df)
4271 return;
4272
4273 /* This is a hack, but a necessary one. If you do not do this,
4274 insn_attrtab can never be compiled in a bootstrap. This
4275 verification is just too expensive. */
4276 if (n_basic_blocks > 250)
4277 return;
4278
4279 /* Verification is a four-step process. */
4280
4281 /* (1) All of the refs are marked by going through the reg chains. */
4282 for (i = 0; i < DF_REG_SIZE (df); i++)
4283 {
4284 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4285 == DF_REG_DEF_COUNT(i));
4286 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4287 == DF_REG_USE_COUNT(i));
4288 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4289 == DF_REG_EQ_USE_COUNT(i));
4290 }
4291
4292 /* (2) There are various bitmaps whose value may change over the
4293 course of the compilation. This step recomputes them to make
4294 sure that they have not slipped out of date. */
4295 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4296 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4297
4298 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4299 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4300
4301 bitmap_ior_into (eh_block_artificial_uses,
4302 regular_block_artificial_uses);
4303
4304 /* Check that the artificial_uses bitmaps did not change. */
4305 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4306 df->regular_block_artificial_uses));
4307 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4308 df->eh_block_artificial_uses));
4309
4310 BITMAP_FREE (regular_block_artificial_uses);
4311 BITMAP_FREE (eh_block_artificial_uses);
4312
4313 /* Verify entry block and exit block. These only verify the bitmaps;
4314 the refs are verified in df_bb_verify. */
4315 df_entry_block_bitmap_verify (true);
4316 df_exit_block_bitmap_verify (true);
4317
4318 /* (3) All of the insns in all of the blocks are traversed and the
4319 marks are cleared both in the artificial refs attached to the
4320 blocks and the real refs inside the insns. It is a failure to
4321 clear a mark that has not been set as this means that the ref in
4322 the block or insn was not in the reg chain. */
4323
4324 FOR_ALL_BB (bb)
4325 df_bb_verify (bb);
4326
4327 /* (4) All of the reg chains are traversed a second time. This time
4328 a check is made that the marks are clear. A set mark would be from
4329 a ref that is not in any insn or basic block. */
4330
4331 for (i = 0; i < DF_REG_SIZE (df); i++)
4332 {
4333 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4334 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4335 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4336 }
4337 }
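
/* A usage sketch (hypothetical caller): a pass that rescans insns by
   hand can sanity-check its df bookkeeping with

     #ifdef ENABLE_CHECKING
     df_scan_verify ();
     #endif

   This is a no-op when df is not active (or when the function is very
   large) and otherwise aborts via gcc_assert on any inconsistency. */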