761ba79121ce026ec7ffc95c20b4aa05f136707f
[gcc.git] / gcc / df-scan.c
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "timevar.h"
43 #include "tree.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47 #include "tree-pass.h"
48
49 #ifndef HAVE_epilogue
50 #define HAVE_epilogue 0
51 #endif
52 #ifndef HAVE_prologue
53 #define HAVE_prologue 0
54 #endif
55 #ifndef HAVE_sibcall_epilogue
56 #define HAVE_sibcall_epilogue 0
57 #endif
58
59 #ifndef EPILOGUE_USES
60 #define EPILOGUE_USES(REGNO) 0
61 #endif
62
63 /* The following two macros free the vecs that hold either the refs or
64 the mw refs. They are a little tricky because a vec with 0
65 elements is special (it is shared) and is not to be freed. */
66 #define df_scan_free_ref_vec(V) \
67 do { \
68 if (V && *V) \
69 free (V); \
70 } while (0)
71
72 #define df_scan_free_mws_vec(V) \
73 do { \
74 if (V && *V) \
75 free (V); \
76 } while (0)
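
/* Illustrative sketch, not in the original file: ref and mw vecs are
   NULL-terminated, and an empty vec is the shared df_null_ref_rec /
   df_null_mw_rec element declared below, which is why the macros
   above only free a vec whose first element is non-NULL.  A typical
   walk over such a vec: */
#if 0
static unsigned int
df_example_vec_length (df_ref *vec)
{
  unsigned int count = 0;
  /* Safe on the shared empty vec too; it still ends in NULL.  */
  while (*vec)
    {
      count++;
      vec++;
    }
  return count;
}
#endif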
77
78 /* The bitmap_obstack is used to hold some static variables that
79 should not be reset after each function is compiled. */
80
81 static bitmap_obstack persistent_obstack;
82
83 /* The set of hard registers in eliminables[i].from. */
84
85 static HARD_REG_SET elim_reg_set;
86
87 /* This is a bitmap copy of regs_invalidated_by_call so that we can
88 easily add it into bitmaps, etc. */
89
90 bitmap df_invalidated_by_call = NULL;
91
92 /* A df_collection_rec holds the refs and multiword hardregs that are
93 collected while scanning an insn, before they are installed. */
94
95 struct df_collection_rec
96 {
97 df_ref * def_vec;
98 unsigned int next_def;
99 df_ref * use_vec;
100 unsigned int next_use;
101 df_ref * eq_use_vec;
102 unsigned int next_eq_use;
103 struct df_mw_hardreg **mw_vec;
104 unsigned int next_mw;
105 };
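
/* Illustrative sketch, not in the original file: this mirrors the
   pattern used by df_insn_rescan below to fill a collection rec and
   install it; it assumes INSN already has an insn_info record.  */
#if 0
static void
df_example_collect_and_install (basic_block bb, rtx insn)
{
  struct df_collection_rec collection_rec;
  collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);

  /* Scanning fills the vecs and bumps the next_* counters ...  */
  df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
  /* ... and installation moves the refs into the permanent chains
     and tables.  */
  df_refs_add_to_chains (&collection_rec, bb, insn);
}
#endif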
106
107 static df_ref df_null_ref_rec[1];
108 static struct df_mw_hardreg * df_null_mw_rec[1];
109
110 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
111 rtx, rtx *,
112 basic_block, struct df_insn_info *,
113 enum df_ref_type, enum df_ref_flags,
114 int, int, enum machine_mode);
115 static void df_def_record_1 (struct df_collection_rec *, rtx,
116 basic_block, struct df_insn_info *,
117 enum df_ref_flags);
118 static void df_defs_record (struct df_collection_rec *, rtx,
119 basic_block, struct df_insn_info *,
120 enum df_ref_flags);
121 static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
122 rtx *, enum df_ref_type,
123 basic_block, struct df_insn_info *,
124 enum df_ref_flags,
125 int, int, enum machine_mode);
126
127 static df_ref df_ref_create_structure (enum df_ref_class,
128 struct df_collection_rec *, rtx, rtx *,
129 basic_block, struct df_insn_info *,
130 enum df_ref_type, enum df_ref_flags,
131 int, int, enum machine_mode);
132
133 static void df_insn_refs_collect (struct df_collection_rec*,
134 basic_block, struct df_insn_info *);
135 static void df_canonize_collection_rec (struct df_collection_rec *);
136
137 static void df_get_regular_block_artificial_uses (bitmap);
138 static void df_get_eh_block_artificial_uses (bitmap);
139
140 static void df_record_entry_block_defs (bitmap);
141 static void df_record_exit_block_uses (bitmap);
142 static void df_get_exit_block_use_set (bitmap);
143 static void df_get_entry_block_def_set (bitmap);
144 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
145 static void df_ref_chain_delete_du_chain (df_ref *);
146 static void df_ref_chain_delete (df_ref *);
147
148 static void df_refs_add_to_chains (struct df_collection_rec *,
149 basic_block, rtx);
150
151 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
152 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
153 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
154 static void df_install_ref (df_ref, struct df_reg_info *,
155 struct df_ref_info *, bool);
156
157 static int df_ref_compare (const void *, const void *);
158 static int df_mw_compare (const void *, const void *);
159
160 /* Indexed by hardware reg number, is true if that register is ever
161 used in the current function.
162
163 In df-scan.c, this is set up to record the hard regs used
164 explicitly. Reload adds in the hard regs used for holding pseudo
165 regs. Final uses it to generate the code in the function prologue
166 and epilogue to save and restore registers as needed. */
167
168 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
169 \f
170 /*----------------------------------------------------------------------------
171 SCANNING DATAFLOW PROBLEM
172
173 There are several ways in which scanning looks just like the other
174 dataflow problems. It shares all of the mechanisms for local info
175 as well as basic block info. Where it differs is when and how often
176 it gets run. It also has no need for the iterative solver.
177 ----------------------------------------------------------------------------*/
178
179 /* Problem data for the scanning dataflow function. */
180 struct df_scan_problem_data
181 {
182 alloc_pool ref_base_pool;
183 alloc_pool ref_artificial_pool;
184 alloc_pool ref_regular_pool;
185 alloc_pool ref_extract_pool;
186 alloc_pool insn_pool;
187 alloc_pool reg_pool;
188 alloc_pool mw_reg_pool;
189 bitmap_obstack reg_bitmaps;
190 bitmap_obstack insn_bitmaps;
191 };
192
193 typedef struct df_scan_bb_info *df_scan_bb_info_t;
194
195
196 /* Internal function to shut down the scanning problem. */
197 static void
198 df_scan_free_internal (void)
199 {
200 struct df_scan_problem_data *problem_data
201 = (struct df_scan_problem_data *) df_scan->problem_data;
202 unsigned int i;
203 basic_block bb;
204
205 /* The vectors that hold the refs are not pool allocated because
206 they come in many sizes. This makes them impossible to delete
207 all at once. */
208 for (i = 0; i < DF_INSN_SIZE(); i++)
209 {
210 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
211 /* Skip the insns that have no insn_info or have been
212 deleted. */
213 if (insn_info)
214 {
215 df_scan_free_ref_vec (insn_info->defs);
216 df_scan_free_ref_vec (insn_info->uses);
217 df_scan_free_ref_vec (insn_info->eq_uses);
218 df_scan_free_mws_vec (insn_info->mw_hardregs);
219 }
220 }
221
222 FOR_ALL_BB (bb)
223 {
224 unsigned int bb_index = bb->index;
225 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
226 if (bb_info)
227 {
228 df_scan_free_ref_vec (bb_info->artificial_defs);
229 df_scan_free_ref_vec (bb_info->artificial_uses);
230 }
231 }
232
233 free (df->def_info.refs);
234 free (df->def_info.begin);
235 free (df->def_info.count);
236 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
237
238 free (df->use_info.refs);
239 free (df->use_info.begin);
240 free (df->use_info.count);
241 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
242
243 free (df->def_regs);
244 df->def_regs = NULL;
245 free (df->use_regs);
246 df->use_regs = NULL;
247 free (df->eq_use_regs);
248 df->eq_use_regs = NULL;
249 df->regs_size = 0;
250 DF_REG_SIZE(df) = 0;
251
252 free (df->insns);
253 df->insns = NULL;
254 DF_INSN_SIZE () = 0;
255
256 free (df_scan->block_info);
257 df_scan->block_info = NULL;
258 df_scan->block_info_size = 0;
259
260 BITMAP_FREE (df->hardware_regs_used);
261 BITMAP_FREE (df->regular_block_artificial_uses);
262 BITMAP_FREE (df->eh_block_artificial_uses);
263 BITMAP_FREE (df->entry_block_defs);
264 BITMAP_FREE (df->exit_block_uses);
265 BITMAP_FREE (df->insns_to_delete);
266 BITMAP_FREE (df->insns_to_rescan);
267 BITMAP_FREE (df->insns_to_notes_rescan);
268
269 free_alloc_pool (df_scan->block_pool);
270 free_alloc_pool (problem_data->ref_base_pool);
271 free_alloc_pool (problem_data->ref_artificial_pool);
272 free_alloc_pool (problem_data->ref_regular_pool);
273 free_alloc_pool (problem_data->ref_extract_pool);
274 free_alloc_pool (problem_data->insn_pool);
275 free_alloc_pool (problem_data->reg_pool);
276 free_alloc_pool (problem_data->mw_reg_pool);
277 bitmap_obstack_release (&problem_data->reg_bitmaps);
278 bitmap_obstack_release (&problem_data->insn_bitmaps);
279 free (df_scan->problem_data);
280 }
281
282
283 /* Set basic block info. */
284
285 static void
286 df_scan_set_bb_info (unsigned int index,
287 struct df_scan_bb_info *bb_info)
288 {
289 gcc_assert (df_scan);
290 df_grow_bb_info (df_scan);
291 df_scan->block_info[index] = (void *) bb_info;
292 }
293
294
295 /* Free basic block info. */
296
297 static void
298 df_scan_free_bb_info (basic_block bb, void *vbb_info)
299 {
300 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
301 unsigned int bb_index = bb->index;
302 if (bb_info)
303 {
304 rtx insn;
305 FOR_BB_INSNS (bb, insn)
306 {
307 if (INSN_P (insn))
308 /* Delete the refs recorded for INSN. */
309 df_insn_delete (bb, INSN_UID (insn));
310 }
311
312 if (bb_index < df_scan->block_info_size)
313 bb_info = df_scan_get_bb_info (bb_index);
314
315 /* Get rid of any artificial uses or defs. */
316 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
317 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
318 df_ref_chain_delete (bb_info->artificial_defs);
319 df_ref_chain_delete (bb_info->artificial_uses);
320 bb_info->artificial_defs = NULL;
321 bb_info->artificial_uses = NULL;
322 pool_free (df_scan->block_pool, bb_info);
323 }
324 }
325
326
327 /* Allocate the problem data for the scanning problem. This should be
328 called when the problem is created or when the entire function is to
329 be rescanned. */
330 void
331 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
332 {
333 struct df_scan_problem_data *problem_data;
334 unsigned int insn_num = get_max_uid () + 1;
335 unsigned int block_size = 400;
336 basic_block bb;
337
338 /* Given the number of pools, this is really faster than tearing
339 everything apart. */
340 if (df_scan->problem_data)
341 df_scan_free_internal ();
342
343 df_scan->block_pool
344 = create_alloc_pool ("df_scan_block pool",
345 sizeof (struct df_scan_bb_info),
346 block_size);
347
348 problem_data = XNEW (struct df_scan_problem_data);
349 df_scan->problem_data = problem_data;
350 df_scan->computed = true;
351
352 problem_data->ref_base_pool
353 = create_alloc_pool ("df_scan ref base",
354 sizeof (struct df_base_ref), block_size);
355 problem_data->ref_artificial_pool
356 = create_alloc_pool ("df_scan ref artificial",
357 sizeof (struct df_artificial_ref), block_size);
358 problem_data->ref_regular_pool
359 = create_alloc_pool ("df_scan ref regular",
360 sizeof (struct df_regular_ref), block_size);
361 problem_data->ref_extract_pool
362 = create_alloc_pool ("df_scan ref extract",
363 sizeof (struct df_extract_ref), block_size);
364 problem_data->insn_pool
365 = create_alloc_pool ("df_scan insn",
366 sizeof (struct df_insn_info), block_size);
367 problem_data->reg_pool
368 = create_alloc_pool ("df_scan reg",
369 sizeof (struct df_reg_info), block_size);
370 problem_data->mw_reg_pool
371 = create_alloc_pool ("df_scan mw_reg",
372 sizeof (struct df_mw_hardreg), block_size);
373
374 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
375 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
376
377 insn_num += insn_num / 4;
378 df_grow_reg_info ();
379
380 df_grow_insn_info ();
381 df_grow_bb_info (df_scan);
382
383 FOR_ALL_BB (bb)
384 {
385 unsigned int bb_index = bb->index;
386 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
387 if (!bb_info)
388 {
389 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
390 df_scan_set_bb_info (bb_index, bb_info);
391 }
392 bb_info->artificial_defs = NULL;
393 bb_info->artificial_uses = NULL;
394 }
395
396 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
397 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
398 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
399 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
400 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
401 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
402 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
403 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
404 df_scan->optional_p = false;
405 }
406
407
408 /* Free all of the data associated with the scan problem. */
409
410 static void
411 df_scan_free (void)
412 {
413 if (df_scan->problem_data)
414 df_scan_free_internal ();
415
416 if (df->blocks_to_analyze)
417 {
418 BITMAP_FREE (df->blocks_to_analyze);
419 df->blocks_to_analyze = NULL;
420 }
421
422 free (df_scan);
423 }
424
425 /* Dump the preamble for DF_SCAN dump. */
426 static void
427 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
428 {
429 int i;
430 int dcount = 0;
431 int ucount = 0;
432 int ecount = 0;
433 int icount = 0;
434 int ccount = 0;
435 basic_block bb;
436 rtx insn;
437
438 fprintf (file, ";; invalidated by call \t");
439 df_print_regset (file, df_invalidated_by_call);
440 fprintf (file, ";; hardware regs used \t");
441 df_print_regset (file, df->hardware_regs_used);
442 fprintf (file, ";; regular block artificial uses \t");
443 df_print_regset (file, df->regular_block_artificial_uses);
444 fprintf (file, ";; eh block artificial uses \t");
445 df_print_regset (file, df->eh_block_artificial_uses);
446 fprintf (file, ";; entry block defs \t");
447 df_print_regset (file, df->entry_block_defs);
448 fprintf (file, ";; exit block uses \t");
449 df_print_regset (file, df->exit_block_uses);
450 fprintf (file, ";; regs ever live \t");
451 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
452 if (df_regs_ever_live_p (i))
453 fprintf (file, " %d[%s]", i, reg_names[i]);
454 fprintf (file, "\n;; ref usage \t");
455
456 for (i = 0; i < (int)df->regs_inited; i++)
457 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
458 {
459 const char * sep = "";
460
461 fprintf (file, "r%d={", i);
462 if (DF_REG_DEF_COUNT (i))
463 {
464 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
465 sep = ",";
466 dcount += DF_REG_DEF_COUNT (i);
467 }
468 if (DF_REG_USE_COUNT (i))
469 {
470 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
471 sep = ",";
472 ucount += DF_REG_USE_COUNT (i);
473 }
474 if (DF_REG_EQ_USE_COUNT (i))
475 {
476 fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
477 ecount += DF_REG_EQ_USE_COUNT (i);
478 }
479 fprintf (file, "} ");
480 }
481
482 FOR_EACH_BB (bb)
483 FOR_BB_INSNS (bb, insn)
484 if (INSN_P (insn))
485 {
486 if (CALL_P (insn))
487 ccount++;
488 else
489 icount++;
490 }
491
492 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
493 dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
494 }
495
496 /* Dump the bb_info for a given basic block. */
497 static void
498 df_scan_start_block (basic_block bb, FILE *file)
499 {
500 struct df_scan_bb_info *bb_info
501 = df_scan_get_bb_info (bb->index);
502
503 if (bb_info)
504 {
505 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
506 df_refs_chain_dump (bb_info->artificial_defs, true, file);
507 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
508 df_refs_chain_dump (bb_info->artificial_uses, true, file);
509 fprintf (file, "\n");
510 }
511 #if 0
512 {
513 rtx insn;
514 FOR_BB_INSNS (bb, insn)
515 if (INSN_P (insn))
516 df_insn_debug (insn, false, file);
517 }
518 #endif
519 }
520
521 static struct df_problem problem_SCAN =
522 {
523 DF_SCAN, /* Problem id. */
524 DF_NONE, /* Direction. */
525 df_scan_alloc, /* Allocate the problem specific data. */
526 NULL, /* Reset global information. */
527 df_scan_free_bb_info, /* Free basic block info. */
528 NULL, /* Local compute function. */
529 NULL, /* Init the solution specific data. */
530 NULL, /* Iterative solver. */
531 NULL, /* Confluence operator 0. */
532 NULL, /* Confluence operator n. */
533 NULL, /* Transfer function. */
534 NULL, /* Finalize function. */
535 df_scan_free, /* Free all of the problem information. */
536 NULL, /* Remove this problem from the stack of dataflow problems. */
537 df_scan_start_dump, /* Debugging. */
538 df_scan_start_block, /* Debugging start block. */
539 NULL, /* Debugging end block. */
540 NULL, /* Incremental solution verify start. */
541 NULL, /* Incremental solution verify end. */
542 NULL, /* Dependent problem. */
543 TV_DF_SCAN, /* Timing variable. */
544 false /* Reset blocks on dropping out of blocks_to_analyze. */
545 };
546
547
548 /* Create a new DATAFLOW instance and add it to an existing instance
549 of DF. The returned structure is what is used to get at the
550 solution. */
551
552 void
553 df_scan_add_problem (void)
554 {
555 df_add_problem (&problem_SCAN);
556 }
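
/* Illustrative sketch, not in the original file: roughly how the
   df-core.c driver brings scanning up; the authoritative sequence
   lives in df-core.c, so treat this as an approximation.  */
#if 0
static void
df_example_initialize_scanning (void)
{
  df_scan_add_problem ();	/* Register problem_SCAN with df.  */
  df_scan_alloc (NULL);		/* Allocate pools, bitmaps and bb info.  */
  df_scan_blocks ();		/* Record the refs of every insn.  */
}
#endif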
557
558 \f
559 /*----------------------------------------------------------------------------
560 Storage Allocation Utilities
561 ----------------------------------------------------------------------------*/
562
563
564 /* First, grow the reg_info information. If the current size is less than
565 the number of pseudos, grow to 25% more than the number of
566 pseudos.
567
568 Second, ensure that all of the slots up to max_reg_num have been
569 filled with reg_info structures. */
570
571 void
572 df_grow_reg_info (void)
573 {
574 unsigned int max_reg = max_reg_num ();
575 unsigned int new_size = max_reg;
576 struct df_scan_problem_data *problem_data
577 = (struct df_scan_problem_data *) df_scan->problem_data;
578 unsigned int i;
579
580 if (df->regs_size < new_size)
581 {
582 new_size += new_size / 4;
583 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
584 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
585 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
586 new_size);
587 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
588 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
589 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
590 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
591 df->regs_size = new_size;
592 }
593
594 for (i = df->regs_inited; i < max_reg; i++)
595 {
596 struct df_reg_info *reg_info;
597
598 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
599 memset (reg_info, 0, sizeof (struct df_reg_info));
600 df->def_regs[i] = reg_info;
601 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
602 memset (reg_info, 0, sizeof (struct df_reg_info));
603 df->use_regs[i] = reg_info;
604 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
605 memset (reg_info, 0, sizeof (struct df_reg_info));
606 df->eq_use_regs[i] = reg_info;
607 df->def_info.begin[i] = 0;
608 df->def_info.count[i] = 0;
609 df->use_info.begin[i] = 0;
610 df->use_info.count[i] = 0;
611 }
612
613 df->regs_inited = max_reg;
614 }
615
616
617 /* Grow the ref information. */
618
619 static void
620 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
621 {
622 if (ref_info->refs_size < new_size)
623 {
624 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
625 memset (ref_info->refs + ref_info->refs_size, 0,
626 (new_size - ref_info->refs_size) * sizeof (df_ref));
627 ref_info->refs_size = new_size;
628 }
629 }
630
631
632 /* Check and grow the ref information if necessary. This routine
633 guarantees total_size + BITMAP_ADDEND amount of entries in refs
634 array. It updates ref_info->refs_size only and does not change
635 ref_info->total_size. */
636
637 static void
638 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
639 unsigned bitmap_addend)
640 {
641 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
642 {
643 int new_size = ref_info->total_size + bitmap_addend;
644 new_size += ref_info->total_size / 4;
645 df_grow_ref_info (ref_info, new_size);
646 }
647 }
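
/* Worked example, not in the original file: with total_size == 100,
   refs_size == 80 and BITMAP_ADDEND == 1, the test 80 < 101 fires
   and the table is regrown to 101 + 100/4 == 126 entries, leaving
   slack so that steady insertion does not regrow on every call.  */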
648
649
650 /* Grow the insn info. If the current size is less than the
651 number of instructions, grow to 25% more than the number of
652 instructions. */
653
654 void
655 df_grow_insn_info (void)
656 {
657 unsigned int new_size = get_max_uid () + 1;
658 if (DF_INSN_SIZE () < new_size)
659 {
660 new_size += new_size / 4;
661 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
662 memset (df->insns + df->insns_size, 0,
663 (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
664 DF_INSN_SIZE () = new_size;
665 }
666 }
667
668
669
670 \f
671 /*----------------------------------------------------------------------------
672 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
673 ----------------------------------------------------------------------------*/
674
675 /* Rescan all of the blocks_to_analyze or, if blocks_to_analyze is
676 NULL, all of the blocks in the function. */
677
678 void
679 df_scan_blocks (void)
680 {
681 basic_block bb;
682
683 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
684 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
685
686 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
687 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
688
689 bitmap_ior_into (df->eh_block_artificial_uses,
690 df->regular_block_artificial_uses);
691
692 /* ENTRY and EXIT blocks have special defs/uses. */
693 df_get_entry_block_def_set (df->entry_block_defs);
694 df_record_entry_block_defs (df->entry_block_defs);
695 df_get_exit_block_use_set (df->exit_block_uses);
696 df_record_exit_block_uses (df->exit_block_uses);
697 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
698 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
699
700 /* Regular blocks */
701 FOR_EACH_BB (bb)
702 {
703 unsigned int bb_index = bb->index;
704 df_bb_refs_record (bb_index, true);
705 }
706 }
707
708
709 /* Create a new ref of type DF_REF_TYPE for register REG at address
710 LOC within INSN of BB. This function is only used externally.
711
712 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
713 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
714 fields if they were constants. Otherwise they should be -1 if
715 those flags were set. */
716
717 df_ref
718 df_ref_create (rtx reg, rtx *loc, rtx insn,
719 basic_block bb,
720 enum df_ref_type ref_type,
721 enum df_ref_flags ref_flags,
722 int width, int offset, enum machine_mode mode)
723 {
724 df_ref ref;
725 struct df_reg_info **reg_info;
726 struct df_ref_info *ref_info;
727 df_ref *ref_rec;
728 df_ref **ref_rec_ptr;
729 unsigned int count = 0;
730 bool add_to_table;
731 enum df_ref_class cl;
732
733 df_grow_reg_info ();
734
735 /* You cannot hack artificial refs. */
736 gcc_assert (insn);
737
738 if (width != -1 || offset != -1)
739 cl = DF_REF_EXTRACT;
740 else if (loc)
741 cl = DF_REF_REGULAR;
742 else
743 cl = DF_REF_BASE;
744 ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
745 ref_type, ref_flags,
746 width, offset, mode);
747
748 if (DF_REF_REG_DEF_P (ref))
749 {
750 reg_info = df->def_regs;
751 ref_info = &df->def_info;
752 ref_rec_ptr = &DF_INSN_DEFS (insn);
753 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
754 }
755 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
756 {
757 reg_info = df->eq_use_regs;
758 ref_info = &df->use_info;
759 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
760 switch (ref_info->ref_order)
761 {
762 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
763 case DF_REF_ORDER_BY_REG_WITH_NOTES:
764 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
765 add_to_table = true;
766 break;
767 default:
768 add_to_table = false;
769 break;
770 }
771 }
772 else
773 {
774 reg_info = df->use_regs;
775 ref_info = &df->use_info;
776 ref_rec_ptr = &DF_INSN_USES (insn);
777 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
778 }
779
780 /* Do not add if ref is not in the right blocks. */
781 if (add_to_table && df->analyze_subset)
782 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
783
784 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
785
786 if (add_to_table)
787 switch (ref_info->ref_order)
788 {
789 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
790 case DF_REF_ORDER_BY_REG_WITH_NOTES:
791 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
792 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
793 break;
794 default:
795 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
796 break;
797 }
798
799 ref_rec = *ref_rec_ptr;
800 while (*ref_rec)
801 {
802 count++;
803 ref_rec++;
804 }
805
806 ref_rec = *ref_rec_ptr;
807 if (count)
808 {
809 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
810 *ref_rec_ptr = ref_rec;
811 ref_rec[count] = ref;
812 ref_rec[count+1] = NULL;
813 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
814 }
815 else
816 {
817 df_ref *ref_rec = XNEWVEC (df_ref, 2);
818 ref_rec[0] = ref;
819 ref_rec[1] = NULL;
820 *ref_rec_ptr = ref_rec;
821 }
822
823 #if 0
824 if (dump_file)
825 {
826 fprintf (dump_file, "adding ref ");
827 df_ref_debug (ref, dump_file);
828 }
829 #endif
830 /* By adding the ref directly, df_insn_rescan may not find any
831 differences even though the block will have changed. So we need
832 to mark the block dirty ourselves. */
833 df_set_bb_dirty (bb);
834
835 return ref;
836 }
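
/* Illustrative sketch, not in the original file: creating a plain
   (non-extract) use of REG at LOC inside INSN.  Width and offset of
   -1 with VOIDmode select the non-extract ref classes per the logic
   above.  */
#if 0
static df_ref
df_example_create_use (rtx reg, rtx *loc, rtx insn, basic_block bb)
{
  return df_ref_create (reg, loc, insn, bb,
			DF_REF_REG_USE, 0 /* ref_flags */,
			-1, -1, VOIDmode);
}
#endif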
837
838
839 \f
840 /*----------------------------------------------------------------------------
841 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
842 ----------------------------------------------------------------------------*/
843
844 static void
845 df_free_ref (df_ref ref)
846 {
847 struct df_scan_problem_data *problem_data
848 = (struct df_scan_problem_data *) df_scan->problem_data;
849
850 switch (DF_REF_CLASS (ref))
851 {
852 case DF_REF_BASE:
853 pool_free (problem_data->ref_base_pool, ref);
854 break;
855
856 case DF_REF_ARTIFICIAL:
857 pool_free (problem_data->ref_artificial_pool, ref);
858 break;
859
860 case DF_REF_REGULAR:
861 pool_free (problem_data->ref_regular_pool, ref);
862 break;
863
864 case DF_REF_EXTRACT:
865 pool_free (problem_data->ref_extract_pool, ref);
866 break;
867 }
868 }
869
870
871 /* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
872 Also delete the def-use or use-def chain if it exists. */
873
874 static void
875 df_reg_chain_unlink (df_ref ref)
876 {
877 df_ref next = DF_REF_NEXT_REG (ref);
878 df_ref prev = DF_REF_PREV_REG (ref);
879 int id = DF_REF_ID (ref);
880 struct df_reg_info *reg_info;
881 df_ref *refs = NULL;
882
883 if (DF_REF_REG_DEF_P (ref))
884 {
885 int regno = DF_REF_REGNO (ref);
886 reg_info = DF_REG_DEF_GET (regno);
887 refs = df->def_info.refs;
888 }
889 else
890 {
891 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
892 {
893 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
894 switch (df->use_info.ref_order)
895 {
896 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
897 case DF_REF_ORDER_BY_REG_WITH_NOTES:
898 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
899 refs = df->use_info.refs;
900 break;
901 default:
902 break;
903 }
904 }
905 else
906 {
907 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
908 refs = df->use_info.refs;
909 }
910 }
911
912 if (refs)
913 {
914 if (df->analyze_subset)
915 {
916 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
917 refs[id] = NULL;
918 }
919 else
920 refs[id] = NULL;
921 }
922
923 /* Delete any def-use or use-def chains that start here. It is
924 possible that there is trash in this field. This happens for
925 insns that have been deleted when rescanning has been deferred
926 and the chain problem has also been deleted. The chain tear down
927 code skips deleted insns. */
928 if (df_chain && DF_REF_CHAIN (ref))
929 df_chain_unlink (ref);
930
931 reg_info->n_refs--;
932 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
933 {
934 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
935 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
936 }
937
938 /* Unlink from the reg chain. If there is no prev, this is the
939 first of the list. If not, just join the next and prev. */
940 if (prev)
941 DF_REF_NEXT_REG (prev) = next;
942 else
943 {
944 gcc_assert (reg_info->reg_chain == ref);
945 reg_info->reg_chain = next;
946 }
947 if (next)
948 DF_REF_PREV_REG (next) = prev;
949
950 df_free_ref (ref);
951 }
952
953
954 /* Remove REF from VEC. */
955
956 static void
957 df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
958 {
959 df_ref *vec = *vec_ptr;
960
961 if (vec[1])
962 {
963 while (*vec && *vec != ref)
964 vec++;
965
966 while (*vec)
967 {
968 *vec = *(vec+1);
969 vec++;
970 }
971 }
972 else
973 {
974 free (vec);
975 *vec_ptr = df_null_ref_rec;
976 }
977 }
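
/* Worked example, not in the original file: compressing R1 out of
   the vec [R0, R1, R2, NULL] yields [R0, R2, NULL]; compressing the
   sole element out of [R0, NULL] frees the vec and substitutes the
   shared df_null_ref_rec.  */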
978
979
980 /* Unlink REF from all def-use/use-def chains, etc. */
981
982 void
983 df_ref_remove (df_ref ref)
984 {
985 #if 0
986 if (dump_file)
987 {
988 fprintf (dump_file, "removing ref ");
989 df_ref_debug (ref, dump_file);
990 }
991 #endif
992
993 if (DF_REF_REG_DEF_P (ref))
994 {
995 if (DF_REF_IS_ARTIFICIAL (ref))
996 {
997 struct df_scan_bb_info *bb_info
998 = df_scan_get_bb_info (DF_REF_BBNO (ref));
999 df_ref_compress_rec (&bb_info->artificial_defs, ref);
1000 }
1001 else
1002 {
1003 unsigned int uid = DF_REF_INSN_UID (ref);
1004 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1005 df_ref_compress_rec (&insn_rec->defs, ref);
1006 }
1007 }
1008 else
1009 {
1010 if (DF_REF_IS_ARTIFICIAL (ref))
1011 {
1012 struct df_scan_bb_info *bb_info
1013 = df_scan_get_bb_info (DF_REF_BBNO (ref));
1014 df_ref_compress_rec (&bb_info->artificial_uses, ref);
1015 }
1016 else
1017 {
1018 unsigned int uid = DF_REF_INSN_UID (ref);
1019 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1020
1021 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1022 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1023 else
1024 df_ref_compress_rec (&insn_rec->uses, ref);
1025 }
1026 }
1027
1028 /* By deleting the ref directly, df_insn_rescan may not find any
1029 differences even though the block will have changed. So we need
1030 to mark the block dirty ourselves. */
1031 df_set_bb_dirty (DF_REF_BB (ref));
1032 df_reg_chain_unlink (ref);
1033 }
1034
1035
1036 /* Create the insn record for INSN. If there was one there, zero it
1037 out. */
1038
1039 struct df_insn_info *
1040 df_insn_create_insn_record (rtx insn)
1041 {
1042 struct df_scan_problem_data *problem_data
1043 = (struct df_scan_problem_data *) df_scan->problem_data;
1044 struct df_insn_info *insn_rec;
1045
1046 df_grow_insn_info ();
1047 insn_rec = DF_INSN_INFO_GET (insn);
1048 if (!insn_rec)
1049 {
1050 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1051 DF_INSN_INFO_SET (insn, insn_rec);
1052 }
1053 memset (insn_rec, 0, sizeof (struct df_insn_info));
1054 insn_rec->insn = insn;
1055 return insn_rec;
1056 }
1057
1058
1059 /* Delete the du or ud chain (DF_REF_CHAIN ()) of each ref in the ref chain. */
1060
1061 static void
1062 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1063 {
1064 while (*ref_rec)
1065 {
1066 df_ref ref = *ref_rec;
1067 /* CHAIN is allocated by DF_CHAIN. So make sure to
1068 pass df_scan instance for the problem. */
1069 if (DF_REF_CHAIN (ref))
1070 df_chain_unlink (ref);
1071 ref_rec++;
1072 }
1073 }
1074
1075
1076 /* Delete all refs in the ref chain. */
1077
1078 static void
1079 df_ref_chain_delete (df_ref *ref_rec)
1080 {
1081 df_ref *start = ref_rec;
1082 while (*ref_rec)
1083 {
1084 df_reg_chain_unlink (*ref_rec);
1085 ref_rec++;
1086 }
1087
1088 /* If the list is empty, it has a special shared element that is not
1089 to be deleted. */
1090 if (*start)
1091 free (start);
1092 }
1093
1094
1095 /* Delete the hardreg chain. */
1096
1097 static void
1098 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1099 {
1100 struct df_scan_problem_data *problem_data;
1101
1102 if (!hardregs)
1103 return;
1104
1105 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1106
1107 while (*hardregs)
1108 {
1109 pool_free (problem_data->mw_reg_pool, *hardregs);
1110 hardregs++;
1111 }
1112 }
1113
1114
1115 /* Delete all of the refs information from INSN. BB is used to mark
1116 the block dirty and must be passed in, except when this is called
1117 from df_process_deferred_rescans. */
1118
1119 void
1120 df_insn_delete (basic_block bb, unsigned int uid)
1121 {
1122 struct df_insn_info *insn_info = NULL;
1123 if (!df)
1124 return;
1125
1126 df_grow_bb_info (df_scan);
1127 df_grow_reg_info ();
1128
1129 /* The block must be marked as dirty now, rather than later as in
1130 df_insn_rescan and df_notes_rescan because it may not be there at
1131 rescanning time and the mark would blow up. */
1132 if (bb)
1133 df_set_bb_dirty (bb);
1134
1135 insn_info = DF_INSN_UID_SAFE_GET (uid);
1136
1137 /* The client has deferred rescanning. */
1138 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1139 {
1140 if (insn_info)
1141 {
1142 bitmap_clear_bit (df->insns_to_rescan, uid);
1143 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1144 bitmap_set_bit (df->insns_to_delete, uid);
1145 }
1146 if (dump_file)
1147 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1148 return;
1149 }
1150
1151 if (dump_file)
1152 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1153
1154 bitmap_clear_bit (df->insns_to_delete, uid);
1155 bitmap_clear_bit (df->insns_to_rescan, uid);
1156 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1157 if (insn_info)
1158 {
1159 struct df_scan_problem_data *problem_data
1160 = (struct df_scan_problem_data *) df_scan->problem_data;
1161
1162 /* In general, notes do not have the insn_info fields
1163 initialized. However, combine deletes insns by changing them
1164 to notes. How clever. So we cannot just check if it is a
1165 valid insn before short circuiting this code, we need to see
1166 if we actually initialized it. */
1167 if (insn_info->defs)
1168 {
1169 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1170
1171 if (df_chain)
1172 {
1173 df_ref_chain_delete_du_chain (insn_info->defs);
1174 df_ref_chain_delete_du_chain (insn_info->uses);
1175 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1176 }
1177
1178 df_ref_chain_delete (insn_info->defs);
1179 df_ref_chain_delete (insn_info->uses);
1180 df_ref_chain_delete (insn_info->eq_uses);
1181 }
1182 pool_free (problem_data->insn_pool, insn_info);
1183 DF_INSN_UID_SET (uid, NULL);
1184 }
1185 }
1186
1187
1188 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1189
1190 static void
1191 df_free_collection_rec (struct df_collection_rec *collection_rec)
1192 {
1193 struct df_scan_problem_data *problem_data
1194 = (struct df_scan_problem_data *) df_scan->problem_data;
1195 df_ref *ref;
1196 struct df_mw_hardreg **mw;
1197
1198 if (collection_rec->def_vec)
1199 for (ref = collection_rec->def_vec; *ref; ref++)
1200 df_free_ref (*ref);
1201 if (collection_rec->use_vec)
1202 for (ref = collection_rec->use_vec; *ref; ref++)
1203 df_free_ref (*ref);
1204 if (collection_rec->eq_use_vec)
1205 for (ref = collection_rec->eq_use_vec; *ref; ref++)
1206 df_free_ref (*ref);
1207 if (collection_rec->mw_vec)
1208 for (mw = collection_rec->mw_vec; *mw; mw++)
1209 pool_free (problem_data->mw_reg_pool, *mw);
1210 }
1211
1212
1213 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1214
1215 bool
1216 df_insn_rescan (rtx insn)
1217 {
1218 unsigned int uid = INSN_UID (insn);
1219 struct df_insn_info *insn_info = NULL;
1220 basic_block bb = BLOCK_FOR_INSN (insn);
1221 struct df_collection_rec collection_rec;
1222 collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
1223 collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
1224 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
1225 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
1226
1227 if ((!df) || (!INSN_P (insn)))
1228 return false;
1229
1230 if (!bb)
1231 {
1232 if (dump_file)
1233 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1234 return false;
1235 }
1236
1237 /* The client has disabled rescanning and plans to do it itself. */
1238 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1239 return false;
1240
1241 df_grow_bb_info (df_scan);
1242 df_grow_reg_info ();
1243
1244 insn_info = DF_INSN_UID_SAFE_GET (uid);
1245
1246 /* The client has deferred rescanning. */
1247 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1248 {
1249 if (!insn_info)
1250 {
1251 insn_info = df_insn_create_insn_record (insn);
1252 insn_info->defs = df_null_ref_rec;
1253 insn_info->uses = df_null_ref_rec;
1254 insn_info->eq_uses = df_null_ref_rec;
1255 insn_info->mw_hardregs = df_null_mw_rec;
1256 }
1257 if (dump_file)
1258 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1259
1260 bitmap_clear_bit (df->insns_to_delete, uid);
1261 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1262 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1263 return false;
1264 }
1265
1266 bitmap_clear_bit (df->insns_to_delete, uid);
1267 bitmap_clear_bit (df->insns_to_rescan, uid);
1268 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1269 if (insn_info)
1270 {
1271 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1272 /* If there's no change, return false. */
1273 if (the_same)
1274 {
1275 df_free_collection_rec (&collection_rec);
1276 if (dump_file)
1277 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1278 return false;
1279 }
1280 if (dump_file)
1281 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1282
1283 /* There's change - we need to delete the existing info. */
1284 df_insn_delete (NULL, uid);
1285 df_insn_create_insn_record (insn);
1286 }
1287 else
1288 {
1289 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1290 df_insn_refs_collect (&collection_rec, bb, insn_info);
1291 if (dump_file)
1292 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1293 }
1294
1295 df_refs_add_to_chains (&collection_rec, bb, insn);
1296 df_set_bb_dirty (bb);
1297 return true;
1298 }
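
/* Illustrative sketch, not in the original file: a pass that edits
   an insn in place resynchronizes the scanned refs like this (the
   PATTERN assignment stands in for any rtl change).  */
#if 0
static bool
df_example_patch_and_rescan (rtx insn, rtx new_pat)
{
  PATTERN (insn) = new_pat;
  /* Returns true only when the set of refs actually changed.  */
  return df_insn_rescan (insn);
}
#endif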
1299
1300
1301 /* Rescan all of the insns in the function. Note that the artificial
1302 uses and defs are not touched. This function will destroy def-use
1303 or use-def chains. */
1304
1305 void
1306 df_insn_rescan_all (void)
1307 {
1308 bool no_insn_rescan = false;
1309 bool defer_insn_rescan = false;
1310 basic_block bb;
1311 bitmap_iterator bi;
1312 unsigned int uid;
1313 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1314
1315 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1316 {
1317 df_clear_flags (DF_NO_INSN_RESCAN);
1318 no_insn_rescan = true;
1319 }
1320
1321 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1322 {
1323 df_clear_flags (DF_DEFER_INSN_RESCAN);
1324 defer_insn_rescan = true;
1325 }
1326
1327 bitmap_copy (tmp, df->insns_to_delete);
1328 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1329 {
1330 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1331 if (insn_info)
1332 df_insn_delete (NULL, uid);
1333 }
1334
1335 BITMAP_FREE (tmp);
1336 bitmap_clear (df->insns_to_delete);
1337 bitmap_clear (df->insns_to_rescan);
1338 bitmap_clear (df->insns_to_notes_rescan);
1339
1340 FOR_EACH_BB (bb)
1341 {
1342 rtx insn;
1343 FOR_BB_INSNS (bb, insn)
1344 {
1345 df_insn_rescan (insn);
1346 }
1347 }
1348
1349 if (no_insn_rescan)
1350 df_set_flags (DF_NO_INSN_RESCAN);
1351 if (defer_insn_rescan)
1352 df_set_flags (DF_DEFER_INSN_RESCAN);
1353 }
1354
1355
1356 /* Process all of the deferred rescans or deletions. */
1357
1358 void
1359 df_process_deferred_rescans (void)
1360 {
1361 bool no_insn_rescan = false;
1362 bool defer_insn_rescan = false;
1363 bitmap_iterator bi;
1364 unsigned int uid;
1365 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1366
1367 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1368 {
1369 df_clear_flags (DF_NO_INSN_RESCAN);
1370 no_insn_rescan = true;
1371 }
1372
1373 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1374 {
1375 df_clear_flags (DF_DEFER_INSN_RESCAN);
1376 defer_insn_rescan = true;
1377 }
1378
1379 if (dump_file)
1380 fprintf (dump_file, "starting the processing of deferred insns\n");
1381
1382 bitmap_copy (tmp, df->insns_to_delete);
1383 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1384 {
1385 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1386 if (insn_info)
1387 df_insn_delete (NULL, uid);
1388 }
1389
1390 bitmap_copy (tmp, df->insns_to_rescan);
1391 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1392 {
1393 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1394 if (insn_info)
1395 df_insn_rescan (insn_info->insn);
1396 }
1397
1398 bitmap_copy (tmp, df->insns_to_notes_rescan);
1399 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1400 {
1401 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1402 if (insn_info)
1403 df_notes_rescan (insn_info->insn);
1404 }
1405
1406 if (dump_file)
1407 fprintf (dump_file, "ending the processing of deferred insns\n");
1408
1409 BITMAP_FREE (tmp);
1410 bitmap_clear (df->insns_to_delete);
1411 bitmap_clear (df->insns_to_rescan);
1412 bitmap_clear (df->insns_to_notes_rescan);
1413
1414 if (no_insn_rescan)
1415 df_set_flags (DF_NO_INSN_RESCAN);
1416 if (defer_insn_rescan)
1417 df_set_flags (DF_DEFER_INSN_RESCAN);
1418
1419 /* If someone changed regs_ever_live during this pass, fix up the
1420 entry and exit blocks. */
1421 if (df->redo_entry_and_exit)
1422 {
1423 df_update_entry_exit_and_calls ();
1424 df->redo_entry_and_exit = false;
1425 }
1426 }
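
/* Illustrative sketch, not in the original file: a pass making many
   changes batches the work by deferring rescans; df_insn_delete and
   df_insn_rescan then only queue uids in the insns_to_* bitmaps.  */
#if 0
static void
df_example_batched_edits (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);
  /* ... delete and modify insns here; nothing is rescanned yet ... */
  df_process_deferred_rescans ();
  df_clear_flags (DF_DEFER_INSN_RESCAN);
}
#endif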
1427
1428
1429 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1430 the uses if INCLUDE_USES. Include the eq_uses if
1431 INCLUDE_EQ_USES. */
1432
1433 static unsigned int
1434 df_count_refs (bool include_defs, bool include_uses,
1435 bool include_eq_uses)
1436 {
1437 unsigned int regno;
1438 int size = 0;
1439 unsigned int m = df->regs_inited;
1440
1441 for (regno = 0; regno < m; regno++)
1442 {
1443 if (include_defs)
1444 size += DF_REG_DEF_COUNT (regno);
1445 if (include_uses)
1446 size += DF_REG_USE_COUNT (regno);
1447 if (include_eq_uses)
1448 size += DF_REG_EQ_USE_COUNT (regno);
1449 }
1450 return size;
1451 }
1452
1453
1454 /* Build the ref table for either the uses or defs from the reg-use
1455 or reg-def chains. This version processes the refs in reg order
1456 which is likely to be best if processing the whole function. */
1457
1458 static void
1459 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1460 bool include_defs,
1461 bool include_uses,
1462 bool include_eq_uses)
1463 {
1464 unsigned int m = df->regs_inited;
1465 unsigned int regno;
1466 unsigned int offset = 0;
1467 unsigned int start;
1468
1469 if (df->changeable_flags & DF_NO_HARD_REGS)
1470 {
1471 start = FIRST_PSEUDO_REGISTER;
1472 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1473 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1474 }
1475 else
1476 start = 0;
1477
1478 ref_info->total_size
1479 = df_count_refs (include_defs, include_uses, include_eq_uses);
1480
1481 df_check_and_grow_ref_info (ref_info, 1);
1482
1483 for (regno = start; regno < m; regno++)
1484 {
1485 int count = 0;
1486 ref_info->begin[regno] = offset;
1487 if (include_defs)
1488 {
1489 df_ref ref = DF_REG_DEF_CHAIN (regno);
1490 while (ref)
1491 {
1492 ref_info->refs[offset] = ref;
1493 DF_REF_ID (ref) = offset++;
1494 count++;
1495 ref = DF_REF_NEXT_REG (ref);
1496 gcc_assert (offset < ref_info->refs_size);
1497 }
1498 }
1499 if (include_uses)
1500 {
1501 df_ref ref = DF_REG_USE_CHAIN (regno);
1502 while (ref)
1503 {
1504 ref_info->refs[offset] = ref;
1505 DF_REF_ID (ref) = offset++;
1506 count++;
1507 ref = DF_REF_NEXT_REG (ref);
1508 gcc_assert (offset < ref_info->refs_size);
1509 }
1510 }
1511 if (include_eq_uses)
1512 {
1513 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1514 while (ref)
1515 {
1516 ref_info->refs[offset] = ref;
1517 DF_REF_ID (ref) = offset++;
1518 count++;
1519 ref = DF_REF_NEXT_REG (ref);
1520 gcc_assert (offset < ref_info->refs_size);
1521 }
1522 }
1523 ref_info->count[regno] = count;
1524 }
1525
1526 /* The bitmap size is not decremented when refs are deleted. So
1527 reset it now that we have squished out all of the empty
1528 slots. */
1529 ref_info->table_size = offset;
1530 }
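
/* Illustrative sketch, not in the original file: once the table is
   organized by reg, the refs of REGNO occupy the contiguous slice
   refs[begin[REGNO] .. begin[REGNO] + count[REGNO] - 1].  The
   DF_USES_* accessors are assumed to be the df.h wrappers around
   use_info.  */
#if 0
static void
df_example_walk_uses (unsigned int regno)
{
  unsigned int i;
  df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_REG);
  for (i = DF_USES_BEGIN (regno);
       i < DF_USES_BEGIN (regno) + DF_USES_COUNT (regno);
       i++)
    {
      df_ref use = DF_USES_GET (i);
      /* ... examine USE ... */
      (void) use;
    }
}
#endif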
1531
1532
1533 /* Build the ref table for either the uses or defs from the reg-use
1534 or reg-def chains. This version processes the refs in insn order
1535 which is likely to be best if processing some segment of the
1536 function. */
1537
1538 static void
1539 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1540 bool include_defs,
1541 bool include_uses,
1542 bool include_eq_uses)
1543 {
1544 bitmap_iterator bi;
1545 unsigned int bb_index;
1546 unsigned int m = df->regs_inited;
1547 unsigned int offset = 0;
1548 unsigned int r;
1549 unsigned int start
1550 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1551
1552 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1553 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1554
1555 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1556 df_check_and_grow_ref_info (ref_info, 1);
1557
1558 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1559 {
1560 basic_block bb = BASIC_BLOCK (bb_index);
1561 rtx insn;
1562 df_ref *ref_rec;
1563
1564 if (include_defs)
1565 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1566 {
1567 unsigned int regno = DF_REF_REGNO (*ref_rec);
1568 ref_info->count[regno]++;
1569 }
1570 if (include_uses)
1571 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1572 {
1573 unsigned int regno = DF_REF_REGNO (*ref_rec);
1574 ref_info->count[regno]++;
1575 }
1576
1577 FOR_BB_INSNS (bb, insn)
1578 {
1579 if (INSN_P (insn))
1580 {
1581 unsigned int uid = INSN_UID (insn);
1582
1583 if (include_defs)
1584 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1585 {
1586 unsigned int regno = DF_REF_REGNO (*ref_rec);
1587 ref_info->count[regno]++;
1588 }
1589 if (include_uses)
1590 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1591 {
1592 unsigned int regno = DF_REF_REGNO (*ref_rec);
1593 ref_info->count[regno]++;
1594 }
1595 if (include_eq_uses)
1596 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1597 {
1598 unsigned int regno = DF_REF_REGNO (*ref_rec);
1599 ref_info->count[regno]++;
1600 }
1601 }
1602 }
1603 }
1604
1605 for (r = start; r < m; r++)
1606 {
1607 ref_info->begin[r] = offset;
1608 offset += ref_info->count[r];
1609 ref_info->count[r] = 0;
1610 }
1611
1612 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1613 {
1614 basic_block bb = BASIC_BLOCK (bb_index);
1615 rtx insn;
1616 df_ref *ref_rec;
1617
1618 if (include_defs)
1619 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1620 {
1621 df_ref ref = *ref_rec;
1622 unsigned int regno = DF_REF_REGNO (ref);
1623 if (regno >= start)
1624 {
1625 unsigned int id
1626 = ref_info->begin[regno] + ref_info->count[regno]++;
1627 DF_REF_ID (ref) = id;
1628 ref_info->refs[id] = ref;
1629 }
1630 }
1631 if (include_uses)
1632 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1633 {
1634 df_ref ref = *ref_rec;
1635 unsigned int regno = DF_REF_REGNO (ref);
1636 if (regno >= start)
1637 {
1638 unsigned int id
1639 = ref_info->begin[regno] + ref_info->count[regno]++;
1640 DF_REF_ID (ref) = id;
1641 ref_info->refs[id] = ref;
1642 }
1643 }
1644
1645 FOR_BB_INSNS (bb, insn)
1646 {
1647 if (INSN_P (insn))
1648 {
1649 unsigned int uid = INSN_UID (insn);
1650
1651 if (include_defs)
1652 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1653 {
1654 df_ref ref = *ref_rec;
1655 unsigned int regno = DF_REF_REGNO (ref);
1656 if (regno >= start)
1657 {
1658 unsigned int id
1659 = ref_info->begin[regno] + ref_info->count[regno]++;
1660 DF_REF_ID (ref) = id;
1661 ref_info->refs[id] = ref;
1662 }
1663 }
1664 if (include_uses)
1665 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1666 {
1667 df_ref ref = *ref_rec;
1668 unsigned int regno = DF_REF_REGNO (ref);
1669 if (regno >= start)
1670 {
1671 unsigned int id
1672 = ref_info->begin[regno] + ref_info->count[regno]++;
1673 DF_REF_ID (ref) = id;
1674 ref_info->refs[id] = ref;
1675 }
1676 }
1677 if (include_eq_uses)
1678 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1679 {
1680 df_ref ref = *ref_rec;
1681 unsigned int regno = DF_REF_REGNO (ref);
1682 if (regno >= start)
1683 {
1684 unsigned int id
1685 = ref_info->begin[regno] + ref_info->count[regno]++;
1686 DF_REF_ID (ref) = id;
1687 ref_info->refs[id] = ref;
1688 }
1689 }
1690 }
1691 }
1692 }
1693
1694 /* The bitmap size is not decremented when refs are deleted. So
1695 reset it now that we have squished out all of the empty
1696 slots. */
1697
1698 ref_info->table_size = offset;
1699 }
1700
1701 /* Build the ref table for either the uses or defs from the reg-use
1702 or reg-def chains. */
1703
1704 static void
1705 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1706 bool include_defs,
1707 bool include_uses,
1708 bool include_eq_uses)
1709 {
1710 if (df->analyze_subset)
1711 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1712 include_uses, include_eq_uses);
1713 else
1714 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1715 include_uses, include_eq_uses);
1716 }
1717
1718
1719 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1720 static unsigned int
1721 df_add_refs_to_table (unsigned int offset,
1722 struct df_ref_info *ref_info,
1723 df_ref *ref_vec)
1724 {
1725 while (*ref_vec)
1726 {
1727 df_ref ref = *ref_vec;
1728 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1729 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1730 {
1731 ref_info->refs[offset] = ref;
1732 DF_REF_ID (*ref_vec) = offset++;
1733 }
1734 ref_vec++;
1735 }
1736 return offset;
1737 }
1738
1739
1740 /* Count the number of refs in all of the insns of BB. Include the
1741 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1742 eq_uses if INCLUDE_EQ_USES. */
1743
1744 static unsigned int
1745 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1746 struct df_ref_info *ref_info,
1747 bool include_defs, bool include_uses,
1748 bool include_eq_uses)
1749 {
1750 rtx insn;
1751
1752 if (include_defs)
1753 offset = df_add_refs_to_table (offset, ref_info,
1754 df_get_artificial_defs (bb->index));
1755 if (include_uses)
1756 offset = df_add_refs_to_table (offset, ref_info,
1757 df_get_artificial_uses (bb->index));
1758
1759 FOR_BB_INSNS (bb, insn)
1760 if (INSN_P (insn))
1761 {
1762 unsigned int uid = INSN_UID (insn);
1763 if (include_defs)
1764 offset = df_add_refs_to_table (offset, ref_info,
1765 DF_INSN_UID_DEFS (uid));
1766 if (include_uses)
1767 offset = df_add_refs_to_table (offset, ref_info,
1768 DF_INSN_UID_USES (uid));
1769 if (include_eq_uses)
1770 offset = df_add_refs_to_table (offset, ref_info,
1771 DF_INSN_UID_EQ_USES (uid));
1772 }
1773 return offset;
1774 }
1775
1776
1777 /* Organize the refs by insn into the table in REF_INFO. If
1778 blocks_to_analyze is defined, use that set, otherwise the entire
1779 program. Include the defs if INCLUDE_DEFS. Include the uses if
1780 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1781
1782 static void
1783 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1784 bool include_defs, bool include_uses,
1785 bool include_eq_uses)
1786 {
1787 basic_block bb;
1788 unsigned int offset = 0;
1789
1790 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1791 df_check_and_grow_ref_info (ref_info, 1);
1792 if (df->blocks_to_analyze)
1793 {
1794 bitmap_iterator bi;
1795 unsigned int index;
1796
1797 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1798 {
1799 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1800 include_defs, include_uses,
1801 include_eq_uses);
1802 }
1803
1804 ref_info->table_size = offset;
1805 }
1806 else
1807 {
1808 FOR_ALL_BB (bb)
1809 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1810 include_defs, include_uses,
1811 include_eq_uses);
1812 ref_info->table_size = offset;
1813 }
1814 }
1815
1816
1817 /* If the use refs in DF are not organized, reorganize them. */
1818
1819 void
1820 df_maybe_reorganize_use_refs (enum df_ref_order order)
1821 {
1822 if (order == df->use_info.ref_order)
1823 return;
1824
1825 switch (order)
1826 {
1827 case DF_REF_ORDER_BY_REG:
1828 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1829 break;
1830
1831 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1832 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1833 break;
1834
1835 case DF_REF_ORDER_BY_INSN:
1836 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1837 break;
1838
1839 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1840 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1841 break;
1842
1843 case DF_REF_ORDER_NO_TABLE:
1844 free (df->use_info.refs);
1845 df->use_info.refs = NULL;
1846 df->use_info.refs_size = 0;
1847 break;
1848
1849 case DF_REF_ORDER_UNORDERED:
1850 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1851 gcc_unreachable ();
1852 break;
1853 }
1854
1855 df->use_info.ref_order = order;
1856 }
1857
1858
1859 /* If the def refs in DF are not organized, reorganize them. */
1860
1861 void
1862 df_maybe_reorganize_def_refs (enum df_ref_order order)
1863 {
1864 if (order == df->def_info.ref_order)
1865 return;
1866
1867 switch (order)
1868 {
1869 case DF_REF_ORDER_BY_REG:
1870 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1871 break;
1872
1873 case DF_REF_ORDER_BY_INSN:
1874 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1875 break;
1876
1877 case DF_REF_ORDER_NO_TABLE:
1878 free (df->def_info.refs);
1879 df->def_info.refs = NULL;
1880 df->def_info.refs_size = 0;
1881 break;
1882
1883 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1884 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1885 case DF_REF_ORDER_UNORDERED:
1886 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1887 gcc_unreachable ();
1888 break;
1889 }
1890
1891 df->def_info.ref_order = order;
1892 }
1893
1894
1895 /* Change all of the basic block references in INSN to use the insn's
1896 current basic block. This function is called from routines that move
1897 instructions from one block to another. */
1898
1899 void
1900 df_insn_change_bb (rtx insn, basic_block new_bb)
1901 {
1902 basic_block old_bb = BLOCK_FOR_INSN (insn);
1903 struct df_insn_info *insn_info;
1904 unsigned int uid = INSN_UID (insn);
1905
1906 if (old_bb == new_bb)
1907 return;
1908
1909 set_block_for_insn (insn, new_bb);
1910
1911 if (!df)
1912 return;
1913
1914 if (dump_file)
1915 fprintf (dump_file, "changing bb of uid %d\n", uid);
1916
1917 insn_info = DF_INSN_UID_SAFE_GET (uid);
1918 if (insn_info == NULL)
1919 {
1920 if (dump_file)
1921 fprintf (dump_file, " unscanned insn\n");
1922 df_insn_rescan (insn);
1923 return;
1924 }
1925
1926 if (!INSN_P (insn))
1927 return;
1928
1929 df_set_bb_dirty (new_bb);
1930 if (old_bb)
1931 {
1932 if (dump_file)
1933 fprintf (dump_file, " from %d to %d\n",
1934 old_bb->index, new_bb->index);
1935 df_set_bb_dirty (old_bb);
1936 }
1937 else
1938 if (dump_file)
1939 fprintf (dump_file, " to %d\n", new_bb->index);
1940 }
1941
1942
1943 /* Helper function for df_ref_change_reg_with_loc. */
1944
1945 static void
1946 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
1947 struct df_reg_info *new_df,
1948 int new_regno, rtx loc)
1949 {
1950 df_ref the_ref = old_df->reg_chain;
1951
1952 while (the_ref)
1953 {
1954 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
1955 && (DF_REF_LOC (the_ref))
1956 && (*DF_REF_LOC (the_ref) == loc))
1957 {
1958 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
1959 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
1960 df_ref *ref_vec, *ref_vec_t;
1961 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
1962 unsigned int count = 0;
1963
1964 DF_REF_REGNO (the_ref) = new_regno;
1965 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1966
1967 /* Pull the_ref out of the old regno chain. */
1968 if (prev_ref)
1969 DF_REF_NEXT_REG (prev_ref) = next_ref;
1970 else
1971 old_df->reg_chain = next_ref;
1972 if (next_ref)
1973 DF_REF_PREV_REG (next_ref) = prev_ref;
1974 old_df->n_refs--;
1975
1976 /* Put the ref into the new regno chain. */
1977 DF_REF_PREV_REG (the_ref) = NULL;
1978 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
1979 if (new_df->reg_chain)
1980 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
1981 new_df->reg_chain = the_ref;
1982 new_df->n_refs++;
1983 if (DF_REF_BB (the_ref))
1984 df_set_bb_dirty (DF_REF_BB (the_ref));
1985
1986 /* The record that the ref was in needs to be re-sorted, because
1987 the regno is a sorting key. First, find the right
1988 record. */
1989 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1990 ref_vec = insn_info->eq_uses;
1991 else
1992 ref_vec = insn_info->uses;
1993 if (dump_file)
1994 fprintf (dump_file, "changing reg in insn %d\n",
1995 DF_REF_INSN_UID (the_ref));
1996
1997 ref_vec_t = ref_vec;
1998
1999 /* Find the length. */
2000 while (*ref_vec_t)
2001 {
2002 count++;
2003 ref_vec_t++;
2004 }
2005 qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
2006
2007 the_ref = next_ref;
2008 }
2009 else
2010 the_ref = DF_REF_NEXT_REG (the_ref);
2011 }
2012 }
2013
2014
2015 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2016 NEW_REGNO. Refs that do not match LOC are not changed, which means
2017 that artificial refs are never changed since they have no loc. This
2018 call exists to support the SET_REGNO macro. */
2019
2020 void
2021 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2022 {
2023 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2024 return;
2025
2026 df_grow_reg_info ();
2027
2028 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2029 DF_REG_DEF_GET (new_regno), new_regno, loc);
2030 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2031 DF_REG_USE_GET (new_regno), new_regno, loc);
2032 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2033 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2034 }
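
/* Illustrative sketch only: this routine exists so that a rename of
   the kind the SET_REGNO macro performs can keep the df information
   in step with the rtl. REG and NEW_REGNO below are hypothetical
   locals of such a renaming pass. */
#if 0
  /* Fix the refs first, while REGNO (reg) still returns the old
     number, then store NEW_REGNO into the rtx itself. */
  df_ref_change_reg_with_loc (REGNO (reg), new_regno, reg);
#endif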
2035
2036
2037 /* Delete the mw_hardregs that point into the eq_notes. */
2038
2039 static unsigned int
2040 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2041 {
2042 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2043 unsigned int deleted = 0;
2044 unsigned int count = 0;
2045 struct df_scan_problem_data *problem_data
2046 = (struct df_scan_problem_data *) df_scan->problem_data;
2047
2048 if (!*mw_vec)
2049 return 0;
2050
2051 while (*mw_vec)
2052 {
2053 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2054 {
2055 struct df_mw_hardreg **temp_vec = mw_vec;
2056
2057 pool_free (problem_data->mw_reg_pool, *mw_vec);
2059 /* Shove the remaining ones down one to fill the gap. While
2060 this looks n**2, it is highly unusual to have any mw regs
2061 in eq_notes and the chances of more than one are almost
2062 nonexistent. */
2063 while (*temp_vec)
2064 {
2065 *temp_vec = *(temp_vec + 1);
2066 temp_vec++;
2067 }
2068 deleted++;
2069 }
2070 else
2071 {
2072 mw_vec++;
2073 count++;
2074 }
2075 }
2076
2077 if (count == 0)
2078 {
2079 df_scan_free_mws_vec (insn_info->mw_hardregs);
2080 insn_info->mw_hardregs = df_null_mw_rec;
2081 return 0;
2082 }
2083 return deleted;
2084 }
2085
2086
2087 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2088
2089 void
2090 df_notes_rescan (rtx insn)
2091 {
2092 struct df_insn_info *insn_info;
2093 unsigned int uid = INSN_UID (insn);
2094
2095 if (!df)
2096 return;
2097
2098 /* The client has disabled rescanning and plans to do it itself. */
2099 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2100 return;
2101
2102 /* Do nothing if the insn hasn't been emitted yet. */
2103 if (!BLOCK_FOR_INSN (insn))
2104 return;
2105
2106 df_grow_bb_info (df_scan);
2107 df_grow_reg_info ();
2108
2109 insn_info = DF_INSN_UID_SAFE_GET (uid);
2110
2111 /* The client has deferred rescanning. */
2112 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2113 {
2114 if (!insn_info)
2115 {
2116 insn_info = df_insn_create_insn_record (insn);
2117 insn_info->defs = df_null_ref_rec;
2118 insn_info->uses = df_null_ref_rec;
2119 insn_info->eq_uses = df_null_ref_rec;
2120 insn_info->mw_hardregs = df_null_mw_rec;
2121 }
2122
2123 bitmap_clear_bit (df->insns_to_delete, uid);
2124 /* If the insn is set to be rescanned, it does not also need
2125 its notes rescanned. */
2126 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2127 bitmap_set_bit (df->insns_to_notes_rescan, uid);
2128 return;
2129 }
2130
2131 bitmap_clear_bit (df->insns_to_delete, uid);
2132 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2133
2134 if (insn_info)
2135 {
2136 basic_block bb = BLOCK_FOR_INSN (insn);
2137 rtx note;
2138 struct df_collection_rec collection_rec;
2139 unsigned int num_deleted;
2140
2141 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2142 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
2143 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
2144
2145 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2146 df_ref_chain_delete (insn_info->eq_uses);
2147 insn_info->eq_uses = NULL;
2148
2149 /* Process REG_EQUIV/REG_EQUAL notes. */
2150 for (note = REG_NOTES (insn); note;
2151 note = XEXP (note, 1))
2152 {
2153 switch (REG_NOTE_KIND (note))
2154 {
2155 case REG_EQUIV:
2156 case REG_EQUAL:
2157 df_uses_record (DF_REF_REGULAR, &collection_rec,
2158 &XEXP (note, 0), DF_REF_REG_USE,
2159 bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
break;
2160 default:
2161 break;
2162 }
2163 }
2164
2165 /* Find some place to put any new mw_hardregs. */
2166 df_canonize_collection_rec (&collection_rec);
2167 if (collection_rec.next_mw)
2168 {
2169 unsigned int count = 0;
2170 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2171 while (*mw_rec)
2172 {
2173 count++;
2174 mw_rec++;
2175 }
2176
2177 if (count)
2178 {
2179 /* Append to the end of the existing record after
2180 expanding it if necessary. */
2181 if (collection_rec.next_mw > num_deleted)
2182 {
2183 insn_info->mw_hardregs =
2184 XRESIZEVEC (struct df_mw_hardreg *,
2185 insn_info->mw_hardregs,
2186 count + 1 + collection_rec.next_mw);
2187 }
2188 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2189 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2190 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2191 sizeof (struct df_mw_hardreg *), df_mw_compare);
2192 }
2193 else
2194 {
2195 /* No vector there. */
2196 insn_info->mw_hardregs
2197 = XNEWVEC (struct df_mw_hardreg*,
2198 count + 1 + collection_rec.next_mw);
2199 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2200 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2201 }
2202 }
2203 /* Get rid of the mw_vec so that df_refs_add_to_chains will
2204 ignore it. */
2205 collection_rec.mw_vec = NULL;
2206 collection_rec.next_mw = 0;
2207 df_refs_add_to_chains (&collection_rec, bb, insn);
2208 }
2209 else
2210 df_insn_rescan (insn);
2211
2212 }
2213
2214 \f
2215 /*----------------------------------------------------------------------------
2216 Hard core instruction scanning code. No external interfaces here,
2217 just a lot of routines that look inside insns.
2218 ----------------------------------------------------------------------------*/
2219
2220
2221 /* Return true if the contents of two df_ref's are identical.
2222 It ignores the DF_REF_REG_MARKER and DF_REF_MW_HARDREG flags. */
2223
2224 static bool
2225 df_ref_equal_p (df_ref ref1, df_ref ref2)
2226 {
2227 if (!ref2)
2228 return false;
2229
2230 if (ref1 == ref2)
2231 return true;
2232
2233 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2234 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2235 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2236 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2237 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2238 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2239 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2240 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2241 return false;
2242
2243 switch (DF_REF_CLASS (ref1))
2244 {
2245 case DF_REF_ARTIFICIAL:
2246 case DF_REF_BASE:
2247 return true;
2248
2249 case DF_REF_EXTRACT:
2250 if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2251 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2252 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
2253 return false;
2254 /* fallthru. */
2255
2256 case DF_REF_REGULAR:
2257 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2258
2259 default:
2260 gcc_unreachable ();
2261 }
2262 return false;
2263 }
2264
2265
2266 /* Compare REF1 and REF2 for sorting. This is only called from places
2267 where all of the refs are of the same type, in the same insn, and
2268 have the same bb. So these fields are not checked. */
2269
2270 static int
2271 df_ref_compare (const void *r1, const void *r2)
2272 {
2273 const df_ref ref1 = *(const df_ref *)r1;
2274 const df_ref ref2 = *(const df_ref *)r2;
2275
2276 if (ref1 == ref2)
2277 return 0;
2278
2279 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2280 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2281
2282 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2283 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2284
2285 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2286 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2287
2288 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2289 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2290
2291 /* Cannot look at the LOC field on artificial refs. */
2292 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2293 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2294 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2295
2296 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2297 {
2298 /* If two refs are identical except that one of them is from
2299 a mw and one is not, we need to have the one with the mw
2300 first. */
2301 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2302 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2303 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2304 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2305 return -1;
2306 else
2307 return 1;
2308 }
2309
2310 /* The classes are the same at this point so it is safe to only look
2311 at ref1. */
2312 if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
2313 {
2314 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2315 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
2316 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2317 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
2318 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
2319 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
2320 }
2321 return 0;
2322 }
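
/* A minimal sketch (illustrative only) of the calling convention that
   df_ref_compare assumes: ref vectors are NULL-terminated, so a
   caller counts the entries before handing the vector to qsort,
   exactly as df_ref_change_reg_with_loc_1 above does. */
#if 0
  df_ref *ref_vec_t = ref_vec;
  unsigned int count = 0;
  while (*ref_vec_t)
    {
      count++;
      ref_vec_t++;
    }
  qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
#endif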
2323
2324 static void
2325 df_swap_refs (df_ref *ref_vec, int i, int j)
2326 {
2327 df_ref tmp = ref_vec[i];
2328 ref_vec[i] = ref_vec[j];
2329 ref_vec[j] = tmp;
2330 }
2331
2332 /* Sort and compress a set of refs. */
2333
2334 static unsigned int
2335 df_sort_and_compress_refs (df_ref *ref_vec, unsigned int count)
2336 {
2337 unsigned int i;
2338 unsigned int dist = 0;
2339
2340 ref_vec[count] = NULL;
2341 /* If there are 1 or 0 elements, there is nothing to do. */
2342 if (count < 2)
2343 return count;
2344 else if (count == 2)
2345 {
2346 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2347 df_swap_refs (ref_vec, 0, 1);
2348 }
2349 else
2350 {
2351 for (i = 0; i < count - 1; i++)
2352 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2353 break;
2354 /* If the array is already strictly ordered,
2355 which is the most common case when COUNT is large
2356 (as happens for CALL insns),
2357 there is no need to sort and filter out duplicates.
2358 Simply return the count.
2359 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2360 of DF_REF_COMPARE. */
2361 if (i == count - 1)
2362 return count;
2363 qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
2364 }
2365
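/* Compact the vector in place: DIST counts the duplicates freed so
   far, so the surviving ref at I + DIST + 1 slides down to I + 1. */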
2366 for (i = 0; i < count - dist; i++)
2367 {
2368 /* Find the next ref that is not equal to the current ref. */
2369 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2370 {
2371 df_free_ref (ref_vec[i + dist + 1]);
2372 dist++;
2373 }
2374 /* Copy it down to the next position. */
2375 if (dist)
2376 ref_vec[i+1] = ref_vec[i + dist + 1];
2377 }
2378
2379 count -= dist;
2380 ref_vec[count] = NULL;
2381 return count;
2382 }
2383
2384
2385 /* Return true if the contents of two df_mw_hardregs are
2386 identical. */
2387
2388 static bool
2389 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2390 {
2391 if (!mw2)
2392 return false;
2393 return ((mw1 == mw2)
2394 || (mw1->mw_reg == mw2->mw_reg
2395 && mw1->type == mw2->type
2396 && mw1->flags == mw2->flags
2397 && mw1->start_regno == mw2->start_regno
2398 && mw1->end_regno == mw2->end_regno));
2399 }
2400
2401
2402 /* Compare MW1 and MW2 for sorting. */
2403
2404 static int
2405 df_mw_compare (const void *m1, const void *m2)
2406 {
2407 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2408 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2409
2410 if (mw1 == mw2)
2411 return 0;
2412
2413 if (mw1->type != mw2->type)
2414 return mw1->type - mw2->type;
2415
2416 if (mw1->flags != mw2->flags)
2417 return mw1->flags - mw2->flags;
2418
2419 if (mw1->start_regno != mw2->start_regno)
2420 return mw1->start_regno - mw2->start_regno;
2421
2422 if (mw1->end_regno != mw2->end_regno)
2423 return mw1->end_regno - mw2->end_regno;
2424
2425 if (mw1->mw_reg != mw2->mw_reg)
2426 return mw1->mw_order - mw2->mw_order;
2427
2428 return 0;
2429 }
2430
2431
2432 /* Sort and compress a set of mws. */
2433
2434 static unsigned int
2435 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2436 {
2437 struct df_scan_problem_data *problem_data
2438 = (struct df_scan_problem_data *) df_scan->problem_data;
2439 unsigned int i;
2440 unsigned int dist = 0;
2441 mw_vec[count] = NULL;
2442
2443 if (count < 2)
2444 return count;
2445 else if (count == 2)
2446 {
2447 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2448 {
2449 struct df_mw_hardreg *tmp = mw_vec[0];
2450 mw_vec[0] = mw_vec[1];
2451 mw_vec[1] = tmp;
2452 }
2453 }
2454 else
2455 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2456
2457 for (i = 0; i < count - dist; i++)
2458 {
2459 /* Find the next ref that is not equal to the current ref. */
2460 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2461 {
2462 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2463 dist++;
2464 }
2465 /* Copy it down to the next position. */
2466 if (dist)
2467 mw_vec[i+1] = mw_vec[i + dist + 1];
2468 }
2469
2470 count -= dist;
2471 mw_vec[count] = NULL;
2472 return count;
2473 }
2474
2475
2476 /* Sort and remove duplicates from the COLLECTION_REC. */
2477
2478 static void
2479 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2480 {
2481 if (collection_rec->def_vec)
2482 collection_rec->next_def
2483 = df_sort_and_compress_refs (collection_rec->def_vec,
2484 collection_rec->next_def);
2485 if (collection_rec->use_vec)
2486 collection_rec->next_use
2487 = df_sort_and_compress_refs (collection_rec->use_vec,
2488 collection_rec->next_use);
2489 if (collection_rec->eq_use_vec)
2490 collection_rec->next_eq_use
2491 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2492 collection_rec->next_eq_use);
2493 if (collection_rec->mw_vec)
2494 collection_rec->next_mw
2495 = df_sort_and_compress_mws (collection_rec->mw_vec,
2496 collection_rec->next_mw);
2497 }
2498
2499
2500 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2501
2502 static void
2503 df_install_ref (df_ref this_ref,
2504 struct df_reg_info *reg_info,
2505 struct df_ref_info *ref_info,
2506 bool add_to_table)
2507 {
2508 unsigned int regno = DF_REF_REGNO (this_ref);
2509 /* Add the ref to the reg_{def,use,eq_use} chain. */
2510 df_ref head = reg_info->reg_chain;
2511
2512 reg_info->reg_chain = this_ref;
2513 reg_info->n_refs++;
2514
2515 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2516 {
2517 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2518 df->hard_regs_live_count[regno]++;
2519 }
2520
2521 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2522 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2523
2524 DF_REF_NEXT_REG (this_ref) = head;
2525
2526 /* We cannot actually link back to the head of the chain; the new
first element of the chain has no PREV. */
2527 DF_REF_PREV_REG (this_ref) = NULL;
2528
2529 if (head)
2530 DF_REF_PREV_REG (head) = this_ref;
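/* The new ref now sits at the front of the doubly linked reg chain:
   reg_info->reg_chain -> this_ref <-> old head <-> ... */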
2531
2532 if (add_to_table)
2533 {
2534 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2535 df_check_and_grow_ref_info (ref_info, 1);
2536 DF_REF_ID (this_ref) = ref_info->table_size;
2537 /* Add the ref to the big array of defs. */
2538 ref_info->refs[ref_info->table_size] = this_ref;
2539 ref_info->table_size++;
2540 }
2541 else
2542 DF_REF_ID (this_ref) = -1;
2543
2544 ref_info->total_size++;
2545 }
2546
2547
2548 /* This function takes one of the groups of refs (defs, uses or
2549 eq_uses) and installs the entire group into the insn. It also adds
2550 each of these refs into the appropriate chains. */
2551
2552 static df_ref *
2553 df_install_refs (basic_block bb,
2554 df_ref *old_vec, unsigned int count,
2555 struct df_reg_info **reg_info,
2556 struct df_ref_info *ref_info,
2557 bool is_notes)
2558 {
2559 if (count)
2560 {
2561 unsigned int i;
2562 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
2563 bool add_to_table;
2564
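/* New refs arrive in no particular order, so any BY_REG or BY_INSN
   ordering the table had is lost; downgrade the recorded order to
   the matching UNORDERED kind while deciding whether these refs
   belong in the table at all. */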
2565 switch (ref_info->ref_order)
2566 {
2567 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2568 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2569 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2570 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2571 add_to_table = true;
2572 break;
2573 case DF_REF_ORDER_UNORDERED:
2574 case DF_REF_ORDER_BY_REG:
2575 case DF_REF_ORDER_BY_INSN:
2576 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2577 add_to_table = !is_notes;
2578 break;
2579 default:
2580 add_to_table = false;
2581 break;
2582 }
2583
2584 /* Do not add if ref is not in the right blocks. */
2585 if (add_to_table && df->analyze_subset)
2586 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2587
2588 for (i = 0; i < count; i++)
2589 {
2590 df_ref this_ref = old_vec[i];
2591 new_vec[i] = this_ref;
2592 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2593 ref_info, add_to_table);
2594 }
2595
2596 new_vec[count] = NULL;
2597 return new_vec;
2598 }
2599 else
2600 return df_null_ref_rec;
2601 }
2602
2603
2604 /* This function takes the mws and installs the entire group into
2605 the insn. */
2606
2607 static struct df_mw_hardreg **
2608 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2609 {
2610 if (count)
2611 {
2612 struct df_mw_hardreg **new_vec
2613 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2614 memcpy (new_vec, old_vec,
2615 sizeof (struct df_mw_hardreg*) * (count + 1));
2616 return new_vec;
2617 }
2618 else
2619 return df_null_mw_rec;
2620 }
2621
2622
2623 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2624 chains and update other necessary information. */
2625
2626 static void
2627 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2628 basic_block bb, rtx insn)
2629 {
2630 if (insn)
2631 {
2632 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2633 /* If there is a vector in the collection rec, add it to the
2634 insn. A null rec is a signal that the caller will handle the
2635 chain specially. */
2636 if (collection_rec->def_vec)
2637 {
2638 df_scan_free_ref_vec (insn_rec->defs);
2639 insn_rec->defs
2640 = df_install_refs (bb, collection_rec->def_vec,
2641 collection_rec->next_def,
2642 df->def_regs,
2643 &df->def_info, false);
2644 }
2645 if (collection_rec->use_vec)
2646 {
2647 df_scan_free_ref_vec (insn_rec->uses);
2648 insn_rec->uses
2649 = df_install_refs (bb, collection_rec->use_vec,
2650 collection_rec->next_use,
2651 df->use_regs,
2652 &df->use_info, false);
2653 }
2654 if (collection_rec->eq_use_vec)
2655 {
2656 df_scan_free_ref_vec (insn_rec->eq_uses);
2657 insn_rec->eq_uses
2658 = df_install_refs (bb, collection_rec->eq_use_vec,
2659 collection_rec->next_eq_use,
2660 df->eq_use_regs,
2661 &df->use_info, true);
2662 }
2663 if (collection_rec->mw_vec)
2664 {
2665 df_scan_free_mws_vec (insn_rec->mw_hardregs);
2666 insn_rec->mw_hardregs
2667 = df_install_mws (collection_rec->mw_vec,
2668 collection_rec->next_mw);
2669 }
2670 }
2671 else
2672 {
2673 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2674
2675 df_scan_free_ref_vec (bb_info->artificial_defs);
2676 bb_info->artificial_defs
2677 = df_install_refs (bb, collection_rec->def_vec,
2678 collection_rec->next_def,
2679 df->def_regs,
2680 &df->def_info, false);
2681 df_scan_free_ref_vec (bb_info->artificial_uses);
2682 bb_info->artificial_uses
2683 = df_install_refs (bb, collection_rec->use_vec,
2684 collection_rec->next_use,
2685 df->use_regs,
2686 &df->use_info, false);
2687 }
2688 }
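
/* Putting the pieces together (illustrative only; compare
   df_bb_refs_record below): a scanning routine collects refs into a
   stack-allocated collection record and then installs them. */
#if 0
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
  df_insn_refs_collect (&collection_rec, bb, insn_info);
  df_refs_add_to_chains (&collection_rec, bb, insn);
#endif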
2689
2690
2691 /* Allocate a ref and initialize its fields.
2692
2693 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
2694 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE describe the extraction
2695 when its parameters are constants. Otherwise WIDTH and OFFSET
2696 should be -1 if those flags were set. */
2697
2698 static df_ref
2699 df_ref_create_structure (enum df_ref_class cl,
2700 struct df_collection_rec *collection_rec,
2701 rtx reg, rtx *loc,
2702 basic_block bb, struct df_insn_info *info,
2703 enum df_ref_type ref_type,
2704 enum df_ref_flags ref_flags,
2705 int width, int offset, enum machine_mode mode)
2706 {
2707 df_ref this_ref = NULL;
2708 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2709 struct df_scan_problem_data *problem_data
2710 = (struct df_scan_problem_data *) df_scan->problem_data;
2711
2712 switch (cl)
2713 {
2714 case DF_REF_BASE:
2715 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2716 gcc_assert (loc == NULL);
2717 break;
2718
2719 case DF_REF_ARTIFICIAL:
2720 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2721 this_ref->artificial_ref.bb = bb;
2722 gcc_assert (loc == NULL);
2723 break;
2724
2725 case DF_REF_REGULAR:
2726 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2727 this_ref->regular_ref.loc = loc;
2728 gcc_assert (loc);
2729 break;
2730
2731 case DF_REF_EXTRACT:
2732 this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
2733 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2734 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2735 DF_REF_EXTRACT_MODE (this_ref) = mode;
2736 this_ref->regular_ref.loc = loc;
2737 gcc_assert (loc);
2738 break;
2739 }
2740
2741 DF_REF_CLASS (this_ref) = cl;
2742 DF_REF_ID (this_ref) = -1;
2743 DF_REF_REG (this_ref) = reg;
2744 DF_REF_REGNO (this_ref) = regno;
2745 DF_REF_TYPE (this_ref) = ref_type;
2746 DF_REF_INSN_INFO (this_ref) = info;
2747 DF_REF_CHAIN (this_ref) = NULL;
2748 DF_REF_FLAGS (this_ref) = ref_flags;
2749 DF_REF_NEXT_REG (this_ref) = NULL;
2750 DF_REF_PREV_REG (this_ref) = NULL;
2751 DF_REF_ORDER (this_ref) = df->ref_order++;
2752
2753 /* We need to clear this bit because fwprop, and in the future
2754 possibly other optimizations, sometimes create new refs using old
2755 refs as the model. */
2756 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2757
2758 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2759 if ((regno < FIRST_PSEUDO_REGISTER)
2760 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2761 {
2762 if (DF_REF_REG_DEF_P (this_ref))
2763 {
2764 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2765 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2766 }
2767 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2768 && (regno == FRAME_POINTER_REGNUM
2769 || regno == ARG_POINTER_REGNUM)))
2770 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2771 }
2772
2773 if (collection_rec)
2774 {
2775 if (DF_REF_REG_DEF_P (this_ref))
2776 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2777 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2778 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2779 else
2780 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2781 }
2782
2783 return this_ref;
2784 }
2785
2786
2787 /* Create new references of type DF_REF_TYPE for each part of register REG
2788 at address LOC within INSN of BB.
2789
2790 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
2791 DF_REF_ZERO_EXTRACT, WIDTH, OFFSET and MODE describe the
2792 extraction when its parameters are constants. Otherwise WIDTH and
2793 OFFSET should be -1 if those flags were set. */
2794
2795
2796 static void
2797 df_ref_record (enum df_ref_class cl,
2798 struct df_collection_rec *collection_rec,
2799 rtx reg, rtx *loc,
2800 basic_block bb, struct df_insn_info *insn_info,
2801 enum df_ref_type ref_type,
2802 enum df_ref_flags ref_flags,
2803 int width, int offset, enum machine_mode mode)
2804 {
2805 unsigned int regno;
2806
2807 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2808
2809 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2810 if (regno < FIRST_PSEUDO_REGISTER)
2811 {
2812 struct df_mw_hardreg *hardreg = NULL;
2813 struct df_scan_problem_data *problem_data
2814 = (struct df_scan_problem_data *) df_scan->problem_data;
2815 unsigned int i;
2816 unsigned int endregno;
2817 df_ref ref;
2818
2819 if (GET_CODE (reg) == SUBREG)
2820 {
2821 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2822 SUBREG_BYTE (reg), GET_MODE (reg));
2823 endregno = regno + subreg_nregs (reg);
2824 }
2825 else
2826 endregno = END_HARD_REGNO (reg);
2827
2828 /* If this is a multiword hardreg, we create some extra
2829 data structures that will enable us to easily build REG_DEAD
2830 and REG_UNUSED notes. */
2831 if ((endregno != regno + 1) && insn_info)
2832 {
2833 /* Sets to a subreg of a multiword register are partial.
2834 Sets to a non-subreg of a multiword register are not. */
2835 if (GET_CODE (reg) == SUBREG)
2836 ref_flags |= DF_REF_PARTIAL;
2837 ref_flags |= DF_REF_MW_HARDREG;
2838
2839 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2840 hardreg->type = ref_type;
2841 hardreg->flags = ref_flags;
2842 hardreg->mw_reg = reg;
2843 hardreg->start_regno = regno;
2844 hardreg->end_regno = endregno - 1;
2845 hardreg->mw_order = df->ref_order++;
2846 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2847 }
2848
2849 for (i = regno; i < endregno; i++)
2850 {
2851 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2852 bb, insn_info, ref_type, ref_flags,
2853 width, offset, mode);
2854
2855 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2856 }
2857 }
2858 else
2859 {
2860 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2861 ref_type, ref_flags, width, offset, mode);
2862 }
2863 }
2864
2865
2866 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2867 covered by the outer mode is smaller than that covered by the inner mode,
2868 is a read-modify-write operation.
2869 This function returns true iff the SUBREG X is such a SUBREG. */
2870
2871 bool
2872 df_read_modify_subreg_p (rtx x)
2873 {
2874 unsigned int isize, osize;
2875 if (GET_CODE (x) != SUBREG)
2876 return false;
2877 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2878 osize = GET_MODE_SIZE (GET_MODE (x));
2879 return isize > osize
2880 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2881 }
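
/* For example (illustrative, assuming a 32-bit word_mode target where
   REGMODE_NATURAL_SIZE yields 4): a write to (subreg:SI (reg:DI) 0)
   leaves the upper word of the DImode register untouched, so
   isize (8) > osize (4) and the store is a read-modify-write. */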
2882
2883
2884 /* Process all the registers defined in the rtx, X.
2885 Autoincrement/decrement definitions will be picked up by
2886 df_uses_record. */
2887
2888 static void
2889 df_def_record_1 (struct df_collection_rec *collection_rec,
2890 rtx x, basic_block bb, struct df_insn_info *insn_info,
2891 enum df_ref_flags flags)
2892 {
2893 rtx *loc;
2894 rtx dst;
2895 int offset = -1;
2896 int width = -1;
2897 enum machine_mode mode = 0;
2898 enum df_ref_class cl = DF_REF_REGULAR;
2899
2900 /* We may recursively call ourselves on EXPR_LIST when dealing with a
2901 PARALLEL construct. */
2902 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2903 loc = &XEXP (x, 0);
2904 else
2905 loc = &SET_DEST (x);
2906 dst = *loc;
2907
2908 /* It is legal to have a set destination be a parallel. */
2909 if (GET_CODE (dst) == PARALLEL)
2910 {
2911 int i;
2912
2913 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2914 {
2915 rtx temp = XVECEXP (dst, 0, i);
2916 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2917 || GET_CODE (temp) == SET)
2918 df_def_record_1 (collection_rec,
2919 temp, bb, insn_info,
2920 GET_CODE (temp) == CLOBBER
2921 ? flags | DF_REF_MUST_CLOBBER : flags);
2922 }
2923 return;
2924 }
2925
2926 if (GET_CODE (dst) == STRICT_LOW_PART)
2927 {
2928 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2929
2930 loc = &XEXP (dst, 0);
2931 dst = *loc;
2932 }
2933
2934 if (GET_CODE (dst) == ZERO_EXTRACT)
2935 {
2936 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2937
2938 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
2939 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
2940 {
2941 width = INTVAL (XEXP (dst, 1));
2942 offset = INTVAL (XEXP (dst, 2));
2943 mode = GET_MODE (dst);
2944 cl = DF_REF_EXTRACT;
2945 }
2946
2947 loc = &XEXP (dst, 0);
2948 dst = *loc;
2949 }
2950
2951 /* At this point if we do not have a reg or a subreg, just return. */
2952 if (REG_P (dst))
2953 {
2954 df_ref_record (cl, collection_rec,
2955 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
2956 width, offset, mode);
2957
2958 /* We want to keep sp alive everywhere, by making all
2959 writes to sp also uses of sp. */
2960 if (REGNO (dst) == STACK_POINTER_REGNUM)
2961 df_ref_record (DF_REF_BASE, collection_rec,
2962 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
2963 width, offset, mode);
2964 }
2965 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2966 {
2967 if (df_read_modify_subreg_p (dst))
2968 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2969
2970 flags |= DF_REF_SUBREG;
2971
2972 df_ref_record (cl, collection_rec,
2973 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
2974 width, offset, mode);
2975 }
2976 }
2977
2978
2979 /* Process all the registers defined in the pattern rtx, X. */
2980
2981 static void
2982 df_defs_record (struct df_collection_rec *collection_rec,
2983 rtx x, basic_block bb, struct df_insn_info *insn_info,
2984 enum df_ref_flags flags)
2985 {
2986 RTX_CODE code = GET_CODE (x);
2987
2988 if (code == SET || code == CLOBBER)
2989 {
2990 /* Mark the single def within the pattern. */
2991 enum df_ref_flags clobber_flags = flags;
2992 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2993 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
2994 }
2995 else if (code == COND_EXEC)
2996 {
2997 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2998 bb, insn_info, DF_REF_CONDITIONAL);
2999 }
3000 else if (code == PARALLEL)
3001 {
3002 int i;
3003
3004 /* Mark the multiple defs within the pattern. */
3005 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3006 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3007 }
3008 }
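
/* For example (illustrative), a pattern such as
     (parallel [(set (reg:SI 60) (...)) (clobber (reg:CC flags))])
   is walked element by element; the SET destination becomes a plain
   def while the CLOBBER destination gets DF_REF_MUST_CLOBBER. */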
3009
3010
3011 /* Process all the registers used in the rtx at address LOC.
3012
3013 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
3014 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
3015 fields if they were constants. Otherwise they should be -1 if
3016 those flags were set. */
3017
3018 static void
3019 df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
3020 rtx *loc, enum df_ref_type ref_type,
3021 basic_block bb, struct df_insn_info *insn_info,
3022 enum df_ref_flags flags,
3023 int width, int offset, enum machine_mode mode)
3024 {
3025 RTX_CODE code;
3026 rtx x;
3027
3028 retry:
3029 x = *loc;
3030 if (!x)
3031 return;
3032 code = GET_CODE (x);
3033 switch (code)
3034 {
3035 case LABEL_REF:
3036 case SYMBOL_REF:
3037 case CONST_INT:
3038 case CONST:
3039 case CONST_DOUBLE:
3040 case CONST_FIXED:
3041 case CONST_VECTOR:
3042 case PC:
3043 case CC0:
3044 case ADDR_VEC:
3045 case ADDR_DIFF_VEC:
3046 return;
3047
3048 case CLOBBER:
3049 /* If we are clobbering a MEM, mark any registers inside the address
3050 as being used. */
3051 if (MEM_P (XEXP (x, 0)))
3052 df_uses_record (cl, collection_rec,
3053 &XEXP (XEXP (x, 0), 0),
3054 DF_REF_REG_MEM_STORE,
3055 bb, insn_info,
3056 flags, width, offset, mode);
3057
3058 /* If we're clobbering a REG then we have a def, so ignore it. */
3059 return;
3060
3061 case MEM:
3062 df_uses_record (cl, collection_rec,
3063 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3064 bb, insn_info, flags & DF_REF_IN_NOTE,
3065 width, offset, mode);
3066 return;
3067
3068 case SUBREG:
3069 /* While we're here, optimize this case. */
3070 flags |= DF_REF_PARTIAL;
3071 /* In case the SUBREG is not of a REG, do not optimize. */
3072 if (!REG_P (SUBREG_REG (x)))
3073 {
3074 loc = &SUBREG_REG (x);
3075 df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
3076 width, offset, mode);
3077 return;
3078 }
3079 /* ... Fall through ... */
3080
3081 case REG:
3082 df_ref_record (cl, collection_rec,
3083 x, loc, bb, insn_info,
3084 ref_type, flags,
3085 width, offset, mode);
3086 return;
3087
3088 case SIGN_EXTRACT:
3089 case ZERO_EXTRACT:
3090 {
3091 /* If the parameters to the zero or sign extract are
3092 constants, strip them off and recurse, otherwise there is
3093 no information that we can gain from this operation. */
3094 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3095 && GET_CODE (XEXP (x, 2)) == CONST_INT)
3096 {
3097 width = INTVAL (XEXP (x, 1));
3098 offset = INTVAL (XEXP (x, 2));
3099 mode = GET_MODE (x);
3100
3101 if (code == ZERO_EXTRACT)
3102 flags |= DF_REF_ZERO_EXTRACT;
3103 else
3104 flags |= DF_REF_SIGN_EXTRACT;
3105
3106 df_uses_record (DF_REF_EXTRACT, collection_rec,
3107 &XEXP (x, 0), ref_type, bb, insn_info, flags,
3108 width, offset, mode);
3109 return;
3110 }
3111 }
3112 break;
3113
3114 case SET:
3115 {
3116 rtx dst = SET_DEST (x);
3117 gcc_assert (!(flags & DF_REF_IN_NOTE));
3118 df_uses_record (cl, collection_rec,
3119 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
3120 width, offset, mode);
3121
3122 switch (GET_CODE (dst))
3123 {
3124 case SUBREG:
3125 if (df_read_modify_subreg_p (dst))
3126 {
3127 df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
3128 DF_REF_REG_USE, bb, insn_info,
3129 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
3130 width, offset, mode);
3131 break;
3132 }
3133 /* Fall through. */
3134 case REG:
3135 case PARALLEL:
3136 case SCRATCH:
3137 case PC:
3138 case CC0:
3139 break;
3140 case MEM:
3141 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3142 DF_REF_REG_MEM_STORE, bb, insn_info, flags,
3143 width, offset, mode);
3144 break;
3145 case STRICT_LOW_PART:
3146 {
3147 rtx *temp = &XEXP (dst, 0);
3148 /* A strict_low_part uses the whole REG and not just the
3149 SUBREG. */
3150 dst = XEXP (dst, 0);
3151 df_uses_record (cl, collection_rec,
3152 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3153 DF_REF_REG_USE, bb, insn_info,
3154 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3155 width, offset, mode);
3156 }
3157 break;
3158 case ZERO_EXTRACT:
3159 {
3160 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
3161 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
3162 {
3163 width = INTVAL (XEXP (dst, 1));
3164 offset = INTVAL (XEXP (dst, 2));
3165 mode = GET_MODE (dst);
3166 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
3167 DF_REF_REG_USE, bb, insn_info,
3168 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3169 width, offset, mode);
3170 }
3171 else
3172 {
3173 df_uses_record (cl, collection_rec, &XEXP (dst, 1),
3174 DF_REF_REG_USE, bb, insn_info, flags,
3175 width, offset, mode);
3176 df_uses_record (cl, collection_rec, &XEXP (dst, 2),
3177 DF_REF_REG_USE, bb, insn_info, flags,
3178 width, offset, mode);
3179 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3180 DF_REF_REG_USE, bb, insn_info,
3181 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3182 width, offset, mode);
3183 }
3184
3185 }
3186 break;
3187
3188 default:
3189 gcc_unreachable ();
3190 }
3191 return;
3192 }
3193
3194 case RETURN:
3195 break;
3196
3197 case ASM_OPERANDS:
3198 case UNSPEC_VOLATILE:
3199 case TRAP_IF:
3200 case ASM_INPUT:
3201 {
3202 /* Traditional and volatile asm instructions must be
3203 considered to use and clobber all hard registers, all
3204 pseudo-registers and all of memory. So must TRAP_IF and
3205 UNSPEC_VOLATILE operations.
3206
3207 Consider for instance a volatile asm that changes the fpu
3208 rounding mode. An insn should not be moved across this
3209 even if it only uses pseudo-regs because it might give an
3210 incorrectly rounded result.
3211
3212 However, flow.c's liveness computation did *not* do this,
3213 giving the reasoning as " ?!? Unfortunately, marking all
3214 hard registers as live causes massive problems for the
3215 register allocator and marking all pseudos as live creates
3216 mountains of uninitialized variable warnings."
3217
3218 In order to maintain the status quo with regard to liveness
3219 and uses, we do what flow.c did and just mark any regs we
3220 can find in ASM_OPERANDS as used. In global.c, asm insns are
3221 scanned and regs_asm_clobbered is filled out.
3222
3223 For all ASM_OPERANDS, we must traverse the vector of input
3224 operands. We cannot just fall through here since then we
3225 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3226 which does not indicate a traditional asm, unlike its normal
3227 usage. */
3228 if (code == ASM_OPERANDS)
3229 {
3230 int j;
3231
3232 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3233 df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
3234 DF_REF_REG_USE, bb, insn_info, flags,
3235 width, offset, mode);
3236 return;
3237 }
3238 break;
3239 }
3240
3241 case PRE_DEC:
3242 case POST_DEC:
3243 case PRE_INC:
3244 case POST_INC:
3245 case PRE_MODIFY:
3246 case POST_MODIFY:
3247 /* Catch the def of the register being modified. */
3248 df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3249 bb, insn_info,
3250 DF_REF_REG_DEF,
3251 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3252 width, offset, mode);
3253
3254 /* ... Fall through to handle uses ... */
3255
3256 default:
3257 break;
3258 }
3259
3260 /* Recursively scan the operands of this expression. */
3261 {
3262 const char *fmt = GET_RTX_FORMAT (code);
3263 int i;
3264
3265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3266 {
3267 if (fmt[i] == 'e')
3268 {
3269 /* Tail recursive case: save a function call level. */
3270 if (i == 0)
3271 {
3272 loc = &XEXP (x, 0);
3273 goto retry;
3274 }
3275 df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
3276 bb, insn_info, flags,
3277 width, offset, mode);
3278 }
3279 else if (fmt[i] == 'E')
3280 {
3281 int j;
3282 for (j = 0; j < XVECLEN (x, i); j++)
3283 df_uses_record (cl, collection_rec,
3284 &XVECEXP (x, i, j), ref_type,
3285 bb, insn_info, flags,
3286 width, offset, mode);
3287 }
3288 }
3289 }
3290
3291 return;
3292 }
3293
3294
3295 /* For all DF_REF_CONDITIONAL defs, add corresponding uses. */
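/* A conditional def does not kill the old value of the register,
   since the store only happens when the predicate is true, so
   modelling it as a read-modify-write requires a matching use. */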
3296
3297 static void
3298 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3299 {
3300 unsigned int i;
3301 for (i = 0; i < collection_rec->next_def; i++)
3302 {
3303 df_ref ref = collection_rec->def_vec[i];
3304 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3305 {
3306 int width = -1;
3307 int offset = -1;
3308 enum machine_mode mode = 0;
3309 df_ref use;
3310
3311 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3312 {
3313 width = DF_REF_EXTRACT_WIDTH (ref);
3314 offset = DF_REF_EXTRACT_OFFSET (ref);
3315 mode = DF_REF_EXTRACT_MODE (ref);
3316 }
3317
3318 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3319 DF_REF_LOC (ref), DF_REF_BB (ref),
3320 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3321 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3322 width, offset, mode);
3323 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3324 }
3325 }
3326 }
3327
3328
3329 /* Get the call's extra defs and uses. */
3330
3331 static void
3332 df_get_call_refs (struct df_collection_rec * collection_rec,
3333 basic_block bb,
3334 struct df_insn_info *insn_info,
3335 enum df_ref_flags flags)
3336 {
3337 rtx note;
3338 bitmap_iterator bi;
3339 unsigned int ui;
3340 bool is_sibling_call;
3341 unsigned int i;
3342 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3343
3344 /* Do not generate clobbers for registers that are the result of the
3345 call. Doing so would cause ordering problems in the chain building
3346 code, depending on which def is seen first. */
3347 for (i = 0; i < collection_rec->next_def; i++)
3348 {
3349 df_ref def = collection_rec->def_vec[i];
3350 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3351 }
3352
3353 /* Record the registers used to pass arguments, and those explicitly
3354 noted as clobbered. */
3355 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3356 note = XEXP (note, 1))
3357 {
3358 if (GET_CODE (XEXP (note, 0)) == USE)
3359 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
3360 DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
3361 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3362 {
3363 if (REG_P (XEXP (XEXP (note, 0), 0)))
3364 {
3365 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3366 if (!bitmap_bit_p (defs_generated, regno))
3367 df_defs_record (collection_rec, XEXP (note, 0), bb,
3368 insn_info, flags);
3369 }
3370 else
3371 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
3372 DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
3373 }
3374 }
3375
3376 /* The stack ptr is used (honorarily) by a CALL insn. */
3377 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3378 NULL, bb, insn_info, DF_REF_REG_USE,
3379 DF_REF_CALL_STACK_USAGE | flags,
3380 -1, -1, 0);
3381
3382 /* Calls may also reference any of the global registers,
3383 so they are recorded as used. */
3384 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3385 if (global_regs[i])
3386 {
3387 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3388 NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1, 0);
3389 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3390 NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1, 0);
3391 }
3392
3393 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
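/* Record a MAY_CLOBBER def for each call-invalidated register,
   skipping globals (handled above), registers the call pattern
   itself defines and, for sibling calls, registers that are live
   into the exit block but do not appear in the return value. */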
3394 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3395 {
3396 if (!global_regs[ui]
3397 && (!bitmap_bit_p (defs_generated, ui))
3398 && (!is_sibling_call
3399 || !bitmap_bit_p (df->exit_block_uses, ui)
3400 || refers_to_regno_p (ui, ui+1,
3401 crtl->return_rtx, NULL)))
3402 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3403 NULL, bb, insn_info, DF_REF_REG_DEF,
3404 DF_REF_MAY_CLOBBER | flags,
3405 -1, -1, 0);
3406 }
3407
3408 BITMAP_FREE (defs_generated);
3409 return;
3410 }
3411
3412 /* Collect all refs in the INSN. This function is free of any
3413 side-effects - it will create and return lists of df_ref's in the
3414 COLLECTION_REC without putting those refs into the existing ref
3415 chains and reg chains. */
3416
3417 static void
3418 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3419 basic_block bb, struct df_insn_info *insn_info)
3420 {
3421 rtx note;
3422 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3423
3424 /* Clear out the collection record. */
3425 collection_rec->next_def = 0;
3426 collection_rec->next_use = 0;
3427 collection_rec->next_eq_use = 0;
3428 collection_rec->next_mw = 0;
3429
3430 /* Record register defs. */
3431 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3432
3433 /* Process REG_EQUIV/REG_EQUAL notes. */
3434 for (note = REG_NOTES (insn_info->insn); note;
3435 note = XEXP (note, 1))
3436 {
3437 switch (REG_NOTE_KIND (note))
3438 {
3439 case REG_EQUIV:
3440 case REG_EQUAL:
3441 df_uses_record (DF_REF_REGULAR, collection_rec,
3442 &XEXP (note, 0), DF_REF_REG_USE,
3443 bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
3444 break;
3445 case REG_NON_LOCAL_GOTO:
3446 /* The frame ptr is used by a non-local goto. */
3447 df_ref_record (DF_REF_BASE, collection_rec,
3448 regno_reg_rtx[FRAME_POINTER_REGNUM],
3449 NULL, bb, insn_info,
3450 DF_REF_REG_USE, 0, -1, -1, 0);
3451 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3452 df_ref_record (DF_REF_BASE, collection_rec,
3453 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3454 NULL, bb, insn_info,
3455 DF_REF_REG_USE, 0, -1, -1, 0);
3456 #endif
3457 break;
3458 default:
3459 break;
3460 }
3461 }
3462
3463 if (CALL_P (insn_info->insn))
3464 df_get_call_refs (collection_rec, bb, insn_info,
3465 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3466
3467 /* Record the register uses. */
3468 df_uses_record (DF_REF_REGULAR, collection_rec,
3469 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
3470 -1, -1, 0);
3471
3472 /* DF_REF_CONDITIONAL needs corresponding USES. */
3473 if (is_cond_exec)
3474 df_get_conditional_uses (collection_rec);
3475
3476 df_canonize_collection_rec (collection_rec);
3477 }
3478
3479 /* Recompute the luids for the insns in BB. */
3480
3481 void
3482 df_recompute_luids (basic_block bb)
3483 {
3484 rtx insn;
3485 int luid = 0;
3486
3487 df_grow_insn_info ();
3488
3489 /* Scan the block an insn at a time from beginning to end. */
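/* Note that only INSN_P insns advance LUID, so a label or a note
   shares the luid of the next real insn in the block. */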
3490 FOR_BB_INSNS (bb, insn)
3491 {
3492 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3493 /* Inserting labels does not always trigger the incremental
3494 rescanning. */
3495 if (!insn_info)
3496 {
3497 gcc_assert (!INSN_P (insn));
3498 insn_info = df_insn_create_insn_record (insn);
3499 }
3500
3501 DF_INSN_INFO_LUID (insn_info) = luid;
3502 if (INSN_P (insn))
3503 luid++;
3504 }
3505 }
3506
3507
3508 /* Returns true if the function entry needs to
3509 define the static chain register. */
3510
3511 static bool
3512 df_need_static_chain_reg (struct function *fun)
3513 {
3514 tree fun_context = decl_function_context (fun->decl);
3515 return fun_context
3516 && !DECL_NO_STATIC_CHAIN (fun_context);
3517 }
3518
3519
3520 /* Collect all artificial refs at the block level for BB and add them
3521 to COLLECTION_REC. */
3522
3523 static void
3524 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3525 {
3526 collection_rec->next_def = 0;
3527 collection_rec->next_use = 0;
3528 collection_rec->next_eq_use = 0;
3529 collection_rec->next_mw = 0;
3530
3531 if (bb->index == ENTRY_BLOCK)
3532 {
3533 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3534 return;
3535 }
3536 else if (bb->index == EXIT_BLOCK)
3537 {
3538 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3539 return;
3540 }
3541
3542 #ifdef EH_RETURN_DATA_REGNO
3543 if (bb_has_eh_pred (bb))
3544 {
3545 unsigned int i;
3546 /* Mark the registers that will contain data for the handler. */
3547 for (i = 0; ; ++i)
3548 {
3549 unsigned regno = EH_RETURN_DATA_REGNO (i);
3550 if (regno == INVALID_REGNUM)
3551 break;
3552 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3553 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3554 }
3555 }
3556 #endif
3557
3558
3559 #ifdef EH_USES
3560 if (bb_has_eh_pred (bb))
3561 {
3562 unsigned int i;
3563 /* This code is putting in an artificial ref for the use at the
3564 TOP of the block that receives the exception. It is too
3565 cumbersome to actually put the ref on the edge. We could
3566 either model this at the top of the receiver block or the
3567 bottom of the sender block.
3568
3569 The bottom of the sender block is problematic because not all
3570 out-edges of a block are eh-edges. However, it is true
3571 that all edges into a block are either eh-edges or none of
3572 them are eh-edges. Thus, we can model this at the top of the
3573 eh-receiver for all of the edges at once. */
3574 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3575 if (EH_USES (i))
3576 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3577 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1, 0);
3578 }
3579 #endif
3580
3581 /* Add the hard_frame_pointer if this block is the target of a
3582 non-local goto. */
3583 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3584 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3585 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3586
3587 /* Add the artificial uses. */
3588 if (bb->index >= NUM_FIXED_BLOCKS)
3589 {
3590 bitmap_iterator bi;
3591 unsigned int regno;
3592 bitmap au = bb_has_eh_pred (bb)
3593 ? df->eh_block_artificial_uses
3594 : df->regular_block_artificial_uses;
3595
3596 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3597 {
3598 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3599 bb, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3600 }
3601 }
3602
3603 df_canonize_collection_rec (collection_rec);
3604 }
3605
3606
3607 /* Record all the refs within the basic block BB_INDEX and scan the
instructions if SCAN_INSNS. */
3608
3609 void
3610 df_bb_refs_record (int bb_index, bool scan_insns)
3611 {
3612 basic_block bb = BASIC_BLOCK (bb_index);
3613 rtx insn;
3614 int luid = 0;
3615 struct df_scan_bb_info *bb_info;
3616 struct df_collection_rec collection_rec;
3617 collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
3618 collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
3619 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
3620 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
3621
3622 if (!df)
3623 return;
3624
3625 bb_info = df_scan_get_bb_info (bb_index);
3626
3627 /* Need to make sure that there is a record in the basic block info. */
3628 if (!bb_info)
3629 {
3630 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3631 df_scan_set_bb_info (bb_index, bb_info);
3632 bb_info->artificial_defs = NULL;
3633 bb_info->artificial_uses = NULL;
3634 }
3635
3636 if (scan_insns)
3637 /* Scan the block an insn at a time from beginning to end. */
3638 FOR_BB_INSNS (bb, insn)
3639 {
3640 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3641 gcc_assert (!insn_info);
3642
3643 insn_info = df_insn_create_insn_record (insn);
3644 if (INSN_P (insn))
3645 {
3646 /* Record refs within INSN. */
3647 DF_INSN_INFO_LUID (insn_info) = luid++;
3648 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3649 df_refs_add_to_chains (&collection_rec, bb, insn);
3650 }
3651 DF_INSN_INFO_LUID (insn_info) = luid;
3652 }
3653
3654 /* Other block level artificial refs. */
3655 df_bb_refs_collect (&collection_rec, bb);
3656 df_refs_add_to_chains (&collection_rec, bb, NULL);
3657
3658 /* Now that the block has been processed, set the block as dirty so
3659 LR and LIVE will get it processed. */
3660 df_set_bb_dirty (bb);
3661 }
3662
3663
3664 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3665 block. */
3666
3667 static void
3668 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3669 {
3670 bitmap_clear (regular_block_artificial_uses);
3671
3672 if (reload_completed)
3673 {
3674 if (frame_pointer_needed)
3675 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3676 }
3677 else
3678 /* Before reload, there are a few registers that must be forced
3679 live everywhere -- which might not already be the case for
3680 blocks within infinite loops. */
3681 {
3682 /* Any reference to any pseudo before reload is a potential
3683 reference of the frame pointer. */
3684 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3685
3686 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3687 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3688 #endif
3689
3690 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3691 /* Pseudos with argument area equivalences may require
3692 reloading via the argument pointer. */
3693 if (fixed_regs[ARG_POINTER_REGNUM])
3694 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3695 #endif
3696
3697 /* Any constant, or pseudo with constant equivalences, may
3698 require reloading from memory using the pic register. */
3699 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3700 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3701 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3702 }
3703 /* The all-important stack pointer must always be live. */
3704 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3705 }
3706
3707
3708 /* Get the artificial use set for an eh block. */
3709
3710 static void
3711 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3712 {
3713 bitmap_clear (eh_block_artificial_uses);
3714
3715 /* The following code (down through the arg_pointer setting) APPEARS
3716 to be necessary because there is nothing that actually
3717 describes what the exception handling code may need
3718 to keep alive. */
3719 if (reload_completed)
3720 {
3721 if (frame_pointer_needed)
3722 {
3723 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3724 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3725 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3726 #endif
3727 }
3728 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3729 if (fixed_regs[ARG_POINTER_REGNUM])
3730 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3731 #endif
3732 }
3733 }
3734
3735
3736 \f
3737 /*----------------------------------------------------------------------------
3738 Specialized hard register scanning functions.
3739 ----------------------------------------------------------------------------*/
3740
3741
3742 /* Mark a register in SET. Hard registers in large modes get all
3743 of their component registers set as well. */
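/* For instance (illustrative), on a target where hard_regno_nregs
   maps (regno 0, DImode) to 2 registers, calling df_mark_reg on
   (reg:DI 0) sets bits 0 and 1 in SET. */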
3744
3745 static void
3746 df_mark_reg (rtx reg, void *vset)
3747 {
3748 bitmap set = (bitmap) vset;
3749 int regno = REGNO (reg);
3750
3751 gcc_assert (GET_MODE (reg) != BLKmode);
3752
3753 bitmap_set_bit (set, regno);
3754 if (regno < FIRST_PSEUDO_REGISTER)
3755 {
3756 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3757 while (--n > 0)
3758 bitmap_set_bit (set, regno + n);
3759 }
3760 }
3761
3762
3763 /* Set the bit for regs that are considered to be defined at the entry. */
3764
3765 static void
3766 df_get_entry_block_def_set (bitmap entry_block_defs)
3767 {
3768 rtx r;
3769 int i;
3770
3771 bitmap_clear (entry_block_defs);
3772
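/* INCOMING_REGNO matters on targets such as SPARC with register
   windows, where an argument arrives in a different hard register
   from the one the caller used to pass it. */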
3773 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3774 {
3775 if (FUNCTION_ARG_REGNO_P (i))
3776 #ifdef INCOMING_REGNO
3777 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3778 #else
3779 bitmap_set_bit (entry_block_defs, i);
3780 #endif
3781 }
3782
3783 /* The always important stack pointer. */
3784 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3785
3786 /* Once the prologue has been generated, all of these registers
3787 should just show up in the first regular block. */
3788 if (HAVE_prologue && epilogue_completed)
3789 {
3790 /* Defs for the callee saved registers are inserted so that the
3791 pushes have some defining location. */
3792 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3793 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3794 bitmap_set_bit (entry_block_defs, i);
3795 }
3796 else
3797 {
3798 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3799 only STATIC_CHAIN_REGNUM is defined. If they are different,
3800 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3801 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3802 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3803 #else
3804 #ifdef STATIC_CHAIN_REGNUM
3805 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3806 #endif
3807 #endif
3808 }
3809
3810 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3811 if (r && REG_P (r))
3812 bitmap_set_bit (entry_block_defs, REGNO (r));
3813
3814 if ((!reload_completed) || frame_pointer_needed)
3815 {
3816 /* Any reference to any pseudo before reload is a potential
3817 reference of the frame pointer. */
3818 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3819 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3820 /* If they are different, also mark the hard frame pointer as live. */
3821 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3822 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3823 #endif
3824 }
3825
3826 /* These registers are live everywhere. */
3827 if (!reload_completed)
3828 {
3829 #ifdef EH_USES
3830 /* The ia-64, the only machine that uses this, does not define these
3831 until after reload. */
3832 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3833 if (EH_USES (i))
3834 {
3835 bitmap_set_bit (entry_block_defs, i);
3836 }
3837 #endif
3838
3839 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3840 /* Pseudos with argument area equivalences may require
3841 reloading via the argument pointer. */
3842 if (fixed_regs[ARG_POINTER_REGNUM])
3843 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3844 #endif
3845
3846 #ifdef PIC_OFFSET_TABLE_REGNUM
3847 /* Any constant, or pseudo with constant equivalences, may
3848 require reloading from memory using the pic register. */
3849 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3850 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3851 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3852 #endif
3853 }
3854
3855 #ifdef INCOMING_RETURN_ADDR_RTX
3856 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3857 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3858 #endif
3859
3860 targetm.live_on_entry (entry_block_defs);
3861
3862 /* If the function has an incoming STATIC_CHAIN,
3863 it has to show up in the entry def set. */
3864 if (df_need_static_chain_reg (cfun))
3865 {
3866 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3867 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3868 #else
3869 #ifdef STATIC_CHAIN_REGNUM
3870 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3871 #endif
3872 #endif
3873 }
3874 }
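/* For reference, the INCOMING_REGNO macro used above remaps an
   outgoing argument register to the register the value arrives in.
   A sketch in the style of a register-window target such as SPARC
   (simplified; the authoritative definition lives in the target
   header):

       #define INCOMING_REGNO(OUT) \
         (((OUT) < 8 || (OUT) > 13) ? (OUT) : (OUT) + 16)

   so an argument passed in %o0-%o5 is recorded in ENTRY_BLOCK_DEFS
   as the corresponding incoming register %i0-%i5.  */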
3875
3876
3877 /* Collect the refs for the (conservative) set of hard registers
3878 that are defined on entry to the function.
3879 It uses ENTRY_BLOCK_DEFS to determine which register
3880 references to include.  */
3881
3882 static void
3883 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3884 bitmap entry_block_defs)
3885 {
3886 unsigned int i;
3887 bitmap_iterator bi;
3888
3889 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3890 {
3891 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3892 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1, 0);
3893 }
3894
3895 df_canonize_collection_rec (collection_rec);
3896 }
3897
3898
3899 /* Record the (conservative) set of hard registers that are defined on
3900 entry to the function. */
3901
3902 static void
3903 df_record_entry_block_defs (bitmap entry_block_defs)
3904 {
3905 struct df_collection_rec collection_rec;
3906 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3907 collection_rec.def_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
3908
3909 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3910
3911 /* Process the bb_refs chain.  */
3912 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3913 }
3914
3915
3916 /* Update the defs in the entry block. */
3917
3918 void
3919 df_update_entry_block_defs (void)
3920 {
3921 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3922 bool changed = false;
3923
3924 df_get_entry_block_def_set (refs);
3925 if (df->entry_block_defs)
3926 {
3927 if (!bitmap_equal_p (df->entry_block_defs, refs))
3928 {
3929 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3930 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3931 df_ref_chain_delete (bb_info->artificial_defs);
3932 bb_info->artificial_defs = NULL;
3933 changed = true;
3934 }
3935 }
3936 else
3937 {
3938 struct df_scan_problem_data *problem_data
3939 = (struct df_scan_problem_data *) df_scan->problem_data;
3940 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3941 changed = true;
3942 }
3943
3944 if (changed)
3945 {
3946 df_record_entry_block_defs (refs);
3947 bitmap_copy (df->entry_block_defs, refs);
3948 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3949 }
3950 BITMAP_FREE (refs);
3951 }
3952
3953
3954 /* Set the bits for regs that are considered to be used at exit.  */
3955
3956 static void
3957 df_get_exit_block_use_set (bitmap exit_block_uses)
3958 {
3959 unsigned int i;
3960
3961 bitmap_clear (exit_block_uses);
3962
3963 /* Stack pointer is always live at the exit. */
3964 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3965
3966 /* Mark the frame pointer if needed at the end of the function.
3967 If we end up eliminating it, it will be removed from the live
3968 list of each basic block by reload. */
3969
3970 if ((!reload_completed) || frame_pointer_needed)
3971 {
3972 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3973 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3974 /* If they are different, also mark the hard frame pointer as live. */
3975 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3976 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3977 #endif
3978 }
3979
3980 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3981 /* Many architectures have a GP register even without flag_pic.
3982 Assume the pic register is not in use, or will be handled by
3983 other means, if it is not fixed. */
3984 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3985 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3986 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3987 #endif
3988
3989 /* Mark all global registers, and all registers used by the
3990 epilogue as being live at the end of the function since they
3991 may be referenced by our caller. */
3992 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3993 if (global_regs[i] || EPILOGUE_USES (i))
3994 bitmap_set_bit (exit_block_uses, i);
3995
3996 if (HAVE_epilogue && epilogue_completed)
3997 {
3998 /* Mark all call-saved registers that we actually used. */
3999 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4000 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
4001 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4002 bitmap_set_bit (exit_block_uses, i);
4003 }
4004
4005 #ifdef EH_RETURN_DATA_REGNO
4006 /* Mark the registers that will contain data for the handler. */
4007 if (reload_completed && crtl->calls_eh_return)
4008 for (i = 0; ; ++i)
4009 {
4010 unsigned regno = EH_RETURN_DATA_REGNO (i);
4011 if (regno == INVALID_REGNUM)
4012 break;
4013 bitmap_set_bit (exit_block_uses, regno);
4014 }
4015 #endif
4016
4017 #ifdef EH_RETURN_STACKADJ_RTX
4018 if ((!HAVE_epilogue || ! epilogue_completed)
4019 && crtl->calls_eh_return)
4020 {
4021 rtx tmp = EH_RETURN_STACKADJ_RTX;
4022 if (tmp && REG_P (tmp))
4023 df_mark_reg (tmp, exit_block_uses);
4024 }
4025 #endif
4026
4027 #ifdef EH_RETURN_HANDLER_RTX
4028 if ((!HAVE_epilogue || ! epilogue_completed)
4029 && crtl->calls_eh_return)
4030 {
4031 rtx tmp = EH_RETURN_HANDLER_RTX;
4032 if (tmp && REG_P (tmp))
4033 df_mark_reg (tmp, exit_block_uses);
4034 }
4035 #endif
4036
4037 /* Mark function return value. */
4038 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4039 }
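/* The EPILOGUE_USES test above lets a target keep registers that its
   epilogue reads live through the end of the function.  A hedged
   sketch of such a definition (LINK_REGNUM is a hypothetical name;
   real targets use their own register numbering):

       #define EPILOGUE_USES(REGNO)  ((REGNO) == LINK_REGNUM)

   With a definition like this, the return-address register stays in
   EXIT_BLOCK_USES even when no visible insn reads it.  */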
4040
4041
4042 /* Collect the refs of hard registers that are used in the exit block.
4043 It uses EXIT_BLOCK_USES to determine which registers to include.  */
4044
4045 static void
4046 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4047 {
4048 unsigned int i;
4049 bitmap_iterator bi;
4050
4051 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4052 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4053 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
4054
4055 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4056 /* This use is deliberately not recorded in the exit_block_uses
4057 bitmap; the reason for that is not documented.  */
4058 if (reload_completed
4059 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4060 && bb_has_eh_pred (EXIT_BLOCK_PTR)
4061 && fixed_regs[ARG_POINTER_REGNUM])
4062 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4063 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
4064 #endif
4065
4066 df_canonize_collection_rec (collection_rec);
4067 }
4068
4069
4070 /* Record the set of hard registers that are used in the exit block.
4071 It uses EXIT_BLOCK_USES to determine which registers to include.  */
4072
4073 static void
4074 df_record_exit_block_uses (bitmap exit_block_uses)
4075 {
4076 struct df_collection_rec collection_rec;
4077 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4078 collection_rec.use_vec = XALLOCAVEC (df_ref, FIRST_PSEUDO_REGISTER);
4079
4080 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4081
4082 /* Process the bb_refs chain.  */
4083 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4084 }
4085
4086
4087 /* Update the uses in the exit block. */
4088
4089 void
4090 df_update_exit_block_uses (void)
4091 {
4092 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
4093 bool changed = false;
4094
4095 df_get_exit_block_use_set (refs);
4096 if (df->exit_block_uses)
4097 {
4098 if (!bitmap_equal_p (df->exit_block_uses, refs))
4099 {
4100 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4101 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4102 df_ref_chain_delete (bb_info->artificial_uses);
4103 bb_info->artificial_uses = NULL;
4104 changed = true;
4105 }
4106 }
4107 else
4108 {
4109 struct df_scan_problem_data *problem_data
4110 = (struct df_scan_problem_data *) df_scan->problem_data;
4111 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4112 changed = true;
4113 }
4114
4115 if (changed)
4116 {
4117 df_record_exit_block_uses (refs);
4118 bitmap_copy (df->exit_block_uses, refs);
4119 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4120 }
4121 BITMAP_FREE (refs);
4122 }
4123
4124 static bool initialized = false;
4125
4126
4127 /* Initialize some platform-specific structures.  */
4128
4129 void
4130 df_hard_reg_init (void)
4131 {
4132 int i;
4133 #ifdef ELIMINABLE_REGS
4134 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4135 #endif
4136 if (initialized)
4137 return;
4138
4139 bitmap_obstack_initialize (&persistent_obstack);
4140
4141 /* Record which registers will be eliminated. We use this in
4142 mark_used_regs. */
4143 CLEAR_HARD_REG_SET (elim_reg_set);
4144
4145 #ifdef ELIMINABLE_REGS
4146 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4147 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4148 #else
4149 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4150 #endif
4151
4152 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
4153
4154 /* Inconveniently, this is only readily available in hard reg set
4155 form. */
4156 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4157 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4158 bitmap_set_bit (df_invalidated_by_call, i);
4159
4160 initialized = true;
4161 }
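/* For reference, ELIMINABLE_REGS in a target header is a
   brace-initialized array of {from, to} pairs.  A sketch in the style
   of common 32-bit targets (the exact pairs are target-specific):

       #define ELIMINABLE_REGS                                   \
        {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },         \
         { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },    \
         { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },         \
         { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   The loop above collects every FROM register into elim_reg_set.  */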
4162
4163
4164 /* Recompute the parts of scanning that are based on regs_ever_live
4165 because something changed in that array. */
4166
4167 void
4168 df_update_entry_exit_and_calls (void)
4169 {
4170 basic_block bb;
4171
4172 df_update_entry_block_defs ();
4173 df_update_exit_block_uses ();
4174
4175 /* The call insns need to be rescanned because there may be changes
4176 in the set of registers clobbered across the call. */
4177 FOR_EACH_BB (bb)
4178 {
4179 rtx insn;
4180 FOR_BB_INSNS (bb, insn)
4181 {
4182 if (INSN_P (insn) && CALL_P (insn))
4183 df_insn_rescan (insn);
4184 }
4185 }
4186 }
4187
4188
4189 /* Return true if hard REG is actually used in some instruction.
4190 There are a fair number of conditions that affect the setting of
4191 this array.  See the comment in df.h for df->hard_regs_live_count
4192 for the conditions under which this array is set.  */
4193
4194 bool
4195 df_hard_reg_used_p (unsigned int reg)
4196 {
4197 gcc_assert (df);
4198 return df->hard_regs_live_count[reg] != 0;
4199 }
4200
4201
4202 /* Return the number of times REG is actually used in some
4203 instruction.  There are a fair number of conditions that affect
4204 the setting of this count.  See the comment in df.h for
4205 df->hard_regs_live_count for the conditions under which it is
4206 maintained.  */
4207
4208
4209 unsigned int
4210 df_hard_reg_used_count (unsigned int reg)
4211 {
4212 gcc_assert (df);
4213 return df->hard_regs_live_count[reg];
4214 }
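/* A sketch of a consumer (hypothetical pass code): a pass looking for
   a hard register with no real occurrences in the current function
   might guard its choice with

       if (df_hard_reg_used_count (regno) == 0)
         use_reg_as_scratch (regno);

   where use_reg_as_scratch stands in for the pass's own logic.  */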
4215
4216
4217 /* Get the value of regs_ever_live[REGNO]. */
4218
4219 bool
4220 df_regs_ever_live_p (unsigned int regno)
4221 {
4222 return regs_ever_live[regno];
4223 }
4224
4225
4226 /* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
4227 to change, schedule that change for the next update.  */
4228
4229 void
4230 df_set_regs_ever_live (unsigned int regno, bool value)
4231 {
4232 if (regs_ever_live[regno] == value)
4233 return;
4234
4235 regs_ever_live[regno] = value;
4236 if (df)
4237 df->redo_entry_and_exit = true;
4238 }
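/* A typical caller, sketched (the guard shown is illustrative): a
   backend that decides late that it needs the PIC register can record
   that fact here and let the next update rebuild the entry/exit sets:

       if (crtl->uses_pic_offset_table)
         df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

   The scheduled change is applied by df_compute_regs_ever_live
   below.  */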
4239
4240
4241 /* Compute "regs_ever_live" information from the underlying df
4242 information.  Clear the whole vector first if RESET is true.  */
4243
4244 void
4245 df_compute_regs_ever_live (bool reset)
4246 {
4247 unsigned int i;
4248 bool changed = df->redo_entry_and_exit;
4249
4250 if (reset)
4251 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4252
4253 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4254 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4255 {
4256 regs_ever_live[i] = true;
4257 changed = true;
4258 }
4259 if (changed)
4260 df_update_entry_exit_and_calls ();
4261 df->redo_entry_and_exit = false;
4262 }
4263
4264 \f
4265 /*----------------------------------------------------------------------------
4266 Dataflow ref information verification functions.
4267
4268 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4269 df_reg_chain_verify_unmarked (refs)
4270 df_refs_verify (ref*, ref*, bool)
4271 df_mws_verify (mw*, mw*, bool)
4272 df_insn_refs_verify (collection_rec, bb, insn, bool)
4273 df_bb_refs_verify (bb, refs, bool)
4274 df_bb_verify (bb)
4275 df_exit_block_bitmap_verify (bool)
4276 df_entry_block_bitmap_verify (bool)
4277 df_scan_verify ()
4278 ----------------------------------------------------------------------------*/
4279
4280
4281 /* Mark all refs in the reg chain.  Verify that all of the refs
4282 are in the correct chain.  */
4283
4284 static unsigned int
4285 df_reg_chain_mark (df_ref refs, unsigned int regno,
4286 bool is_def, bool is_eq_use)
4287 {
4288 unsigned int count = 0;
4289 df_ref ref;
4290 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4291 {
4292 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4293
4294 /* If there are no def-use or use-def chains, make sure that all
4295 of the chains are clear. */
4296 if (!df_chain)
4297 gcc_assert (!DF_REF_CHAIN (ref));
4298
4299 /* Check to make sure the ref is in the correct chain. */
4300 gcc_assert (DF_REF_REGNO (ref) == regno);
4301 if (is_def)
4302 gcc_assert (DF_REF_REG_DEF_P (ref));
4303 else
4304 gcc_assert (!DF_REF_REG_DEF_P (ref));
4305
4306 if (is_eq_use)
4307 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4308 else
4309 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4310
4311 if (DF_REF_NEXT_REG (ref))
4312 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4313 count++;
4314 DF_REF_REG_MARK (ref);
4315 }
4316 return count;
4317 }
4318
4319
4320 /* Verify that all of the registers in the chain are unmarked. */
4321
4322 static void
4323 df_reg_chain_verify_unmarked (df_ref refs)
4324 {
4325 df_ref ref;
4326 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4327 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4328 }
4329
4330
4331 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4332
4333 static bool
4334 df_refs_verify (df_ref *new_rec, df_ref *old_rec,
4335 bool abort_if_fail)
4336 {
4337 while ((*new_rec) && (*old_rec))
4338 {
4339 if (!df_ref_equal_p (*new_rec, *old_rec))
4340 {
4341 if (abort_if_fail)
4342 gcc_assert (0);
4343 else
4344 return false;
4345 }
4346
4347 /* Abort-if-fail mode is used by the function-level verifier.  In
4348 that context, unmark this ref to record that it has been seen.  */
4349 if (abort_if_fail)
4350 {
4351 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4352 DF_REF_REG_UNMARK (*old_rec);
4353 }
4354
4355 new_rec++;
4356 old_rec++;
4357 }
4358
4359 if (abort_if_fail)
4360 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4361 else
4362 return ((*new_rec == NULL) && (*old_rec == NULL));
4363 return false;
4364 }
4365
4366
4367 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4368
4369 static bool
4370 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4371 bool abort_if_fail)
4372 {
4373 while ((*new_rec) && (*old_rec))
4374 {
4375 if (!df_mw_equal_p (*new_rec, *old_rec))
4376 {
4377 if (abort_if_fail)
4378 gcc_assert (0);
4379 else
4380 return false;
4381 }
4382 new_rec++;
4383 old_rec++;
4384 }
4385
4386 if (abort_if_fail)
4387 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4388 else
4389 return ((*new_rec == NULL) && (*old_rec == NULL));
4390 return false;
4391 }
4392
4393
4394 /* Return true if the existing insn refs information for INSN is
4395 complete and correct.  Otherwise (i.e. if there are any missing
4396 or extra refs), the correct refs are left in COLLECTION_REC.
4397
4398 If ABORT_IF_FAIL is set, this is being called from the
4399 function-level verifier: the refs being checked were marked via the
4400 reg chains, and each ref that verifies is unmarked here.  If it is
4401 clear, this is per-insn verification and the marks are left alone.
4402
4403 If ABORT_IF_FAIL is set, this function never returns false.  */
4404
4405 static bool
4406 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4407 basic_block bb,
4408 rtx insn,
4409 bool abort_if_fail)
4410 {
4411 bool ret1, ret2, ret3, ret4;
4412 unsigned int uid = INSN_UID (insn);
4413 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4414
4415 df_insn_refs_collect (collection_rec, bb, insn_info);
4416
4417 if (!DF_INSN_UID_DEFS (uid))
4418 {
4419 /* The insn_rec was created but it was never filled out. */
4420 if (abort_if_fail)
4421 gcc_assert (0);
4422 else
4423 return false;
4424 }
4425
4426 /* Unfortunately we cannot opt out early if one of these is not
4427 right because the marks will not get cleared. */
4428 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4429 abort_if_fail);
4430 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4431 abort_if_fail);
4432 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4433 abort_if_fail);
4434 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4435 abort_if_fail);
4436 return (ret1 && ret2 && ret3 && ret4);
4437 }
4438
4439
4440 /* Return true if all refs in basic block BB are correct and complete.
4441 As a side effect of df_refs_verify, every ref
4442 that is verified gets its DF_REF_MARK bit cleared.  */
4443
4444 static bool
4445 df_bb_verify (basic_block bb)
4446 {
4447 rtx insn;
4448 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4449 struct df_collection_rec collection_rec;
4450
4451 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4452 collection_rec.def_vec = XALLOCAVEC (df_ref, 1000);
4453 collection_rec.use_vec = XALLOCAVEC (df_ref, 1000);
4454 collection_rec.eq_use_vec = XALLOCAVEC (df_ref, 1000);
4455 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
4456
4457 gcc_assert (bb_info);
4458
4459 /* Scan the block, one insn at a time, from end to beginning.  */
4460 FOR_BB_INSNS_REVERSE (bb, insn)
4461 {
4462 if (!INSN_P (insn))
4463 continue;
4464 df_insn_refs_verify (&collection_rec, bb, insn, true);
4465 df_free_collection_rec (&collection_rec);
4466 }
4467
4468 /* Do the artificial defs and uses. */
4469 df_bb_refs_collect (&collection_rec, bb);
4470 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4471 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4472 df_free_collection_rec (&collection_rec);
4473
4474 return true;
4475 }
4476
4477
4478 /* Return true if the entry block has a correct and complete df_ref set.
4479 If not, either abort (if ABORT_IF_FAIL is true) or return false.  */
4480
4481 static bool
4482 df_entry_block_bitmap_verify (bool abort_if_fail)
4483 {
4484 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4485 bool is_eq;
4486
4487 df_get_entry_block_def_set (entry_block_defs);
4488
4489 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4490
4491 if (!is_eq && abort_if_fail)
4492 {
4493 print_current_pass (stderr);
4494 fprintf (stderr, "entry_block_defs = ");
4495 df_print_regset (stderr, entry_block_defs);
4496 fprintf (stderr, "df->entry_block_defs = ");
4497 df_print_regset (stderr, df->entry_block_defs);
4498 gcc_assert (0);
4499 }
4500
4501 BITMAP_FREE (entry_block_defs);
4502
4503 return is_eq;
4504 }
4505
4506
4507 /* Return true if the exit block has a correct and complete df_ref set.
4508 If not, either abort (if ABORT_IF_FAIL is true) or return false.  */
4509
4510 static bool
4511 df_exit_block_bitmap_verify (bool abort_if_fail)
4512 {
4513 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4514 bool is_eq;
4515
4516 df_get_exit_block_use_set (exit_block_uses);
4517
4518 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4519
4520 if (!is_eq && abort_if_fail)
4521 {
4522 print_current_pass (stderr);
4523 fprintf (stderr, "exit_block_uses = ");
4524 df_print_regset (stderr, exit_block_uses);
4525 fprintf (stderr, "df->exit_block_uses = ");
4526 df_print_regset (stderr, df->exit_block_uses);
4527 gcc_assert (0);
4528 }
4529
4530 BITMAP_FREE (exit_block_uses);
4531
4532 return is_eq;
4533 }
4534
4535
4536 /* Verify that the df_ref information for all insns in all blocks is
4537 correct and complete; abort on any inconsistency.  */
4538
4539 void
4540 df_scan_verify (void)
4541 {
4542 unsigned int i;
4543 basic_block bb;
4544 bitmap regular_block_artificial_uses;
4545 bitmap eh_block_artificial_uses;
4546
4547 if (!df)
4548 return;
4549
4550 /* Verification is a four-step process.  */
4551
4552 /* (1) All of the refs are marked by going through the reg chains.  */
4553 for (i = 0; i < DF_REG_SIZE (df); i++)
4554 {
4555 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4556 == DF_REG_DEF_COUNT(i));
4557 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4558 == DF_REG_USE_COUNT(i));
4559 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4560 == DF_REG_EQ_USE_COUNT(i));
4561 }
4562
4563 /* (2) There are various bitmaps whose value may change over the
4564 course of the compilation. This step recomputes them to make
4565 sure that they have not slipped out of date. */
4566 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4567 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4568
4569 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4570 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4571
4572 bitmap_ior_into (eh_block_artificial_uses,
4573 regular_block_artificial_uses);
4574
4575 /* Check that the artificial_uses bitmaps didn't change.  */
4576 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4577 df->regular_block_artificial_uses));
4578 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4579 df->eh_block_artificial_uses));
4580
4581 BITMAP_FREE (regular_block_artificial_uses);
4582 BITMAP_FREE (eh_block_artificial_uses);
4583
4584 /* Verify the entry block and exit block.  These only verify the
4585 bitmaps; the refs are verified in df_bb_verify.  */
4586 df_entry_block_bitmap_verify (true);
4587 df_exit_block_bitmap_verify (true);
4588
4589 /* (3) All of the insns in all of the blocks are traversed and the
4590 marks are cleared both in the artificial refs attached to the
4591 blocks and the real refs inside the insns. It is a failure to
4592 clear a mark that has not been set as this means that the ref in
4593 the block or insn was not in the reg chain. */
4594
4595 FOR_ALL_BB (bb)
4596 df_bb_verify (bb);
4597
4598 /* (4) All of the reg chains are traversed a second time.  This time
4599 a check is made that the marks are clear.  A set mark would be
4600 from a ref that is not in any insn or basic block.  */
4601
4602 for (i = 0; i < DF_REG_SIZE (df); i++)
4603 {
4604 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4605 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4606 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4607 }
4608 }
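/* A hedged usage sketch: a pass that has rewritten many insns can run
   the full verifier afterwards, typically only in checking builds:

       #ifdef ENABLE_CHECKING
       df_scan_verify ();
       #endif

   ENABLE_CHECKING is the usual configure-time guard; whether a caller
   uses it or a df-specific guard is a choice left to that caller.  */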