1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "timevar.h"
43 #include "tree.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47 #include "tree-pass.h"
48
49 #ifndef HAVE_epilogue
50 #define HAVE_epilogue 0
51 #endif
52 #ifndef HAVE_prologue
53 #define HAVE_prologue 0
54 #endif
55 #ifndef HAVE_sibcall_epilogue
56 #define HAVE_sibcall_epilogue 0
57 #endif
58
59 #ifndef EPILOGUE_USES
60 #define EPILOGUE_USES(REGNO) 0
61 #endif
62
63 /* The bitmap_obstack is used to hold some static variables that
64 should not be reset after each function is compiled. */
65
66 static bitmap_obstack persistent_obstack;
67
68 /* The set of hard registers in eliminables[i].from. */
69
70 static HARD_REG_SET elim_reg_set;
71
72 /* This is a bitmap copy of regs_invalidated_by_call so that we can
73 easily add it into bitmaps, etc. */
74
75 bitmap df_invalidated_by_call = NULL;
76
 77 /* A scratch record collecting the refs and multiword hardregs found
 78    while scanning an insn or block, before they enter the chains. */
79
80 struct df_collection_rec
81 {
82 struct df_ref ** def_vec;
83 unsigned int next_def;
84 struct df_ref ** use_vec;
85 unsigned int next_use;
86 struct df_ref ** eq_use_vec;
87 unsigned int next_eq_use;
88 struct df_mw_hardreg **mw_vec;
89 unsigned int next_mw;
90 };
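 
/* A minimal sketch of the intended life cycle of a collection rec,
   assumed from its use in df_insn_rescan and df_notes_rescan below:
   the vectors are alloca'ed scratch space, a df_*_collect routine
   fills them, and df_refs_add_to_chains installs the result:

     struct df_collection_rec collection_rec;
     memset (&collection_rec, 0, sizeof (struct df_collection_rec));
     collection_rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 100);
     df_insn_refs_collect (&collection_rec, bb, insn);
     df_refs_add_to_chains (&collection_rec, bb, insn);  */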
91
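/* Statically allocated shared terminators: an insn or block with no
   refs points its vectors at these instead of heap storage, and
   df_ref_chain_delete below is careful never to free them.  */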
92 static struct df_ref * df_null_ref_rec[1];
93 static struct df_mw_hardreg * df_null_mw_rec[1];
94
95 static void df_ref_record (struct df_collection_rec *,
96 rtx, rtx *,
97 basic_block, rtx, enum df_ref_type,
98 enum df_ref_flags, int, int);
99 static void df_def_record_1 (struct df_collection_rec *,
100 rtx, basic_block, rtx,
101 enum df_ref_flags);
102 static void df_defs_record (struct df_collection_rec *,
103 rtx, basic_block, rtx,
104 enum df_ref_flags);
105 static void df_uses_record (struct df_collection_rec *,
106 rtx *, enum df_ref_type,
107 basic_block, rtx, enum df_ref_flags, int, int);
108
109 static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
110 basic_block, rtx, enum df_ref_type,
111 enum df_ref_flags, int, int);
112
113 static void df_insn_refs_collect (struct df_collection_rec*,
114 basic_block, rtx);
115 static void df_canonize_collection_rec (struct df_collection_rec *);
116
117 static void df_get_regular_block_artificial_uses (bitmap);
118 static void df_get_eh_block_artificial_uses (bitmap);
119
120 static void df_record_entry_block_defs (bitmap);
121 static void df_record_exit_block_uses (bitmap);
122 static void df_get_exit_block_use_set (bitmap);
123 static void df_get_entry_block_def_set (bitmap);
124 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
125 static void df_ref_chain_delete_du_chain (struct df_ref **);
126 static void df_ref_chain_delete (struct df_ref **);
127
128 static void df_refs_add_to_chains (struct df_collection_rec *,
129 basic_block, rtx);
130
131 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
132 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
133 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
134 static void df_install_ref (struct df_ref *, struct df_reg_info *,
135 struct df_ref_info *, bool);
136
137 static int df_ref_compare (const void *, const void *);
138 static int df_mw_compare (const void *, const void *);
139
140 /* Indexed by hardware reg number, is true if that register is ever
141 used in the current function.
142
143 In df-scan.c, this is set up to record the hard regs used
144 explicitly. Reload adds in the hard regs used for holding pseudo
145 regs. Final uses it to generate the code in the function prologue
146 and epilogue to save and restore registers as needed. */
147
148 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
149 \f
150 /*----------------------------------------------------------------------------
151 SCANNING DATAFLOW PROBLEM
152
153 There are several ways in which scanning looks just like the other
 154 dataflow problems. It shares all the mechanisms for local info
155 as well as basic block info. Where it differs is when and how often
156 it gets run. It also has no need for the iterative solver.
157 ----------------------------------------------------------------------------*/
158
159 /* Problem data for the scanning dataflow function. */
160 struct df_scan_problem_data
161 {
162 alloc_pool ref_pool;
163 alloc_pool ref_extract_pool;
164 alloc_pool insn_pool;
165 alloc_pool reg_pool;
166 alloc_pool mw_reg_pool;
167 alloc_pool mw_link_pool;
168 bitmap_obstack reg_bitmaps;
169 bitmap_obstack insn_bitmaps;
170 };
171
172 typedef struct df_scan_bb_info *df_scan_bb_info_t;
173
174 static void
175 df_scan_free_internal (void)
176 {
177 struct df_scan_problem_data *problem_data
178 = (struct df_scan_problem_data *) df_scan->problem_data;
179
180 free (df->def_info.refs);
181 free (df->def_info.begin);
182 free (df->def_info.count);
183 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
184
185 free (df->use_info.refs);
186 free (df->use_info.begin);
187 free (df->use_info.count);
188 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
189
190 free (df->def_regs);
191 df->def_regs = NULL;
192 free (df->use_regs);
193 df->use_regs = NULL;
194 free (df->eq_use_regs);
195 df->eq_use_regs = NULL;
196 df->regs_size = 0;
197 DF_REG_SIZE(df) = 0;
198
199 free (df->insns);
200 df->insns = NULL;
201 DF_INSN_SIZE () = 0;
202
203 free (df_scan->block_info);
204 df_scan->block_info = NULL;
205 df_scan->block_info_size = 0;
206
207 BITMAP_FREE (df->hardware_regs_used);
208 BITMAP_FREE (df->regular_block_artificial_uses);
209 BITMAP_FREE (df->eh_block_artificial_uses);
210 BITMAP_FREE (df->entry_block_defs);
211 BITMAP_FREE (df->exit_block_uses);
212 BITMAP_FREE (df->insns_to_delete);
213 BITMAP_FREE (df->insns_to_rescan);
214 BITMAP_FREE (df->insns_to_notes_rescan);
215
216 free_alloc_pool (df_scan->block_pool);
217 free_alloc_pool (problem_data->ref_pool);
218 free_alloc_pool (problem_data->ref_extract_pool);
219 free_alloc_pool (problem_data->insn_pool);
220 free_alloc_pool (problem_data->reg_pool);
221 free_alloc_pool (problem_data->mw_reg_pool);
222 free_alloc_pool (problem_data->mw_link_pool);
223 bitmap_obstack_release (&problem_data->reg_bitmaps);
224 bitmap_obstack_release (&problem_data->insn_bitmaps);
225 free (df_scan->problem_data);
226 }
227
228
229 /* Set basic block info. */
230
231 static void
232 df_scan_set_bb_info (unsigned int index,
233 struct df_scan_bb_info *bb_info)
234 {
235 gcc_assert (df_scan);
236 df_grow_bb_info (df_scan);
237 df_scan->block_info[index] = (void *) bb_info;
238 }
239
240
241 /* Free basic block info. */
242
243 static void
244 df_scan_free_bb_info (basic_block bb, void *vbb_info)
245 {
246 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
247 unsigned int bb_index = bb->index;
248 if (bb_info)
249 {
250 rtx insn;
251 FOR_BB_INSNS (bb, insn)
252 {
253 if (INSN_P (insn))
 254 /* Delete the refs recorded within INSN. */
255 df_insn_delete (bb, INSN_UID (insn));
256 }
257
258 if (bb_index < df_scan->block_info_size)
259 bb_info = df_scan_get_bb_info (bb_index);
260
261 /* Get rid of any artificial uses or defs. */
262 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
263 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
264 df_ref_chain_delete (bb_info->artificial_defs);
265 df_ref_chain_delete (bb_info->artificial_uses);
266 bb_info->artificial_defs = NULL;
267 bb_info->artificial_uses = NULL;
268 pool_free (df_scan->block_pool, bb_info);
269 }
270 }
271
272
273 /* Allocate the problem data for the scanning problem. This should be
274 called when the problem is created or when the entire function is to
275 be rescanned. */
276 void
277 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
278 {
279 struct df_scan_problem_data *problem_data;
280 unsigned int insn_num = get_max_uid () + 1;
281 unsigned int block_size = 400;
282 basic_block bb;
283
284 /* Given the number of pools, this is really faster than tearing
285 everything apart. */
286 if (df_scan->problem_data)
287 df_scan_free_internal ();
288
289 df_scan->block_pool
290 = create_alloc_pool ("df_scan_block pool",
291 sizeof (struct df_scan_bb_info),
292 block_size);
293
294 problem_data = XNEW (struct df_scan_problem_data);
295 df_scan->problem_data = problem_data;
296 df_scan->computed = true;
297
298 problem_data->ref_pool
299 = create_alloc_pool ("df_scan_ref pool",
300 sizeof (struct df_ref), block_size);
301 problem_data->ref_extract_pool
302 = create_alloc_pool ("df_scan_ref extract pool",
303 sizeof (struct df_ref_extract), block_size);
304 problem_data->insn_pool
305 = create_alloc_pool ("df_scan_insn pool",
306 sizeof (struct df_insn_info), block_size);
307 problem_data->reg_pool
308 = create_alloc_pool ("df_scan_reg pool",
309 sizeof (struct df_reg_info), block_size);
310 problem_data->mw_reg_pool
311 = create_alloc_pool ("df_scan_mw_reg pool",
312 sizeof (struct df_mw_hardreg), block_size);
313 problem_data->mw_link_pool
314 = create_alloc_pool ("df_scan_mw_link pool",
315 sizeof (struct df_link), block_size);
316
317 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
318 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
319
320 insn_num += insn_num / 4;
321 df_grow_reg_info ();
322
323 df_grow_insn_info ();
324 df_grow_bb_info (df_scan);
325
326 FOR_ALL_BB (bb)
327 {
328 unsigned int bb_index = bb->index;
329 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
330 if (!bb_info)
331 {
332 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
333 df_scan_set_bb_info (bb_index, bb_info);
334 }
335 bb_info->artificial_defs = NULL;
336 bb_info->artificial_uses = NULL;
337 }
338
339 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
340 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
341 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
342 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
343 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
344 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
345 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
346 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
347 df_scan->optional_p = false;
348 }
349
350
351 /* Free all of the data associated with the scan problem. */
352
353 static void
354 df_scan_free (void)
355 {
356 if (df_scan->problem_data)
357 df_scan_free_internal ();
358
359 if (df->blocks_to_analyze)
360 {
361 BITMAP_FREE (df->blocks_to_analyze);
362 df->blocks_to_analyze = NULL;
363 }
364
365 free (df_scan);
366 }
367
368 /* Dump the preamble for DF_SCAN dump. */
369 static void
370 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
371 {
372 int i;
373
374 fprintf (file, ";; invalidated by call \t");
375 df_print_regset (file, df_invalidated_by_call);
376 fprintf (file, ";; hardware regs used \t");
377 df_print_regset (file, df->hardware_regs_used);
378 fprintf (file, ";; regular block artificial uses \t");
379 df_print_regset (file, df->regular_block_artificial_uses);
380 fprintf (file, ";; eh block artificial uses \t");
381 df_print_regset (file, df->eh_block_artificial_uses);
382 fprintf (file, ";; entry block defs \t");
383 df_print_regset (file, df->entry_block_defs);
384 fprintf (file, ";; exit block uses \t");
385 df_print_regset (file, df->exit_block_uses);
386 fprintf (file, ";; regs ever live \t");
387 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
388 if (df_regs_ever_live_p (i))
389 fprintf (file, " %d[%s]", i, reg_names[i]);
390
391 fprintf (file, "\n");
392 }
393
394 /* Dump the bb_info for a given basic block. */
395 static void
396 df_scan_start_block (basic_block bb, FILE *file)
397 {
398 struct df_scan_bb_info *bb_info
399 = df_scan_get_bb_info (bb->index);
400
401 if (bb_info)
402 {
403 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
404 df_refs_chain_dump (bb_info->artificial_defs, true, file);
405 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
406 df_refs_chain_dump (bb_info->artificial_uses, true, file);
407 fprintf (file, "\n");
408 }
409 #if 0
410 {
411 rtx insn;
412 FOR_BB_INSNS (bb, insn)
413 if (INSN_P (insn))
414 df_insn_debug (insn, false, file);
415 }
416 #endif
417 }
418
419 static struct df_problem problem_SCAN =
420 {
421 DF_SCAN, /* Problem id. */
422 DF_NONE, /* Direction. */
423 df_scan_alloc, /* Allocate the problem specific data. */
424 NULL, /* Reset global information. */
425 df_scan_free_bb_info, /* Free basic block info. */
426 NULL, /* Local compute function. */
427 NULL, /* Init the solution specific data. */
428 NULL, /* Iterative solver. */
429 NULL, /* Confluence operator 0. */
430 NULL, /* Confluence operator n. */
431 NULL, /* Transfer function. */
432 NULL, /* Finalize function. */
433 df_scan_free, /* Free all of the problem information. */
434 NULL, /* Remove this problem from the stack of dataflow problems. */
435 df_scan_start_dump, /* Debugging. */
436 df_scan_start_block, /* Debugging start block. */
437 NULL, /* Debugging end block. */
438 NULL, /* Incremental solution verify start. */
439 NULL, /* Incremental solution verify end. */
440 NULL, /* Dependent problem. */
441 TV_DF_SCAN, /* Timing variable. */
442 false /* Reset blocks on dropping out of blocks_to_analyze. */
443 };
444
445
 446 /* Create a new DF_SCAN problem instance and add it to the existing
 447    instance of DF. The global df_scan accessor is then used to get
 448    at the problem. */
449
450 void
451 df_scan_add_problem (void)
452 {
453 df_add_problem (&problem_SCAN);
454 }
455
456 \f
457 /*----------------------------------------------------------------------------
458 Storage Allocation Utilities
459 ----------------------------------------------------------------------------*/
460
461
462 /* First, grow the reg_info information. If the current size is less than
 463 the number of pseudos, grow to 25% more than the number of
464 pseudos.
465
 466 Second, ensure that all of the slots up to max_reg_num have been
467 filled with reg_info structures. */
468
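/* For example (arithmetic only): with max_reg_num () == 1000 and
   regs_size == 800, the arrays below are reallocated to
   1000 + 1000/4 == 1250 slots, leaving 25% slack before the next
   grow.  */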
469 void
470 df_grow_reg_info (void)
471 {
472 unsigned int max_reg = max_reg_num ();
473 unsigned int new_size = max_reg;
474 struct df_scan_problem_data *problem_data
475 = (struct df_scan_problem_data *) df_scan->problem_data;
476 unsigned int i;
477
478 if (df->regs_size < new_size)
479 {
480 new_size += new_size / 4;
 481 df->def_regs = xrealloc (df->def_regs,
 482 new_size * sizeof (struct df_reg_info *));
 483 df->use_regs = xrealloc (df->use_regs,
 484 new_size * sizeof (struct df_reg_info *));
 485 df->eq_use_regs = xrealloc (df->eq_use_regs,
 486 new_size * sizeof (struct df_reg_info *));
 487 df->def_info.begin = xrealloc (df->def_info.begin,
 488 new_size * sizeof (int));
 489 df->def_info.count = xrealloc (df->def_info.count,
 490 new_size * sizeof (int));
 491 df->use_info.begin = xrealloc (df->use_info.begin,
 492 new_size * sizeof (int));
 493 df->use_info.count = xrealloc (df->use_info.count,
 494 new_size * sizeof (int));
495 df->regs_size = new_size;
496 }
497
498 for (i = df->regs_inited; i < max_reg; i++)
499 {
500 struct df_reg_info *reg_info;
501
502 reg_info = pool_alloc (problem_data->reg_pool);
503 memset (reg_info, 0, sizeof (struct df_reg_info));
504 df->def_regs[i] = reg_info;
505 reg_info = pool_alloc (problem_data->reg_pool);
506 memset (reg_info, 0, sizeof (struct df_reg_info));
507 df->use_regs[i] = reg_info;
508 reg_info = pool_alloc (problem_data->reg_pool);
509 memset (reg_info, 0, sizeof (struct df_reg_info));
510 df->eq_use_regs[i] = reg_info;
511 df->def_info.begin[i] = 0;
512 df->def_info.count[i] = 0;
513 df->use_info.begin[i] = 0;
514 df->use_info.count[i] = 0;
515 }
516
517 df->regs_inited = max_reg;
518 }
519
520
521 /* Grow the ref information. */
522
523 static void
524 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
525 {
526 if (ref_info->refs_size < new_size)
527 {
528 ref_info->refs = xrealloc (ref_info->refs,
 529 new_size * sizeof (struct df_ref *));
 530 memset (ref_info->refs + ref_info->refs_size, 0,
 531 (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
532 ref_info->refs_size = new_size;
533 }
534 }
535
536
 537 /* Check and grow the ref information if necessary. This routine
 538    guarantees that the refs array has at least total_size +
 539    BITMAP_ADDEND entries. It updates ref_info->refs_size only and
 540    does not change ref_info->total_size. */
541
542 static void
543 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
544 unsigned bitmap_addend)
545 {
546 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
547 {
548 int new_size = ref_info->total_size + bitmap_addend;
549 new_size += ref_info->total_size / 4;
550 df_grow_ref_info (ref_info, new_size);
551 }
552 }
553
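/* For example (arithmetic only): with total_size == 100 and
   BITMAP_ADDEND == 1, any refs_size below 101 forces a grow to
   101 + 100/4 == 126 entries, so a run of single insertions does
   not reallocate on every call.  */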
554
555 /* Grow the ref information. If the current size is less than the
556 number of instructions, grow to 25% more than the number of
557 instructions. */
558
559 void
560 df_grow_insn_info (void)
561 {
562 unsigned int new_size = get_max_uid () + 1;
563 if (DF_INSN_SIZE () < new_size)
564 {
565 new_size += new_size / 4;
566 df->insns = xrealloc (df->insns,
 567 new_size * sizeof (struct df_insn_info *));
 568 memset (df->insns + df->insns_size, 0,
 569 (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
570 DF_INSN_SIZE () = new_size;
571 }
572 }
573
574
575
576 \f
577 /*----------------------------------------------------------------------------
578 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
579 ----------------------------------------------------------------------------*/
580
 581 /* Rescan all of the blocks in blocks_to_analyze, or all of the
 582    blocks in the function if blocks_to_analyze is NULL. */
583
584 void
585 df_scan_blocks (void)
586 {
587 basic_block bb;
588
589 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
590 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
591
592 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
593 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
594
595 bitmap_ior_into (df->eh_block_artificial_uses,
596 df->regular_block_artificial_uses);
597
598 /* ENTRY and EXIT blocks have special defs/uses. */
599 df_get_entry_block_def_set (df->entry_block_defs);
600 df_record_entry_block_defs (df->entry_block_defs);
601 df_get_exit_block_use_set (df->exit_block_uses);
602 df_record_exit_block_uses (df->exit_block_uses);
603 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
604 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
605
606 /* Regular blocks */
607 FOR_EACH_BB (bb)
608 {
609 unsigned int bb_index = bb->index;
610 df_bb_refs_record (bb_index, true);
611 }
612 }
613
614
615 /* Create a new ref of type DF_REF_TYPE for register REG at address
616 LOC within INSN of BB. This function is only used externally.
617
 618 If REF_FLAGS contains DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT,
 619 WIDTH and OFFSET give the width and offset of the extracted field
 620 when those are compile-time constants; otherwise they should be
 621 passed as -1. */
622
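/* A hedged usage sketch (hypothetical caller, not from this file):
   recording a use of REG that sits inside
   (zero_extract REG (const_int 8) (const_int 0)), where the width
   and offset are compile-time constants:

     struct df_ref *ref
       = df_ref_create (reg, loc, insn, bb, DF_REF_REG_USE,
                        DF_REF_ZERO_EXTRACT, 8, 0);

   A plain, non-extract reference would pass -1 for both WIDTH and
   OFFSET and neither extract flag.  */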
623 struct df_ref *
624 df_ref_create (rtx reg, rtx *loc, rtx insn,
625 basic_block bb,
626 enum df_ref_type ref_type,
627 enum df_ref_flags ref_flags,
628 int width, int offset)
629 {
630 struct df_ref *ref;
631 struct df_reg_info **reg_info;
632 struct df_ref_info *ref_info;
633 struct df_ref **ref_rec;
634 struct df_ref ***ref_rec_ptr;
635 unsigned int count = 0;
636 bool add_to_table;
637
638 df_grow_reg_info ();
639
640 /* You cannot hack artificial refs. */
641 gcc_assert (insn);
642 ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
643 ref_type, ref_flags, width, offset);
644
645 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
646 {
647 reg_info = df->def_regs;
648 ref_info = &df->def_info;
649 ref_rec_ptr = &DF_INSN_DEFS (insn);
650 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
651 }
652 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
653 {
654 reg_info = df->eq_use_regs;
655 ref_info = &df->use_info;
656 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
657 switch (ref_info->ref_order)
658 {
659 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
660 case DF_REF_ORDER_BY_REG_WITH_NOTES:
661 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
662 add_to_table = true;
663 break;
664 default:
665 add_to_table = false;
666 break;
667 }
668 }
669 else
670 {
671 reg_info = df->use_regs;
672 ref_info = &df->use_info;
673 ref_rec_ptr = &DF_INSN_USES (insn);
674 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
675 }
676
677 /* Do not add if ref is not in the right blocks. */
678 if (add_to_table && df->analyze_subset)
679 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
680
681 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
682
683 if (add_to_table)
684 switch (ref_info->ref_order)
685 {
686 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
687 case DF_REF_ORDER_BY_REG_WITH_NOTES:
688 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
689 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
690 break;
691 default:
692 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
693 break;
694 }
695
696 ref_rec = *ref_rec_ptr;
697 while (*ref_rec)
698 {
699 count++;
700 ref_rec++;
701 }
702
703 ref_rec = *ref_rec_ptr;
704 if (count)
705 {
706 ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
707 *ref_rec_ptr = ref_rec;
708 ref_rec[count] = ref;
709 ref_rec[count+1] = NULL;
710 qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
711 }
712 else
713 {
714 struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
715 ref_rec[0] = ref;
716 ref_rec[1] = NULL;
717 *ref_rec_ptr = ref_rec;
718 }
719
720 #if 0
721 if (dump_file)
722 {
723 fprintf (dump_file, "adding ref ");
724 df_ref_debug (ref, dump_file);
725 }
726 #endif
 727 /* By adding the ref directly, df_insn_rescan may not find any
728 differences even though the block will have changed. So we need
729 to mark the block dirty ourselves. */
730 df_set_bb_dirty (bb);
731
732 return ref;
733 }
734
735
736 \f
737 /*----------------------------------------------------------------------------
738 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
739 ----------------------------------------------------------------------------*/
740
741 static void
742 df_free_ref (struct df_ref *ref)
743 {
744 struct df_scan_problem_data *problem_data
745 = (struct df_scan_problem_data *) df_scan->problem_data;
746
747 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
748 pool_free (problem_data->ref_extract_pool, (struct df_ref_extract *)ref);
749 else
750 pool_free (problem_data->ref_pool, ref);
751 }
752
753
 754 /* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
755 Also delete the def-use or use-def chain if it exists. */
756
757 static void
758 df_reg_chain_unlink (struct df_ref *ref)
759 {
760 struct df_ref *next = DF_REF_NEXT_REG (ref);
761 struct df_ref *prev = DF_REF_PREV_REG (ref);
762 int id = DF_REF_ID (ref);
763 struct df_reg_info *reg_info;
764 struct df_ref **refs = NULL;
765
766 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
767 {
768 reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
769 refs = df->def_info.refs;
770 }
771 else
772 {
773 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
774 {
775 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
776 switch (df->use_info.ref_order)
777 {
778 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
779 case DF_REF_ORDER_BY_REG_WITH_NOTES:
780 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
781 refs = df->use_info.refs;
782 break;
783 default:
784 break;
785 }
786 }
787 else
788 {
789 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
790 refs = df->use_info.refs;
791 }
792 }
793
794 if (refs)
795 {
796 if (df->analyze_subset)
797 {
798 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
799 refs[id] = NULL;
800 }
801 else
802 refs[id] = NULL;
803 }
804
805 /* Delete any def-use or use-def chains that start here. It is
806 possible that there is trash in this field. This happens for
807 insns that have been deleted when rescanning has been deferred
808 and the chain problem has also been deleted. The chain tear down
809 code skips deleted insns. */
810 if (df_chain && DF_REF_CHAIN (ref))
811 df_chain_unlink (ref);
812
813 reg_info->n_refs--;
814 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
815 {
816 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
817 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
818 }
819
820 /* Unlink from the reg chain. If there is no prev, this is the
821 first of the list. If not, just join the next and prev. */
822 if (prev)
823 DF_REF_NEXT_REG (prev) = next;
824 else
825 {
826 gcc_assert (reg_info->reg_chain == ref);
827 reg_info->reg_chain = next;
828 }
829 if (next)
830 DF_REF_PREV_REG (next) = prev;
831
832 df_free_ref (ref);
833 }
834
835
836 /* Remove REF from VEC. */
837
838 static void
839 df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
840 {
841 struct df_ref **vec = *vec_ptr;
842
843 if (vec[1])
844 {
845 while (*vec && *vec != ref)
846 vec++;
847
848 while (*vec)
849 {
850 *vec = *(vec+1);
851 vec++;
852 }
853 }
854 else
855 {
856 free (vec);
857 *vec_ptr = df_null_ref_rec;
858 }
859 }
860
861
862 /* Unlink REF from all def-use/use-def chains, etc. */
863
864 void
865 df_ref_remove (struct df_ref *ref)
866 {
867 #if 0
868 if (dump_file)
869 {
870 fprintf (dump_file, "removing ref ");
871 df_ref_debug (ref, dump_file);
872 }
873 #endif
874
875 if (DF_REF_REG_DEF_P (ref))
876 {
877 if (DF_REF_IS_ARTIFICIAL (ref))
878 {
879 struct df_scan_bb_info *bb_info
880 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
881 df_ref_compress_rec (&bb_info->artificial_defs, ref);
882 }
883 else
884 {
885 unsigned int uid = DF_REF_INSN_UID (ref);
886 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
887 df_ref_compress_rec (&insn_rec->defs, ref);
888 }
889 }
890 else
891 {
892 if (DF_REF_IS_ARTIFICIAL (ref))
893 {
894 struct df_scan_bb_info *bb_info
895 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
896 df_ref_compress_rec (&bb_info->artificial_uses, ref);
897 }
898 else
899 {
900 unsigned int uid = DF_REF_INSN_UID (ref);
901 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
902
903 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
904 df_ref_compress_rec (&insn_rec->eq_uses, ref);
905 else
906 df_ref_compress_rec (&insn_rec->uses, ref);
907 }
908 }
909
 910 /* By deleting the ref directly, df_insn_rescan may not find any
911 differences even though the block will have changed. So we need
912 to mark the block dirty ourselves. */
913 df_set_bb_dirty (DF_REF_BB (ref));
914 df_reg_chain_unlink (ref);
915 }
916
917
918 /* Create the insn record for INSN. If there was one there, zero it
919 out. */
920
921 struct df_insn_info *
922 df_insn_create_insn_record (rtx insn)
923 {
924 struct df_scan_problem_data *problem_data
925 = (struct df_scan_problem_data *) df_scan->problem_data;
926 struct df_insn_info *insn_rec;
927
928 df_grow_insn_info ();
929 insn_rec = DF_INSN_GET (insn);
930 if (!insn_rec)
931 {
932 insn_rec = pool_alloc (problem_data->insn_pool);
933 DF_INSN_SET (insn, insn_rec);
934 }
935 memset (insn_rec, 0, sizeof (struct df_insn_info));
936 insn_rec->insn = insn;
937 return insn_rec;
938 }
939
940
 941 /* Delete the du or ud chain (DF_REF_CHAIN ()) of each ref in the ref chain. */
942
943 static void
944 df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
945 {
946 while (*ref_rec)
947 {
948 struct df_ref *ref = *ref_rec;
 949 /* The CHAIN links are allocated by the DF_CHAIN problem, so
 950    they must be torn down through df_chain_unlink. */
951 if (DF_REF_CHAIN (ref))
952 df_chain_unlink (ref);
953 ref_rec++;
954 }
955 }
956
957
958 /* Delete all refs in the ref chain. */
959
960 static void
961 df_ref_chain_delete (struct df_ref **ref_rec)
962 {
963 struct df_ref **start = ref_rec;
964 while (*ref_rec)
965 {
966 df_reg_chain_unlink (*ref_rec);
967 ref_rec++;
968 }
969
 970 /* If the list is non-empty it was heap allocated; an empty list
 971    shares the static df_null_ref_rec, which must not be freed. */
972 if (*start)
973 free (start);
974 }
975
976
977 /* Delete the hardreg chain. */
978
979 static void
980 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
981 {
982 struct df_scan_problem_data *problem_data;
983
984 if (!hardregs)
985 return;
986
987 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
988
989 while (*hardregs)
990 {
991 pool_free (problem_data->mw_reg_pool, *hardregs);
992 hardregs++;
993 }
994 }
995
996
 997 /* Delete all of the refs information from INSN. BB must be passed
 998    in, except when called from df_process_deferred_rescans, so that
 999    the block can be marked dirty. */
1000
1001 void
1002 df_insn_delete (basic_block bb, unsigned int uid)
1003 {
1004 struct df_insn_info *insn_info = NULL;
1005 if (!df)
1006 return;
1007
1008 df_grow_bb_info (df_scan);
1009 df_grow_reg_info ();
1010
1011 /* The block must be marked as dirty now, rather than later as in
1012 df_insn_rescan and df_notes_rescan because it may not be there at
1013 rescanning time and the mark would blow up. */
1014 if (bb)
1015 df_set_bb_dirty (bb);
1016
1017 insn_info = DF_INSN_UID_SAFE_GET (uid);
1018
1019 /* The client has deferred rescanning. */
1020 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1021 {
1022 if (insn_info)
1023 {
1024 bitmap_clear_bit (df->insns_to_rescan, uid);
1025 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1026 bitmap_set_bit (df->insns_to_delete, uid);
1027 }
1028 if (dump_file)
1029 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1030 return;
1031 }
1032
1033 if (dump_file)
1034 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1035
1036 bitmap_clear_bit (df->insns_to_delete, uid);
1037 bitmap_clear_bit (df->insns_to_rescan, uid);
1038 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1039 if (insn_info)
1040 {
1041 struct df_scan_problem_data *problem_data
1042 = (struct df_scan_problem_data *) df_scan->problem_data;
1043
1044 /* In general, notes do not have the insn_info fields
1045 initialized. However, combine deletes insns by changing them
1046 to notes. How clever. So we cannot just check if it is a
1047 valid insn before short circuiting this code, we need to see
1048 if we actually initialized it. */
1049 if (insn_info->defs)
1050 {
1051 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1052
1053 if (df_chain)
1054 {
1055 df_ref_chain_delete_du_chain (insn_info->defs);
1056 df_ref_chain_delete_du_chain (insn_info->uses);
1057 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1058 }
1059
1060 df_ref_chain_delete (insn_info->defs);
1061 df_ref_chain_delete (insn_info->uses);
1062 df_ref_chain_delete (insn_info->eq_uses);
1063 }
1064 pool_free (problem_data->insn_pool, insn_info);
1065 DF_INSN_UID_SET (uid, NULL);
1066 }
1067 }
1068
1069
1070 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1071
1072 static void
1073 df_free_collection_rec (struct df_collection_rec *collection_rec)
1074 {
1075 struct df_scan_problem_data *problem_data
1076 = (struct df_scan_problem_data *) df_scan->problem_data;
1077 struct df_ref **ref;
1078 struct df_mw_hardreg **mw;
1079
1080 if (collection_rec->def_vec)
1081 for (ref = collection_rec->def_vec; *ref; ref++)
1082 df_free_ref (*ref);
1083 if (collection_rec->use_vec)
1084 for (ref = collection_rec->use_vec; *ref; ref++)
1085 df_free_ref (*ref);
1086 if (collection_rec->eq_use_vec)
1087 for (ref = collection_rec->eq_use_vec; *ref; ref++)
1088 df_free_ref (*ref);
1089 if (collection_rec->mw_vec)
1090 for (mw = collection_rec->mw_vec; *mw; mw++)
1091 pool_free (problem_data->mw_reg_pool, *mw);
1092 }
1093
1094
1095 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1096
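/* In outline, the flow below is: collect the refs INSN would have
   now; if df_insn_refs_verify finds them identical to what is
   already recorded, free the scratch refs and report no change;
   otherwise delete the old record and install the fresh refs.  */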
1097 bool
1098 df_insn_rescan (rtx insn)
1099 {
1100 unsigned int uid = INSN_UID (insn);
1101 struct df_insn_info *insn_info = NULL;
1102 basic_block bb = BLOCK_FOR_INSN (insn);
1103 struct df_collection_rec collection_rec;
1104 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
1105 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
1106 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
1107 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
1108
1109 if ((!df) || (!INSN_P (insn)))
1110 return false;
1111
1112 if (!bb)
1113 {
1114 if (dump_file)
1115 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1116 return false;
1117 }
1118
1119 /* The client has disabled rescanning and plans to do it itself. */
1120 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1121 return false;
1122
1123 df_grow_bb_info (df_scan);
1124 df_grow_reg_info ();
1125
1126 insn_info = DF_INSN_UID_SAFE_GET (uid);
1127
1128 /* The client has deferred rescanning. */
1129 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1130 {
1131 if (!insn_info)
1132 {
1133 insn_info = df_insn_create_insn_record (insn);
1134 insn_info->defs = df_null_ref_rec;
1135 insn_info->uses = df_null_ref_rec;
1136 insn_info->eq_uses = df_null_ref_rec;
1137 insn_info->mw_hardregs = df_null_mw_rec;
1138 }
1139 if (dump_file)
1140 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1141
1142 bitmap_clear_bit (df->insns_to_delete, uid);
1143 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1144 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1145 return false;
1146 }
1147
1148 bitmap_clear_bit (df->insns_to_delete, uid);
1149 bitmap_clear_bit (df->insns_to_rescan, uid);
1150 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1151 if (insn_info)
1152 {
1153 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1154 /* If there's no change, return false. */
1155 if (the_same)
1156 {
1157 df_free_collection_rec (&collection_rec);
1158 if (dump_file)
1159 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1160 return false;
1161 }
1162 if (dump_file)
1163 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1164
 1165 /* Something changed; delete the existing info and rebuild it. */
1166 df_insn_delete (NULL, uid);
1167 df_insn_create_insn_record (insn);
1168 }
1169 else
1170 {
1171 df_insn_create_insn_record (insn);
1172 df_insn_refs_collect (&collection_rec, bb, insn);
1173 if (dump_file)
1174 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1175 }
1176
1177 df_refs_add_to_chains (&collection_rec, bb, insn);
1178 df_set_bb_dirty (bb);
1179 return true;
1180 }
1181
1182
1183 /* Rescan all of the insns in the function. Note that the artificial
 1184 uses and defs are not touched. This function will destroy def-use
 1185 or use-def chains. */
1186
1187 void
1188 df_insn_rescan_all (void)
1189 {
1190 bool no_insn_rescan = false;
1191 bool defer_insn_rescan = false;
1192 basic_block bb;
1193 bitmap_iterator bi;
1194 unsigned int uid;
1195 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1196
1197 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1198 {
1199 df_clear_flags (DF_NO_INSN_RESCAN);
1200 no_insn_rescan = true;
1201 }
1202
1203 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1204 {
1205 df_clear_flags (DF_DEFER_INSN_RESCAN);
1206 defer_insn_rescan = true;
1207 }
1208
1209 bitmap_copy (tmp, df->insns_to_delete);
1210 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1211 {
1212 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1213 if (insn_info)
1214 df_insn_delete (NULL, uid);
1215 }
1216
1217 BITMAP_FREE (tmp);
1218 bitmap_clear (df->insns_to_delete);
1219 bitmap_clear (df->insns_to_rescan);
1220 bitmap_clear (df->insns_to_notes_rescan);
1221
1222 FOR_EACH_BB (bb)
1223 {
1224 rtx insn;
1225 FOR_BB_INSNS (bb, insn)
1226 {
1227 df_insn_rescan (insn);
1228 }
1229 }
1230
1231 if (no_insn_rescan)
1232 df_set_flags (DF_NO_INSN_RESCAN);
1233 if (defer_insn_rescan)
1234 df_set_flags (DF_DEFER_INSN_RESCAN);
1235 }
1236
1237
1238 /* Process all of the deferred rescans or deletions. */
1239
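/* A sketch of the expected client pattern, assumed from the flag
   handling in df_insn_rescan and df_insn_delete above:

     df_set_flags (DF_DEFER_INSN_RESCAN);
     ... create, modify and delete insns; the uids accumulate in
         the insns_to_delete / insns_to_rescan /
         insns_to_notes_rescan bitmaps instead of being processed
         immediately ...
     df_process_deferred_rescans ();
     df_clear_flags (DF_DEFER_INSN_RESCAN);  */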
1240 void
1241 df_process_deferred_rescans (void)
1242 {
1243 bool no_insn_rescan = false;
1244 bool defer_insn_rescan = false;
1245 bitmap_iterator bi;
1246 unsigned int uid;
1247 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1248
1249 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1250 {
1251 df_clear_flags (DF_NO_INSN_RESCAN);
1252 no_insn_rescan = true;
1253 }
1254
1255 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1256 {
1257 df_clear_flags (DF_DEFER_INSN_RESCAN);
1258 defer_insn_rescan = true;
1259 }
1260
1261 if (dump_file)
1262 fprintf (dump_file, "starting the processing of deferred insns\n");
1263
1264 bitmap_copy (tmp, df->insns_to_delete);
1265 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1266 {
1267 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1268 if (insn_info)
1269 df_insn_delete (NULL, uid);
1270 }
1271
1272 bitmap_copy (tmp, df->insns_to_rescan);
1273 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1274 {
1275 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1276 if (insn_info)
1277 df_insn_rescan (insn_info->insn);
1278 }
1279
1280 bitmap_copy (tmp, df->insns_to_notes_rescan);
1281 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1282 {
1283 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1284 if (insn_info)
1285 df_notes_rescan (insn_info->insn);
1286 }
1287
1288 if (dump_file)
1289 fprintf (dump_file, "ending the processing of deferred insns\n");
1290
1291 BITMAP_FREE (tmp);
1292 bitmap_clear (df->insns_to_delete);
1293 bitmap_clear (df->insns_to_rescan);
1294 bitmap_clear (df->insns_to_notes_rescan);
1295
1296 if (no_insn_rescan)
1297 df_set_flags (DF_NO_INSN_RESCAN);
1298 if (defer_insn_rescan)
1299 df_set_flags (DF_DEFER_INSN_RESCAN);
1300
1301 /* If someone changed regs_ever_live during this pass, fix up the
1302 entry and exit blocks. */
1303 if (df->redo_entry_and_exit)
1304 {
1305 df_update_entry_exit_and_calls ();
1306 df->redo_entry_and_exit = false;
1307 }
1308 }
1309
1310
1311 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1312 the uses if INCLUDE_USES. Include the eq_uses if
1313 INCLUDE_EQ_USES. */
1314
1315 static unsigned int
1316 df_count_refs (bool include_defs, bool include_uses,
1317 bool include_eq_uses)
1318 {
1319 unsigned int regno;
1320 int size = 0;
1321 unsigned int m = df->regs_inited;
1322
1323 for (regno = 0; regno < m; regno++)
1324 {
1325 if (include_defs)
1326 size += DF_REG_DEF_COUNT (regno);
1327 if (include_uses)
1328 size += DF_REG_USE_COUNT (regno);
1329 if (include_eq_uses)
1330 size += DF_REG_EQ_USE_COUNT (regno);
1331 }
1332 return size;
1333 }
1334
1335
 1336 /* Build the ref table for either the uses or defs from the reg-use
1337 or reg-def chains. This version processes the refs in reg order
1338 which is likely to be best if processing the whole function. */
1339
1340 static void
1341 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1342 bool include_defs,
1343 bool include_uses,
1344 bool include_eq_uses)
1345 {
1346 unsigned int m = df->regs_inited;
1347 unsigned int regno;
1348 unsigned int offset = 0;
1349 unsigned int start;
1350
1351 if (df->changeable_flags & DF_NO_HARD_REGS)
1352 {
1353 start = FIRST_PSEUDO_REGISTER;
1354 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1355 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1356 }
1357 else
1358 start = 0;
1359
1360 ref_info->total_size
1361 = df_count_refs (include_defs, include_uses, include_eq_uses);
1362
1363 df_check_and_grow_ref_info (ref_info, 1);
1364
1365 for (regno = start; regno < m; regno++)
1366 {
1367 int count = 0;
1368 ref_info->begin[regno] = offset;
1369 if (include_defs)
1370 {
1371 struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
1372 while (ref)
1373 {
1374 ref_info->refs[offset] = ref;
1375 DF_REF_ID (ref) = offset++;
1376 count++;
1377 ref = DF_REF_NEXT_REG (ref);
1378 gcc_assert (offset < ref_info->refs_size);
1379 }
1380 }
1381 if (include_uses)
1382 {
1383 struct df_ref *ref = DF_REG_USE_CHAIN (regno);
1384 while (ref)
1385 {
1386 ref_info->refs[offset] = ref;
1387 DF_REF_ID (ref) = offset++;
1388 count++;
1389 ref = DF_REF_NEXT_REG (ref);
1390 gcc_assert (offset < ref_info->refs_size);
1391 }
1392 }
1393 if (include_eq_uses)
1394 {
1395 struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
1396 while (ref)
1397 {
1398 ref_info->refs[offset] = ref;
1399 DF_REF_ID (ref) = offset++;
1400 count++;
1401 ref = DF_REF_NEXT_REG (ref);
1402 gcc_assert (offset < ref_info->refs_size);
1403 }
1404 }
1405 ref_info->count[regno] = count;
1406 }
1407
1408 /* The bitmap size is not decremented when refs are deleted. So
1409 reset it now that we have squished out all of the empty
1410 slots. */
1411 ref_info->table_size = offset;
1412 }
1413
1414
 1415 /* Build the ref table for either the uses or defs from the reg-use
1416 or reg-def chains. This version processes the refs in insn order
1417 which is likely to be best if processing some segment of the
1418 function. */
1419
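/* The routine below is effectively a counting sort keyed on register
   number: the first walk over the blocks accumulates per-regno
   counts, the loop over [start, m) turns those counts into begin[]
   offsets (a prefix sum), and a second walk places each ref at
   refs[begin[regno] + count[regno]++].  */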
1420 static void
1421 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1422 bool include_defs,
1423 bool include_uses,
1424 bool include_eq_uses)
1425 {
1426 bitmap_iterator bi;
1427 unsigned int bb_index;
1428 unsigned int m = df->regs_inited;
1429 unsigned int offset = 0;
1430 unsigned int r;
1431 unsigned int start
1432 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1433
1434 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1435 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1436
1437 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1438 df_check_and_grow_ref_info (ref_info, 1);
1439
1440 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1441 {
1442 basic_block bb = BASIC_BLOCK (bb_index);
1443 rtx insn;
1444 struct df_ref **ref_rec;
1445
1446 if (include_defs)
1447 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1448 {
1449 unsigned int regno = DF_REF_REGNO (*ref_rec);
1450 ref_info->count[regno]++;
1451 }
1452 if (include_uses)
1453 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1454 {
1455 unsigned int regno = DF_REF_REGNO (*ref_rec);
1456 ref_info->count[regno]++;
1457 }
1458
1459 FOR_BB_INSNS (bb, insn)
1460 {
1461 if (INSN_P (insn))
1462 {
1463 unsigned int uid = INSN_UID (insn);
1464
1465 if (include_defs)
1466 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1467 {
1468 unsigned int regno = DF_REF_REGNO (*ref_rec);
1469 ref_info->count[regno]++;
1470 }
1471 if (include_uses)
1472 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1473 {
1474 unsigned int regno = DF_REF_REGNO (*ref_rec);
1475 ref_info->count[regno]++;
1476 }
1477 if (include_eq_uses)
1478 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1479 {
1480 unsigned int regno = DF_REF_REGNO (*ref_rec);
1481 ref_info->count[regno]++;
1482 }
1483 }
1484 }
1485 }
1486
1487 for (r = start; r < m; r++)
1488 {
1489 ref_info->begin[r] = offset;
1490 offset += ref_info->count[r];
1491 ref_info->count[r] = 0;
1492 }
1493
1494 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1495 {
1496 basic_block bb = BASIC_BLOCK (bb_index);
1497 rtx insn;
1498 struct df_ref **ref_rec;
1499
1500 if (include_defs)
1501 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1502 {
1503 struct df_ref *ref = *ref_rec;
1504 unsigned int regno = DF_REF_REGNO (ref);
1505 if (regno >= start)
1506 {
1507 unsigned int id
1508 = ref_info->begin[regno] + ref_info->count[regno]++;
1509 DF_REF_ID (ref) = id;
1510 ref_info->refs[id] = ref;
1511 }
1512 }
1513 if (include_uses)
1514 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1515 {
1516 struct df_ref *ref = *ref_rec;
1517 unsigned int regno = DF_REF_REGNO (ref);
1518 if (regno >= start)
1519 {
1520 unsigned int id
1521 = ref_info->begin[regno] + ref_info->count[regno]++;
1522 DF_REF_ID (ref) = id;
1523 ref_info->refs[id] = ref;
1524 }
1525 }
1526
1527 FOR_BB_INSNS (bb, insn)
1528 {
1529 if (INSN_P (insn))
1530 {
1531 unsigned int uid = INSN_UID (insn);
1532
1533 if (include_defs)
1534 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1535 {
1536 struct df_ref *ref = *ref_rec;
1537 unsigned int regno = DF_REF_REGNO (ref);
1538 if (regno >= start)
1539 {
1540 unsigned int id
1541 = ref_info->begin[regno] + ref_info->count[regno]++;
1542 DF_REF_ID (ref) = id;
1543 ref_info->refs[id] = ref;
1544 }
1545 }
1546 if (include_uses)
1547 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1548 {
1549 struct df_ref *ref = *ref_rec;
1550 unsigned int regno = DF_REF_REGNO (ref);
1551 if (regno >= start)
1552 {
1553 unsigned int id
1554 = ref_info->begin[regno] + ref_info->count[regno]++;
1555 DF_REF_ID (ref) = id;
1556 ref_info->refs[id] = ref;
1557 }
1558 }
1559 if (include_eq_uses)
1560 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1561 {
1562 struct df_ref *ref = *ref_rec;
1563 unsigned int regno = DF_REF_REGNO (ref);
1564 if (regno >= start)
1565 {
1566 unsigned int id
1567 = ref_info->begin[regno] + ref_info->count[regno]++;
1568 DF_REF_ID (ref) = id;
1569 ref_info->refs[id] = ref;
1570 }
1571 }
1572 }
1573 }
1574 }
1575
1576 /* The bitmap size is not decremented when refs are deleted. So
1577 reset it now that we have squished out all of the empty
1578 slots. */
1579
1580 ref_info->table_size = offset;
1581 }
1582
 1583 /* Build the ref table for either the uses or defs from the reg-use
1584 or reg-def chains. */
1585
1586 static void
1587 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1588 bool include_defs,
1589 bool include_uses,
1590 bool include_eq_uses)
1591 {
1592 if (df->analyze_subset)
1593 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1594 include_uses, include_eq_uses);
1595 else
1596 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1597 include_uses, include_eq_uses);
1598 }
1599
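/* Either way, the refs of each register R end up in the contiguous
   slice refs[begin[R]] .. refs[begin[R] + count[R] - 1] of the
   table; only the order within a slice differs between the two
   routines.  */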
1600
1601 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1602 static unsigned int
1603 df_add_refs_to_table (unsigned int offset,
1604 struct df_ref_info *ref_info,
1605 struct df_ref **ref_vec)
1606 {
1607 while (*ref_vec)
1608 {
1609 struct df_ref *ref = *ref_vec;
1610 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1611 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1612 {
1613 ref_info->refs[offset] = ref;
1614 DF_REF_ID (*ref_vec) = offset++;
1615 }
1616 ref_vec++;
1617 }
1618 return offset;
1619 }
1620
1621
 1622 /* Add the refs in all of the insns of BB to the table in REF_INFO,
 1623    starting at OFFSET. Include the defs if INCLUDE_DEFS, the uses if
 1624    INCLUDE_USES, and the eq_uses if INCLUDE_EQ_USES. */
1625
1626 static unsigned int
1627 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1628 struct df_ref_info *ref_info,
1629 bool include_defs, bool include_uses,
1630 bool include_eq_uses)
1631 {
1632 rtx insn;
1633
1634 if (include_defs)
1635 offset = df_add_refs_to_table (offset, ref_info,
1636 df_get_artificial_defs (bb->index));
1637 if (include_uses)
1638 offset = df_add_refs_to_table (offset, ref_info,
1639 df_get_artificial_uses (bb->index));
1640
1641 FOR_BB_INSNS (bb, insn)
1642 if (INSN_P (insn))
1643 {
1644 unsigned int uid = INSN_UID (insn);
1645 if (include_defs)
1646 offset = df_add_refs_to_table (offset, ref_info,
1647 DF_INSN_UID_DEFS (uid));
1648 if (include_uses)
1649 offset = df_add_refs_to_table (offset, ref_info,
1650 DF_INSN_UID_USES (uid));
1651 if (include_eq_uses)
1652 offset = df_add_refs_to_table (offset, ref_info,
1653 DF_INSN_UID_EQ_USES (uid));
1654 }
1655 return offset;
1656 }
1657
1658
1659 /* Organize the refs by insn into the table in REF_INFO. If
1660 blocks_to_analyze is defined, use that set, otherwise the entire
1661 program. Include the defs if INCLUDE_DEFS. Include the uses if
1662 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1663
1664 static void
1665 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1666 bool include_defs, bool include_uses,
1667 bool include_eq_uses)
1668 {
1669 basic_block bb;
1670 unsigned int offset = 0;
1671
1672 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1673 df_check_and_grow_ref_info (ref_info, 1);
1674 if (df->blocks_to_analyze)
1675 {
1676 bitmap_iterator bi;
1677 unsigned int index;
1678
1679 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1680 {
1681 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1682 include_defs, include_uses,
1683 include_eq_uses);
1684 }
1685
1686 ref_info->table_size = offset;
1687 }
1688 else
1689 {
1690 FOR_ALL_BB (bb)
1691 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1692 include_defs, include_uses,
1693 include_eq_uses);
1694 ref_info->table_size = offset;
1695 }
1696 }
1697
1698
 1699 /* If the use refs in DF are not organized as ORDER, reorganize them. */
1700
1701 void
1702 df_maybe_reorganize_use_refs (enum df_ref_order order)
1703 {
1704 if (order == df->use_info.ref_order)
1705 return;
1706
1707 switch (order)
1708 {
1709 case DF_REF_ORDER_BY_REG:
1710 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1711 break;
1712
1713 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1714 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1715 break;
1716
1717 case DF_REF_ORDER_BY_INSN:
1718 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1719 break;
1720
1721 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1722 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1723 break;
1724
1725 case DF_REF_ORDER_NO_TABLE:
1726 free (df->use_info.refs);
1727 df->use_info.refs = NULL;
1728 df->use_info.refs_size = 0;
1729 break;
1730
1731 case DF_REF_ORDER_UNORDERED:
1732 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1733 gcc_unreachable ();
1734 break;
1735 }
1736
1737 df->use_info.ref_order = order;
1738 }
1739
1740
 1741 /* If the def refs in DF are not organized as ORDER, reorganize them. */
1742
1743 void
1744 df_maybe_reorganize_def_refs (enum df_ref_order order)
1745 {
1746 if (order == df->def_info.ref_order)
1747 return;
1748
1749 switch (order)
1750 {
1751 case DF_REF_ORDER_BY_REG:
1752 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1753 break;
1754
1755 case DF_REF_ORDER_BY_INSN:
1756 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1757 break;
1758
1759 case DF_REF_ORDER_NO_TABLE:
1760 free (df->def_info.refs);
1761 df->def_info.refs = NULL;
1762 df->def_info.refs_size = 0;
1763 break;
1764
1765 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1766 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1767 case DF_REF_ORDER_UNORDERED:
1768 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1769 gcc_unreachable ();
1770 break;
1771 }
1772
1773 df->def_info.ref_order = order;
1774 }
1775
1776
1777 /* Change the BB of all refs in the ref chain from OLD_BB to NEW_BB.
1778 Assumes that all refs in the chain have the same BB. */
1779
1780 static void
1781 df_ref_chain_change_bb (struct df_ref **ref_rec,
1782 basic_block old_bb,
1783 basic_block new_bb)
1784 {
1785 while (*ref_rec)
1786 {
1787 struct df_ref *ref = *ref_rec;
1788
1789 gcc_assert (DF_REF_BB (ref) == old_bb);
1790 DF_REF_BB (ref) = new_bb;
1791 ref_rec++;
1792 }
1793 }
1794
1795
1796 /* Change all of the basic block references in INSN to use the insn's
1797 current basic block. This function is called from routines that move
1798 instructions from one block to another. */
1799
1800 void
1801 df_insn_change_bb (rtx insn, basic_block new_bb)
1802 {
1803 basic_block old_bb = BLOCK_FOR_INSN (insn);
1804 struct df_insn_info *insn_info;
1805 unsigned int uid = INSN_UID (insn);
1806
1807 if (old_bb == new_bb)
1808 return;
1809
1810 set_block_for_insn (insn, new_bb);
1811
1812 if (!df)
1813 return;
1814
1815 if (dump_file)
1816 fprintf (dump_file, "changing bb of uid %d\n", uid);
1817
1818 insn_info = DF_INSN_UID_SAFE_GET (uid);
1819 if (insn_info == NULL)
1820 {
1821 if (dump_file)
1822 fprintf (dump_file, " unscanned insn\n");
1823 df_insn_rescan (insn);
1824 return;
1825 }
1826
1827 if (!INSN_P (insn))
1828 return;
1829
1830 df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
1831 df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
1832 df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
1833
1834 df_set_bb_dirty (new_bb);
1835 if (old_bb)
1836 {
1837 if (dump_file)
1838 fprintf (dump_file, " from %d to %d\n",
1839 old_bb->index, new_bb->index);
1840 df_set_bb_dirty (old_bb);
1841 }
1842 else
1843 if (dump_file)
1844 fprintf (dump_file, " to %d\n", new_bb->index);
1845 }
1846
1847
1848 /* Helper function for df_ref_change_reg_with_loc. */
1849
1850 static void
1851 df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
1852 int new_regno, rtx loc)
1853 {
1854 struct df_ref *the_ref = old->reg_chain;
1855
1856 while (the_ref)
1857 {
1858 if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
1859 {
1860 struct df_ref *next_ref = the_ref->next_reg;
1861 struct df_ref *prev_ref = the_ref->prev_reg;
1862 struct df_ref **ref_vec, **ref_vec_t;
1863 unsigned int count = 0;
1864
1865 DF_REF_REGNO (the_ref) = new_regno;
1866 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1867
1868 /* Pull the_ref out of the old regno chain. */
1869 if (prev_ref)
1870 prev_ref->next_reg = next_ref;
1871 else
1872 old->reg_chain = next_ref;
1873 if (next_ref)
1874 next_ref->prev_reg = prev_ref;
1875 old->n_refs--;
1876
1877 /* Put the ref into the new regno chain. */
1878 the_ref->prev_reg = NULL;
1879 the_ref->next_reg = new->reg_chain;
1880 if (new->reg_chain)
1881 new->reg_chain->prev_reg = the_ref;
1882 new->reg_chain = the_ref;
1883 new->n_refs++;
1884 df_set_bb_dirty (DF_REF_BB (the_ref));
1885
1886 /* Need to resort the record that the ref was in because the
1887 regno is a sorting key. First, find the right record. */
1888 if (DF_REF_IS_ARTIFICIAL (the_ref))
1889 {
1890 unsigned int bb_index = DF_REF_BB (the_ref)->index;
1891 if (DF_REF_REG_DEF_P (the_ref))
1892 ref_vec = df_get_artificial_defs (bb_index);
1893 else
1894 ref_vec = df_get_artificial_uses (bb_index);
1895 }
1896 else
1897 {
1898 struct df_insn_info *insn_info
1899 = DF_INSN_GET (DF_REF_INSN (the_ref));
1900 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1901 ref_vec = insn_info->eq_uses;
1902 else
1903 ref_vec = insn_info->uses;
1904 if (dump_file)
1905 fprintf (dump_file, "changing reg in insn %d\n",
1906 INSN_UID (DF_REF_INSN (the_ref)));
1907 }
1908 ref_vec_t = ref_vec;
1909
1910 /* Find the length. */
1911 while (*ref_vec_t)
1912 {
1913 count++;
1914 ref_vec_t++;
1915 }
1916 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
1917
1918 the_ref = next_ref;
1919 }
1920 else
1921 the_ref = the_ref->next_reg;
1922 }
1923 }
1924
1925
1926 /* Change the regno of all refs that contained LOC from OLD_REGNO to
1927 NEW_REGNO. Refs that do not match LOC are not changed. This call
1928 is to support the SET_REGNO macro. */
1929
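/* A hedged sketch of the intended caller (the actual SET_REGNO macro
   lives in df.h and may differ in detail): a pass that rewrites the
   register number stored in a reg rtx in place should notify df
   first, so that the cached ref chains track the rename:

     df_ref_change_reg_with_loc (REGNO (reg), new_regno, reg);
     ... then store new_regno into the rtx itself ...  */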
1930 void
1931 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
1932 {
1933 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
1934 return;
1935
1936 df_grow_reg_info ();
1937
1938 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1939 DF_REG_DEF_GET (new_regno), new_regno, loc);
1940 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1941 DF_REG_USE_GET (new_regno), new_regno, loc);
1942 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1943 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
1944 }
1945
1946
1947 /* Delete the mw_hardregs that point into the eq_notes. */
1948
1949 static unsigned int
1950 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1951 {
1952 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1953 unsigned int deleted = 0;
1954 unsigned int count = 0;
1955 struct df_scan_problem_data *problem_data
1956 = (struct df_scan_problem_data *) df_scan->problem_data;
1957
1958 if (!*mw_vec)
1959 return 0;
1960
1961 while (*mw_vec)
1962 {
1963 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1964 {
1965 struct df_mw_hardreg **temp_vec = mw_vec;
1966
1967 pool_free (problem_data->mw_reg_pool, *mw_vec);
1969 /* Shove the remaining ones down one to fill the gap. While
1970 this looks O(n**2), it is highly unusual to have any mw regs
1971 in eq_notes and the chances of more than one are almost
1972 nonexistent. */
1973 while (*temp_vec)
1974 {
1975 *temp_vec = *(temp_vec + 1);
1976 temp_vec++;
1977 }
1978 deleted++;
1979 }
1980 else
1981 {
1982 mw_vec++;
1983 count++;
1984 }
1985 }
1986
1987 if (count == 0)
1988 {
1989 free (insn_info->mw_hardregs);
1990 insn_info->mw_hardregs = df_null_mw_rec;
1991 return 0;
1992 }
1993 return deleted;
1994 }
1995
1996
1997 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1998
1999 void
2000 df_notes_rescan (rtx insn)
2001 {
2002 struct df_insn_info *insn_info;
2003 unsigned int uid = INSN_UID (insn);
2004
2005 if (!df)
2006 return;
2007
2008 /* The client has disabled rescanning and plans to do it itself. */
2009 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2010 return;
2011
2012 /* Do nothing if the insn hasn't been emitted yet. */
2013 if (!BLOCK_FOR_INSN (insn))
2014 return;
2015
2016 df_grow_bb_info (df_scan);
2017 df_grow_reg_info ();
2018
2019 insn_info = DF_INSN_UID_SAFE_GET (uid);
2020
2021 /* The client has deferred rescanning. */
2022 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2023 {
2024 if (!insn_info)
2025 {
2026 insn_info = df_insn_create_insn_record (insn);
2027 insn_info->defs = df_null_ref_rec;
2028 insn_info->uses = df_null_ref_rec;
2029 insn_info->eq_uses = df_null_ref_rec;
2030 insn_info->mw_hardregs = df_null_mw_rec;
2031 }
2032
2033 bitmap_clear_bit (df->insns_to_delete, uid);
2034 /* If the insn is already queued for a full rescan, there is no
2035 need to also queue a rescan of its notes. */
2036 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2037 bitmap_set_bit (df->insns_to_notes_rescan, uid);
2038 return;
2039 }
2040
2041 bitmap_clear_bit (df->insns_to_delete, uid);
2042 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2043
2044 if (insn_info)
2045 {
2046 basic_block bb = BLOCK_FOR_INSN (insn);
2047 rtx note;
2048 struct df_collection_rec collection_rec;
2049 unsigned int num_deleted;
2050
2051 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2052 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2053 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2054
2055 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2056 df_ref_chain_delete (insn_info->eq_uses);
2057 insn_info->eq_uses = NULL;
2058
2059 /* Process REG_EQUIV/REG_EQUAL notes */
2060 for (note = REG_NOTES (insn); note;
2061 note = XEXP (note, 1))
2062 {
2063 switch (REG_NOTE_KIND (note))
2064 {
2065 case REG_EQUIV:
2066 case REG_EQUAL:
2067 df_uses_record (&collection_rec,
2068 &XEXP (note, 0), DF_REF_REG_USE,
2069 bb, insn, DF_REF_IN_NOTE, -1, -1);
2070 default:
2071 break;
2072 }
2073 }
2074
2075 /* Find some place to put any new mw_hardregs. */
2076 df_canonize_collection_rec (&collection_rec);
2077 if (collection_rec.next_mw)
2078 {
2079 unsigned int count = 0;
2080 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2081 while (*mw_rec)
2082 {
2083 count++;
2084 mw_rec++;
2085 }
2086
2087 if (count)
2088 {
2089 /* Append to the end of the existing record after
2090 expanding it if necessary. */
2091 if (collection_rec.next_mw > num_deleted)
2092 {
2093 insn_info->mw_hardregs =
2094 xrealloc (insn_info->mw_hardregs,
2095 (count + 1 + collection_rec.next_mw)
2096 * sizeof (struct df_mw_hardreg *));
2097 }
2098 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2099 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2100 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2101 sizeof (struct df_mw_hardreg *), df_mw_compare);
2102 }
2103 else
2104 {
2105 /* No vector there. */
2106 insn_info->mw_hardregs
2107 = XNEWVEC (struct df_mw_hardreg*,
2108 count + 1 + collection_rec.next_mw);
2109 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2110 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2111 }
2112 }
2113 /* Get rid of the mw_vec so that df_refs_add_to_chains will
2114 ignore it. */
2115 collection_rec.mw_vec = NULL;
2116 collection_rec.next_mw = 0;
2117 df_refs_add_to_chains (&collection_rec, bb, insn);
2118 }
2119 else
2120 df_insn_rescan (insn);
2121
2122 }
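/* Illustrative sketch (editorial addition): a pass that attaches a
   REG_EQUAL note to an insn should tell the scanner that only the
   notes changed, which is cheaper than a full df_insn_rescan.  SRC is
   a hypothetical simplified form of the insn's source.  */

static void
example_add_equal_note (rtx insn, rtx src)
{
  set_unique_reg_note (insn, REG_EQUAL, src);
  /* Rebuild just the eq_uses and the note-related mw_hardregs.  */
  df_notes_rescan (insn);
}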
2123
2124 \f
2125 /*----------------------------------------------------------------------------
2126 Hard core instruction scanning code. No external interfaces here,
2127 just a lot of routines that look inside insns.
2128 ----------------------------------------------------------------------------*/
2129
2130
2131 /* Return true if the contents of two df_ref's are identical.
2132 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2133
2134 static bool
2135 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2136 {
2137 if (!ref2)
2138 return false;
2139
2140 /* The two flag tests here are only to make sure we do not look at
2141 the offset and width if they are not there. The flags are
2142 compared in the next set of tests. */
2143 if ((DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2144 && (DF_REF_FLAGS_IS_SET (ref2, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2145 && ((DF_REF_OFFSET (ref1) != DF_REF_OFFSET (ref2))
2146 || (DF_REF_WIDTH (ref1) != DF_REF_WIDTH (ref2))))
2147 return false;
2148
2149 return (ref1 == ref2) ||
2150 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2151 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2152 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2153 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2154 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2155 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2156 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2157 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2158 }
2159
2160
2161 /* Compare REF1 and REF2 for sorting. This is only called from places
2162 where all of the refs are of the same type, in the same insn, and
2163 have the same bb. So these fields are not checked. */
2164
2165 static int
2166 df_ref_compare (const void *r1, const void *r2)
2167 {
2168 const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
2169 const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
2170
2171 if (ref1 == ref2)
2172 return 0;
2173
2174 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2175 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2176
2177 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2178 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2179
2180 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2181 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2182 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2183
2184 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2185 {
2186 /* If two refs are identical except that one of them is from
2187 a mw and one is not, we need to have the one with the mw
2188 first. */
2189 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2190 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2191 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2192 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2193 return -1;
2194 else
2195 return 1;
2196 }
2197
2198 /* The flags are the same at this point so it is safe to only look
2199 at ref1. */
2200 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2201 {
2202 if (DF_REF_OFFSET (ref1) != DF_REF_OFFSET (ref2))
2203 return DF_REF_OFFSET (ref1) - DF_REF_OFFSET (ref2);
2204 if (DF_REF_WIDTH (ref1) != DF_REF_WIDTH (ref2))
2205 return DF_REF_WIDTH (ref1) - DF_REF_WIDTH (ref2);
2206 }
2207 return 0;
2208 }
2209
2210 static void
2211 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2212 {
2213 struct df_ref *tmp = ref_vec[i];
2214 ref_vec[i] = ref_vec[j];
2215 ref_vec[j] = tmp;
2216 }
2217
2218 /* Sort and compress a set of refs. */
2219
2220 static unsigned int
2221 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2222 {
2223 unsigned int i;
2224 unsigned int dist = 0;
2225
2226 ref_vec[count] = NULL;
2227 /* If there are 1 or 0 elements, there is nothing to do. */
2228 if (count < 2)
2229 return count;
2230 else if (count == 2)
2231 {
2232 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2233 df_swap_refs (ref_vec, 0, 1);
2234 }
2235 else
2236 {
2237 for (i = 0; i < count - 1; i++)
2238 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2239 break;
2240 /* If the array is already strictly ordered, which is the most
2241 common case for large COUNT (as happens for CALL insns),
2242 there is no need to sort and filter out duplicates.
2243 Simply return the count.
2244 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2245 of DF_REF_COMPARE. */
2247 if (i == count - 1)
2248 return count;
2249 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2250 }
2251
2252 for (i = 0; i < count - dist; i++)
2253 {
2254 /* Find the next ref that is not equal to the current ref. */
2255 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2256 {
2257 df_free_ref (ref_vec[i + dist + 1]);
2258 dist++;
2259 }
2260 /* Copy it down to the next position. */
2261 if (dist)
2262 ref_vec[i+1] = ref_vec[i + dist + 1];
2263 }
2264
2265 count -= dist;
2266 ref_vec[count] = NULL;
2267 return count;
2268 }
2269
2270
2271 /* Return true if the contents of two df_mw_hardregs are
2272 identical. */
2273
2274 static bool
2275 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2276 {
2277 if (!mw2)
2278 return false;
2279 return (mw1 == mw2) ||
2280 (mw1->mw_reg == mw2->mw_reg
2281 && mw1->type == mw2->type
2282 && mw1->flags == mw2->flags
2283 && mw1->start_regno == mw2->start_regno
2284 && mw1->end_regno == mw2->end_regno);
2285 }
2286
2287
2288 /* Compare MW1 and MW2 for sorting. */
2289
2290 static int
2291 df_mw_compare (const void *m1, const void *m2)
2292 {
2293 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2294 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2295
2296 if (mw1 == mw2)
2297 return 0;
2298
2299 if (mw1->type != mw2->type)
2300 return mw1->type - mw2->type;
2301
2302 if (mw1->flags != mw2->flags)
2303 return mw1->flags - mw2->flags;
2304
2305 if (mw1->start_regno != mw2->start_regno)
2306 return mw1->start_regno - mw2->start_regno;
2307
2308 if (mw1->end_regno != mw2->end_regno)
2309 return mw1->end_regno - mw2->end_regno;
2310
2311 if (mw1->mw_reg != mw2->mw_reg)
2312 return mw1->mw_order - mw2->mw_order;
2313
2314 return 0;
2315 }
2316
2317
2318 /* Sort and compress a set of mws. */
2319
2320 static unsigned int
2321 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2322 {
2323 struct df_scan_problem_data *problem_data
2324 = (struct df_scan_problem_data *) df_scan->problem_data;
2325 unsigned int i;
2326 unsigned int dist = 0;
2327 mw_vec[count] = NULL;
2328
2329 if (count < 2)
2330 return count;
2331 else if (count == 2)
2332 {
2333 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2334 {
2335 struct df_mw_hardreg *tmp = mw_vec[0];
2336 mw_vec[0] = mw_vec[1];
2337 mw_vec[1] = tmp;
2338 }
2339 }
2340 else
2341 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2342
2343 for (i = 0; i < count - dist; i++)
2344 {
2345 /* Find the next ref that is not equal to the current ref. */
2346 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2347 {
2348 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2349 dist++;
2350 }
2351 /* Copy it down to the next position. */
2352 if (dist)
2353 mw_vec[i+1] = mw_vec[i + dist + 1];
2354 }
2355
2356 count -= dist;
2357 mw_vec[count] = NULL;
2358 return count;
2359 }
2360
2361
2362 /* Sort and remove duplicates from the COLLECTION_REC. */
2363
2364 static void
2365 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2366 {
2367 if (collection_rec->def_vec)
2368 collection_rec->next_def
2369 = df_sort_and_compress_refs (collection_rec->def_vec,
2370 collection_rec->next_def);
2371 if (collection_rec->use_vec)
2372 collection_rec->next_use
2373 = df_sort_and_compress_refs (collection_rec->use_vec,
2374 collection_rec->next_use);
2375 if (collection_rec->eq_use_vec)
2376 collection_rec->next_eq_use
2377 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2378 collection_rec->next_eq_use);
2379 if (collection_rec->mw_vec)
2380 collection_rec->next_mw
2381 = df_sort_and_compress_mws (collection_rec->mw_vec,
2382 collection_rec->next_mw);
2383 }
2384
2385
2386 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2387
2388 static void
2389 df_install_ref (struct df_ref *this_ref,
2390 struct df_reg_info *reg_info,
2391 struct df_ref_info *ref_info,
2392 bool add_to_table)
2393 {
2394 unsigned int regno = DF_REF_REGNO (this_ref);
2395 /* Add the ref to the reg_{def,use,eq_use} chain. */
2396 struct df_ref *head = reg_info->reg_chain;
2397
2398 reg_info->reg_chain = this_ref;
2399 reg_info->n_refs++;
2400
2401 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2402 {
2403 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2404 df->hard_regs_live_count[regno]++;
2405 }
2406
2407 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2408 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2409
2410 DF_REF_NEXT_REG (this_ref) = head;
2411
2412 /* We cannot actually link to the head of the chain. */
2413 DF_REF_PREV_REG (this_ref) = NULL;
2414
2415 if (head)
2416 DF_REF_PREV_REG (head) = this_ref;
2417
2418 if (add_to_table)
2419 {
2420 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2421 df_check_and_grow_ref_info (ref_info, 1);
2422 DF_REF_ID (this_ref) = ref_info->table_size;
2423 /* Add the ref to the big array of refs. */
2424 ref_info->refs[ref_info->table_size] = this_ref;
2425 ref_info->table_size++;
2426 }
2427 else
2428 DF_REF_ID (this_ref) = -1;
2429
2430 ref_info->total_size++;
2431 }
2432
2433
2434 /* This function takes one of the groups of refs (defs, uses or
2435 eq_uses) and installs the entire group into the insn. It also adds
2436 each of these refs into the appropriate chains. */
2437
2438 static struct df_ref **
2439 df_install_refs (basic_block bb,
2440 struct df_ref **old_vec, unsigned int count,
2441 struct df_reg_info **reg_info,
2442 struct df_ref_info *ref_info,
2443 bool is_notes)
2444 {
2445 if (count)
2446 {
2447 unsigned int i;
2448 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2449 bool add_to_table;
2450
2451 switch (ref_info->ref_order)
2452 {
2453 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2454 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2455 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2456 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2457 add_to_table = true;
2458 break;
2459 case DF_REF_ORDER_UNORDERED:
2460 case DF_REF_ORDER_BY_REG:
2461 case DF_REF_ORDER_BY_INSN:
2462 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2463 add_to_table = !is_notes;
2464 break;
2465 default:
2466 add_to_table = false;
2467 break;
2468 }
2469
2470 /* Do not add if ref is not in the right blocks. */
2471 if (add_to_table && df->analyze_subset)
2472 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2473
2474 for (i = 0; i < count; i++)
2475 {
2476 struct df_ref *this_ref = old_vec[i];
2477 new_vec[i] = this_ref;
2478 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2479 ref_info, add_to_table);
2480 }
2481
2482 new_vec[count] = NULL;
2483 return new_vec;
2484 }
2485 else
2486 return df_null_ref_rec;
2487 }
2488
2489
2490 /* This function takes the mws and installs the entire group into
2491 the insn. */
2492
2493 static struct df_mw_hardreg **
2494 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2495 {
2496 if (count)
2497 {
2498 struct df_mw_hardreg **new_vec
2499 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2500 memcpy (new_vec, old_vec,
2501 sizeof (struct df_mw_hardreg*) * (count + 1));
2502 return new_vec;
2503 }
2504 else
2505 return df_null_mw_rec;
2506 }
2507
2508
2509 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2510 chains and update other necessary information. */
2511
2512 static void
2513 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2514 basic_block bb, rtx insn)
2515 {
2516 if (insn)
2517 {
2518 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2519 /* If there is a vector in the collection rec, add it to the
2520 insn. A null rec is a signal that the caller will handle the
2521 chain specially. */
2522 if (collection_rec->def_vec)
2523 {
2524 if (insn_rec->defs && *insn_rec->defs)
2525 free (insn_rec->defs);
2526 insn_rec->defs
2527 = df_install_refs (bb, collection_rec->def_vec,
2528 collection_rec->next_def,
2529 df->def_regs,
2530 &df->def_info, false);
2531 }
2532 if (collection_rec->use_vec)
2533 {
2534 if (insn_rec->uses && *insn_rec->uses)
2535 free (insn_rec->uses);
2536 insn_rec->uses
2537 = df_install_refs (bb, collection_rec->use_vec,
2538 collection_rec->next_use,
2539 df->use_regs,
2540 &df->use_info, false);
2541 }
2542 if (collection_rec->eq_use_vec)
2543 {
2544 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2545 free (insn_rec->eq_uses);
2546 insn_rec->eq_uses
2547 = df_install_refs (bb, collection_rec->eq_use_vec,
2548 collection_rec->next_eq_use,
2549 df->eq_use_regs,
2550 &df->use_info, true);
2551 }
2552 if (collection_rec->mw_vec)
2553 {
2554 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2555 free (insn_rec->mw_hardregs);
2556 insn_rec->mw_hardregs
2557 = df_install_mws (collection_rec->mw_vec,
2558 collection_rec->next_mw);
2559 }
2560 }
2561 else
2562 {
2563 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2564
2565 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2566 free (bb_info->artificial_defs);
2567 bb_info->artificial_defs
2568 = df_install_refs (bb, collection_rec->def_vec,
2569 collection_rec->next_def,
2570 df->def_regs,
2571 &df->def_info, false);
2572 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2573 free (bb_info->artificial_uses);
2574 bb_info->artificial_uses
2575 = df_install_refs (bb, collection_rec->use_vec,
2576 collection_rec->next_use,
2577 df->use_regs,
2578 &df->use_info, false);
2579 }
2580 }
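/* Illustrative sketch (editorial addition): the two-phase pattern the
   scanner uses everywhere.  Refs are first collected, side-effect
   free, into a local df_collection_rec and then installed in one
   step.  The vector bounds follow the fixed sizes used elsewhere in
   this file.  */

static void
example_rescan_one_insn (basic_block bb, rtx insn)
{
  struct df_collection_rec collection_rec;
  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 100);

  /* Phase 1: collect refs without touching the existing chains.  */
  df_insn_refs_collect (&collection_rec, bb, insn);
  /* Phase 2: install them into the insn and the reg/ref chains.  */
  df_refs_add_to_chains (&collection_rec, bb, insn);
}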
2581
2582
2583 /* Allocate a ref and initialize its fields.
2584
2585 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
2586 DF_REF_ZERO_EXTRACT, WIDTH and OFFSET hold the constant width and
2587 offset of the extraction; they should be -1 if those flags are
2588 set but the values are not constant. */
2589
2590 static struct df_ref *
2591 df_ref_create_structure (struct df_collection_rec *collection_rec,
2592 rtx reg, rtx *loc,
2593 basic_block bb, rtx insn,
2594 enum df_ref_type ref_type,
2595 enum df_ref_flags ref_flags,
2596 int width, int offset)
2597 {
2598 struct df_ref *this_ref;
2599 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2600 struct df_scan_problem_data *problem_data
2601 = (struct df_scan_problem_data *) df_scan->problem_data;
2602
2603 if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2604 {
2605 this_ref = pool_alloc (problem_data->ref_extract_pool);
2606 DF_REF_WIDTH (this_ref) = width;
2607 DF_REF_OFFSET (this_ref) = offset;
2608 }
2609 else
2610 this_ref = pool_alloc (problem_data->ref_pool);
2611 DF_REF_ID (this_ref) = -1;
2612 DF_REF_REG (this_ref) = reg;
2613 DF_REF_REGNO (this_ref) = regno;
2614 DF_REF_LOC (this_ref) = loc;
2615 DF_REF_INSN (this_ref) = insn;
2616 DF_REF_CHAIN (this_ref) = NULL;
2617 DF_REF_TYPE (this_ref) = ref_type;
2618 DF_REF_FLAGS (this_ref) = ref_flags;
2619 DF_REF_BB (this_ref) = bb;
2620 DF_REF_NEXT_REG (this_ref) = NULL;
2621 DF_REF_PREV_REG (this_ref) = NULL;
2622 DF_REF_ORDER (this_ref) = df->ref_order++;
2623
2624 /* We need to clear this bit because fwprop, and in the future
2625 possibly other optimizations, sometimes create new refs using
2626 old refs as the model. */
2627 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2628
2629 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2630 if ((regno < FIRST_PSEUDO_REGISTER)
2631 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2632 {
2633 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2634 {
2635 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2636 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2637 }
2638 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2639 && (regno == FRAME_POINTER_REGNUM
2640 || regno == ARG_POINTER_REGNUM)))
2641 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2642 }
2643
2644 if (collection_rec)
2645 {
2646 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2647 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2648 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2649 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2650 else
2651 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2652 }
2653
2654 return this_ref;
2655 }
2656
2657
2658 /* Create new references of type DF_REF_TYPE for each part of register REG
2659 at address LOC within INSN of BB.
2660
2661 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
2662 DF_REF_ZERO_EXTRACT, WIDTH and OFFSET hold the constant width and
2663 offset of the extraction; they should be -1 if those flags are
2664 set but the values are not constant. */
2665
2666
2667 static void
2668 df_ref_record (struct df_collection_rec *collection_rec,
2669 rtx reg, rtx *loc,
2670 basic_block bb, rtx insn,
2671 enum df_ref_type ref_type,
2672 enum df_ref_flags ref_flags,
2673 int width, int offset)
2674 {
2675 unsigned int regno;
2676
2677 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2678
2679 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2680 if (regno < FIRST_PSEUDO_REGISTER)
2681 {
2682 struct df_mw_hardreg *hardreg = NULL;
2683 struct df_scan_problem_data *problem_data
2684 = (struct df_scan_problem_data *) df_scan->problem_data;
2685 unsigned int i;
2686 unsigned int endregno;
2687 struct df_ref *ref;
2688
2689 if (GET_CODE (reg) == SUBREG)
2690 {
2691 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2692 SUBREG_BYTE (reg), GET_MODE (reg));
2693 endregno = regno + subreg_nregs (reg);
2694 }
2695 else
2696 endregno = END_HARD_REGNO (reg);
2697
2698 /* If this is a multiword hardreg, we create some extra
2699 data structures that will enable us to easily build REG_DEAD
2700 and REG_UNUSED notes. */
2701 if ((endregno != regno + 1) && insn)
2702 {
2703 /* Sets to a subreg of a multiword register are partial.
2704 Sets to a non-subreg of a multiword register are not. */
2705 if (GET_CODE (reg) == SUBREG)
2706 ref_flags |= DF_REF_PARTIAL;
2707 ref_flags |= DF_REF_MW_HARDREG;
2708
2709 hardreg = pool_alloc (problem_data->mw_reg_pool);
2710 hardreg->type = ref_type;
2711 hardreg->flags = ref_flags;
2712 hardreg->mw_reg = reg;
2713 hardreg->start_regno = regno;
2714 hardreg->end_regno = endregno - 1;
2715 hardreg->mw_order = df->ref_order++;
2716 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2717 }
2718
2719 for (i = regno; i < endregno; i++)
2720 {
2721 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2722 bb, insn, ref_type, ref_flags, width, offset);
2723
2724 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2725 }
2726 }
2727 else
2728 {
2729 df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2730 ref_type, ref_flags, width, offset);
2732 }
2733 }
2734
2735
2736 /* A set to a non-paradoxical SUBREG for which the number of
2737 word_mode units covered by the outer mode is smaller than that
2738 covered by the inner mode is a read-modify-write operation.
2739 This function returns true iff the SUBREG X is such a SUBREG. */
2740
2741 bool
2742 df_read_modify_subreg_p (rtx x)
2743 {
2744 unsigned int isize, osize;
2745 if (GET_CODE (x) != SUBREG)
2746 return false;
2747 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2748 osize = GET_MODE_SIZE (GET_MODE (x));
2749 return isize > osize
2750 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2751 }
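/* Illustrative sketch (editorial addition): on a target whose word is
   SImode, a store to the low SImode half of a DImode register leaves
   the upper half intact, so the SUBREG destination is
   read-modify-write.  Register number 100 is an arbitrary
   hypothetical pseudo.  */

static bool
example_subreg_is_rmw (void)
{
  rtx inner = gen_rtx_REG (DImode, 100);
  rtx outer = gen_rtx_SUBREG (SImode, inner, 0);

  /* True when the outer mode covers fewer word_mode units than the
     inner mode and the inner mode exceeds the natural register size.  */
  return df_read_modify_subreg_p (outer);
}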
2752
2753
2754 /* Process all the registers defined in the rtx, X.
2755 Autoincrement/decrement definitions will be picked up by
2756 df_uses_record. */
2757
2758 static void
2759 df_def_record_1 (struct df_collection_rec *collection_rec,
2760 rtx x, basic_block bb, rtx insn,
2761 enum df_ref_flags flags)
2762 {
2763 rtx *loc;
2764 rtx dst;
2765 int offset = -1;
2766 int width = -1;
2767
2768 /* We may recursively call ourselves on EXPR_LIST when dealing with a
2769 PARALLEL construct. */
2770 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2771 loc = &XEXP (x, 0);
2772 else
2773 loc = &SET_DEST (x);
2774 dst = *loc;
2775
2776 /* It is legal to have a set destination be a parallel. */
2777 if (GET_CODE (dst) == PARALLEL)
2778 {
2779 int i;
2780
2781 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2782 {
2783 rtx temp = XVECEXP (dst, 0, i);
2784 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2785 || GET_CODE (temp) == SET)
2786 df_def_record_1 (collection_rec,
2787 temp, bb, insn,
2788 GET_CODE (temp) == CLOBBER
2789 ? flags | DF_REF_MUST_CLOBBER : flags);
2790 }
2791 return;
2792 }
2793
2794 if (GET_CODE (dst) == STRICT_LOW_PART)
2795 {
2796 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2797
2798 loc = &XEXP (dst, 0);
2799 dst = *loc;
2800 }
2801
2802 if (GET_CODE (dst) == ZERO_EXTRACT)
2803 {
2804 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2805
2806 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
2807 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
2808 {
2809 width = INTVAL (XEXP (dst, 1));
2810 offset = INTVAL (XEXP (dst, 2));
2811 }
2812
2813 loc = &XEXP (dst, 0);
2814 dst = *loc;
2815 }
2816
2817 /* At this point if we do not have a reg or a subreg, just return. */
2818 if (REG_P (dst))
2819 {
2820 df_ref_record (collection_rec,
2821 dst, loc, bb, insn, DF_REF_REG_DEF, flags, width, offset);
2822
2823 /* We want to keep sp alive everywhere - by making all
2824 writes to sp also uses of sp. */
2825 if (REGNO (dst) == STACK_POINTER_REGNUM)
2826 df_ref_record (collection_rec,
2827 dst, NULL, bb, insn, DF_REF_REG_USE, flags, width, offset);
2828 }
2829 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2830 {
2831 if (df_read_modify_subreg_p (dst))
2832 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2833
2834 flags |= DF_REF_SUBREG;
2835
2836 df_ref_record (collection_rec,
2837 dst, loc, bb, insn, DF_REF_REG_DEF, flags, width, offset);
2838 }
2839 }
2840
2841
2842 /* Process all the registers defined in the pattern rtx, X. */
2843
2844 static void
2845 df_defs_record (struct df_collection_rec *collection_rec,
2846 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2847 {
2848 RTX_CODE code = GET_CODE (x);
2849
2850 if (code == SET || code == CLOBBER)
2851 {
2852 /* Mark the single def within the pattern. */
2853 enum df_ref_flags clobber_flags = flags;
2854 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2855 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2856 }
2857 else if (code == COND_EXEC)
2858 {
2859 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2860 bb, insn, DF_REF_CONDITIONAL);
2861 }
2862 else if (code == PARALLEL)
2863 {
2864 int i;
2865
2866 /* Mark the multiple defs within the pattern. */
2867 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2868 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2869 }
2870 }
2871
2872
2873 /* Process all the registers used in the rtx at address LOC.
2874
2875 If the REF_FLAGS field contains DF_REF_SIGN_EXTRACT or
2876 DF_REF_ZERO_EXTRACT, WIDTH and OFFSET hold the constant width and
2877 offset of the extraction; they should be -1 if those flags are
2878 set but the values are not constant. */
2879
2880 static void
2881 df_uses_record (struct df_collection_rec *collection_rec,
2882 rtx *loc, enum df_ref_type ref_type,
2883 basic_block bb, rtx insn, enum df_ref_flags flags,
2884 int width, int offset)
2885 {
2886 RTX_CODE code;
2887 rtx x;
2888
2889 retry:
2890 x = *loc;
2891 if (!x)
2892 return;
2893 code = GET_CODE (x);
2894 switch (code)
2895 {
2896 case LABEL_REF:
2897 case SYMBOL_REF:
2898 case CONST_INT:
2899 case CONST:
2900 case CONST_DOUBLE:
2901 case CONST_FIXED:
2902 case CONST_VECTOR:
2903 case PC:
2904 case CC0:
2905 case ADDR_VEC:
2906 case ADDR_DIFF_VEC:
2907 return;
2908
2909 case CLOBBER:
2910 /* If we are clobbering a MEM, mark any registers inside the address
2911 as being used. */
2912 if (MEM_P (XEXP (x, 0)))
2913 df_uses_record (collection_rec,
2914 &XEXP (XEXP (x, 0), 0),
2915 DF_REF_REG_MEM_STORE, bb, insn, flags, width, offset);
2916
2917 /* If we're clobbering a REG then we have a def so ignore. */
2918 return;
2919
2920 case MEM:
2921 df_uses_record (collection_rec,
2922 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2923 bb, insn, flags & DF_REF_IN_NOTE, width, offset);
2924 return;
2925
2926 case SUBREG:
2927 /* While we're here, optimize this case. */
2928 flags |= DF_REF_PARTIAL;
2929 /* In case the SUBREG is not of a REG, do not optimize. */
2930 if (!REG_P (SUBREG_REG (x)))
2931 {
2932 loc = &SUBREG_REG (x);
2933 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags, width, offset);
2934 return;
2935 }
2936 /* ... Fall through ... */
2937
2938 case REG:
2939 df_ref_record (collection_rec,
2940 x, loc, bb, insn, ref_type, flags, width, offset);
2941 return;
2942
2943 case SIGN_EXTRACT:
2944 case ZERO_EXTRACT:
2945 {
2946 /* If the parameters to the zero or sign extract are
2947 constants, strip them off and recurse, otherwise there is
2948 no information that we can gain from this operation. */
2949 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2950 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2951 {
2952 width = INTVAL (XEXP (x, 1));
2953 offset = INTVAL (XEXP (x, 2));
2954
2955 if (code == ZERO_EXTRACT)
2956 flags |= DF_REF_ZERO_EXTRACT;
2957 else
2958 flags |= DF_REF_SIGN_EXTRACT;
2959
2960 df_uses_record (collection_rec,
2961 &XEXP (x, 0), ref_type, bb, insn, flags, width, offset);
2962 return;
2963 }
2964 }
2965 break;
2966
2967 case SET:
2968 {
2969 rtx dst = SET_DEST (x);
2970 gcc_assert (!(flags & DF_REF_IN_NOTE));
2971 df_uses_record (collection_rec,
2972 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags, width, offset);
2973
2974 switch (GET_CODE (dst))
2975 {
2976 case SUBREG:
2977 if (df_read_modify_subreg_p (dst))
2978 {
2979 df_uses_record (collection_rec, &SUBREG_REG (dst),
2980 DF_REF_REG_USE, bb, insn,
2981 flags | DF_REF_READ_WRITE | DF_REF_SUBREG, width, offset);
2982 break;
2983 }
2984 /* Fall through. */
2985 case REG:
2986 case PARALLEL:
2987 case SCRATCH:
2988 case PC:
2989 case CC0:
2990 break;
2991 case MEM:
2992 df_uses_record (collection_rec, &XEXP (dst, 0),
2993 DF_REF_REG_MEM_STORE, bb, insn, flags, width, offset);
2994 break;
2995 case STRICT_LOW_PART:
2996 {
2997 rtx *temp = &XEXP (dst, 0);
2998 /* A strict_low_part uses the whole REG and not just the
2999 SUBREG. */
3000 dst = XEXP (dst, 0);
3001 df_uses_record (collection_rec,
3002 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3003 DF_REF_REG_USE, bb, insn,
3004 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART, width, offset);
3005 }
3006 break;
3007 case ZERO_EXTRACT:
3008 {
3009 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
3010 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
3011 {
3012 width = INTVAL (XEXP (dst, 1));
3013 offset = INTVAL (XEXP (dst, 2));
3014 }
3015 else
3016 {
3017 df_uses_record (collection_rec, &XEXP (dst, 1),
3018 DF_REF_REG_USE, bb, insn, flags, width, offset);
3019 df_uses_record (collection_rec, &XEXP (dst, 2),
3020 DF_REF_REG_USE, bb, insn, flags, width, offset);
3021 }
3022
3023 df_uses_record (collection_rec, &XEXP (dst, 0),
3024 DF_REF_REG_USE, bb, insn,
3025 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT, width, offset);
3026 }
3027 break;
3028
3029 default:
3030 gcc_unreachable ();
3031 }
3032 return;
3033 }
3034
3035 case RETURN:
3036 break;
3037
3038 case ASM_OPERANDS:
3039 case UNSPEC_VOLATILE:
3040 case TRAP_IF:
3041 case ASM_INPUT:
3042 {
3043 /* Traditional and volatile asm instructions must be
3044 considered to use and clobber all hard registers, all
3045 pseudo-registers and all of memory. So must TRAP_IF and
3046 UNSPEC_VOLATILE operations.
3047
3048 Consider for instance a volatile asm that changes the fpu
3049 rounding mode. An insn should not be moved across this
3050 even if it only uses pseudo-regs because it might give an
3051 incorrectly rounded result.
3052
3053 However, flow.c's liveness computation did *not* do this,
3054 giving the reasoning "?!? Unfortunately, marking all
3055 hard registers as live causes massive problems for the
3056 register allocator and marking all pseudos as live creates
3057 mountains of uninitialized variable warnings."
3058
3059 In order to maintain the status quo with regard to liveness
3060 and uses, we do what flow.c did and just mark any regs we
3061 can find in ASM_OPERANDS as used. In global.c, asm insns
3062 are scanned and regs_asm_clobbered is filled out.
3063
3064 For all ASM_OPERANDS, we must traverse the vector of input
3065 operands. We cannot just fall through here since then we
3066 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3067 which does not indicate a traditional asm, unlike its
3068 normal usage. */
3069 if (code == ASM_OPERANDS)
3070 {
3071 int j;
3072
3073 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3074 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3075 DF_REF_REG_USE, bb, insn, flags, width, offset);
3076 return;
3077 }
3078 break;
3079 }
3080
3081 case PRE_DEC:
3082 case POST_DEC:
3083 case PRE_INC:
3084 case POST_INC:
3085 case PRE_MODIFY:
3086 case POST_MODIFY:
3087 /* Catch the def of the register being modified. */
3088 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
3089 DF_REF_REG_DEF,
3090 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY, width, offset);
3091
3092 /* ... Fall through to handle uses ... */
3093
3094 default:
3095 break;
3096 }
3097
3098 /* Recursively scan the operands of this expression. */
3099 {
3100 const char *fmt = GET_RTX_FORMAT (code);
3101 int i;
3102
3103 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3104 {
3105 if (fmt[i] == 'e')
3106 {
3107 /* Tail recursive case: save a function call level. */
3108 if (i == 0)
3109 {
3110 loc = &XEXP (x, 0);
3111 goto retry;
3112 }
3113 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3114 bb, insn, flags, width, offset);
3115 }
3116 else if (fmt[i] == 'E')
3117 {
3118 int j;
3119 for (j = 0; j < XVECLEN (x, i); j++)
3120 df_uses_record (collection_rec,
3121 &XVECEXP (x, i, j), ref_type,
3122 bb, insn, flags, width, offset);
3123 }
3124 }
3125 }
3126
3127 return;
3128 }
3129
3130
3131 /* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3132
3133 static void
3134 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3135 {
3136 unsigned int i;
3137 for (i = 0; i < collection_rec->next_def; i++)
3138 {
3139 struct df_ref *ref = collection_rec->def_vec[i];
3140 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3141 {
3142 int width = -1;
3143 int offset = -1;
3144 struct df_ref *use;
3145
3146 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3147 {
3148 width = DF_REF_WIDTH (ref);
3149 offset = DF_REF_OFFSET (ref);
3150 }
3151
3152 use = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3153 DF_REF_LOC (ref), DF_REF_BB (ref),
3154 DF_REF_INSN (ref), DF_REF_REG_USE,
3155 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3156 width, offset);
3157 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3158 }
3159 }
3160 }
3161
3162
3163 /* Get call's extra defs and uses. */
3164
3165 static void
3166 df_get_call_refs (struct df_collection_rec * collection_rec,
3167 basic_block bb,
3168 rtx insn,
3169 enum df_ref_flags flags)
3170 {
3171 rtx note;
3172 bitmap_iterator bi;
3173 unsigned int ui;
3174 bool is_sibling_call;
3175 unsigned int i;
3176 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3177
3178 /* Do not generate clobbers for registers that are the result of the
3179 call. This causes ordering problems in the chain building code
3180 depending on which def is seen first. */
3181 for (i = 0; i < collection_rec->next_def; i++)
3182 {
3183 struct df_ref *def = collection_rec->def_vec[i];
3184 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3185 }
3186
3187 /* Record the registers used to pass arguments, and those
3188 explicitly noted as clobbered. */
3189 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3190 note = XEXP (note, 1))
3191 {
3192 if (GET_CODE (XEXP (note, 0)) == USE)
3193 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3194 DF_REF_REG_USE, bb, insn, flags, -1, -1);
3195 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3196 {
3197 if (REG_P (XEXP (XEXP (note, 0), 0)))
3198 {
3199 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3200 if (!bitmap_bit_p (defs_generated, regno))
3201 df_defs_record (collection_rec, XEXP (note, 0), bb,
3202 insn, flags);
3203 }
3204 else
3205 df_uses_record (collection_rec, &XEXP (note, 0),
3206 DF_REF_REG_USE, bb, insn, flags, -1, -1);
3207 }
3208 }
3209
3210 /* The stack ptr is used (honorarily) by a CALL insn. */
3211 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3212 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags, -1, -1);
3213
3214 /* Calls may also reference any of the global registers,
3215 so they are recorded as used. */
3216 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3217 if (global_regs[i])
3218 {
3219 df_ref_record (collection_rec, regno_reg_rtx[i],
3220 NULL, bb, insn, DF_REF_REG_USE, flags, -1, -1);
3221 df_ref_record (collection_rec, regno_reg_rtx[i],
3222 NULL, bb, insn, DF_REF_REG_DEF, flags, -1, -1);
3223 }
3224
3225 is_sibling_call = SIBLING_CALL_P (insn);
3226 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3227 {
3228 if (!global_regs[ui]
3229 && (!bitmap_bit_p (defs_generated, ui))
3230 && (!is_sibling_call
3231 || !bitmap_bit_p (df->exit_block_uses, ui)
3232 || refers_to_regno_p (ui, ui+1,
3233 current_function_return_rtx, NULL)))
3234 df_ref_record (collection_rec, regno_reg_rtx[ui],
3235 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags, -1, -1);
3236 }
3237
3238 BITMAP_FREE (defs_generated);
3239 return;
3240 }
3241
3242 /* Collect all refs in the INSN. This function is free of any
3243 side-effects - it will create and return lists of df_ref's in
3244 the COLLECTION_REC without putting those refs into the existing
3245 ref chains and reg chains. */
3246
3247 static void
3248 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3249 basic_block bb, rtx insn)
3250 {
3251 rtx note;
3252 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3253
3254 /* Clear out the collection record. */
3255 collection_rec->next_def = 0;
3256 collection_rec->next_use = 0;
3257 collection_rec->next_eq_use = 0;
3258 collection_rec->next_mw = 0;
3259
3260 /* Record register defs. */
3261 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3262
3263 /* Process REG_EQUIV/REG_EQUAL notes */
3264 for (note = REG_NOTES (insn); note;
3265 note = XEXP (note, 1))
3266 {
3267 switch (REG_NOTE_KIND (note))
3268 {
3269 case REG_EQUIV:
3270 case REG_EQUAL:
3271 df_uses_record (collection_rec,
3272 &XEXP (note, 0), DF_REF_REG_USE,
3273 bb, insn, DF_REF_IN_NOTE, -1, -1);
3274 break;
3275 case REG_NON_LOCAL_GOTO:
3276 /* The frame ptr is used by a non-local goto. */
3277 df_ref_record (collection_rec,
3278 regno_reg_rtx[FRAME_POINTER_REGNUM],
3279 NULL,
3280 bb, insn,
3281 DF_REF_REG_USE, 0, -1, -1);
3282 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3283 df_ref_record (collection_rec,
3284 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3285 NULL,
3286 bb, insn,
3287 DF_REF_REG_USE, 0, -1, -1);
3288 #endif
3289 break;
3290 default:
3291 break;
3292 }
3293 }
3294
3295 if (CALL_P (insn))
3296 df_get_call_refs (collection_rec, bb, insn,
3297 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3298
3299 /* Record the register uses. */
3300 df_uses_record (collection_rec,
3301 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0, -1, -1);
3302
3303 /* DF_REF_CONDITIONAL needs corresponding USES. */
3304 if (is_cond_exec)
3305 df_get_conditional_uses (collection_rec);
3306
3307 df_canonize_collection_rec (collection_rec);
3308 }
3309
3310 /* Recompute the luids for the insns in BB. */
3311
3312 void
3313 df_recompute_luids (basic_block bb)
3314 {
3315 rtx insn;
3316 int luid = 0;
3317
3318 df_grow_insn_info ();
3319
3320 /* Scan the block an insn at a time from beginning to end. */
3321 FOR_BB_INSNS (bb, insn)
3322 {
3323 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3324 /* Inserting labels does not always trigger the incremental
3325 rescanning. */
3326 if (!insn_info)
3327 {
3328 gcc_assert (!INSN_P (insn));
3329 df_insn_create_insn_record (insn);
3330 }
3331
3332 DF_INSN_LUID (insn) = luid;
3333 if (INSN_P (insn))
3334 luid++;
3335 }
3336 }
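/* Illustrative sketch (editorial addition): a pass that emits a new
   insn into the middle of BB can refresh the whole block's logical
   uids instead of maintaining them by hand.  PAT and BEFORE are
   hypothetical; BEFORE is an existing insn in BB.  */

static void
example_insert_and_renumber (basic_block bb, rtx pat, rtx before)
{
  emit_insn_before (pat, before);
  /* The luids downstream of the insertion point are now stale.  */
  df_recompute_luids (bb);
}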
3337
3338
3339 /* Returns true if the function entry needs to
3340 define the static chain register. */
3341
3342 static bool
3343 df_need_static_chain_reg (struct function *fun)
3344 {
3345 tree fun_context = decl_function_context (fun->decl);
3346 return fun_context
3347 && !DECL_NO_STATIC_CHAIN (fun_context);
3348 }
3349
3350
3351 /* Collect all artificial refs at the block level for BB and add them
3352 to COLLECTION_REC. */
3353
3354 static void
3355 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3356 {
3357 collection_rec->next_def = 0;
3358 collection_rec->next_use = 0;
3359 collection_rec->next_eq_use = 0;
3360 collection_rec->next_mw = 0;
3361
3362 if (bb->index == ENTRY_BLOCK)
3363 {
3364 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3365 return;
3366 }
3367 else if (bb->index == EXIT_BLOCK)
3368 {
3369 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3370 return;
3371 }
3372
3373 #ifdef EH_RETURN_DATA_REGNO
3374 if (bb_has_eh_pred (bb))
3375 {
3376 unsigned int i;
3377 /* Mark the registers that will contain data for the handler. */
3378 for (i = 0; ; ++i)
3379 {
3380 unsigned regno = EH_RETURN_DATA_REGNO (i);
3381 if (regno == INVALID_REGNUM)
3382 break;
3383 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3384 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1);
3385 }
3386 }
3387 #endif
3388
3389
3390 #ifdef EH_USES
3391 if (bb_has_eh_pred (bb))
3392 {
3393 unsigned int i;
3394 /* This code is putting in an artificial ref for the use at the
3395 TOP of the block that receives the exception. It is too
3396 cumbersome to actually put the ref on the edge. We could
3397 either model this at the top of the receiver block or the
3398 bottom of the sender block.
3399
3400 The bottom of the sender block is problematic because not all
3401 out-edges of a block are eh-edges. However, it is true
3402 that all edges into a block are either eh-edges or none of
3403 them are eh-edges. Thus, we can model this at the top of the
3404 eh-receiver for all of the edges at once. */
3405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3406 if (EH_USES (i))
3407 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3408 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1);
3409 }
3410 #endif
3411
3412 /* Add the hard_frame_pointer if this block is the target of a
3413 non-local goto. */
3414 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3415 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3416 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1);
3417
3418 /* Add the artificial uses. */
3419 if (bb->index >= NUM_FIXED_BLOCKS)
3420 {
3421 bitmap_iterator bi;
3422 unsigned int regno;
3423 bitmap au = bb_has_eh_pred (bb)
3424 ? df->eh_block_artificial_uses
3425 : df->regular_block_artificial_uses;
3426
3427 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3428 {
3429 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3430 bb, NULL, DF_REF_REG_USE, 0, -1, -1);
3431 }
3432 }
3433
3434 df_canonize_collection_rec (collection_rec);
3435 }
3436
3437
3438 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3439
3440 void
3441 df_bb_refs_record (int bb_index, bool scan_insns)
3442 {
3443 basic_block bb = BASIC_BLOCK (bb_index);
3444 rtx insn;
3445 int luid = 0;
3446 struct df_scan_bb_info *bb_info;
3447 struct df_collection_rec collection_rec;
3448 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3449 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3450 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3451 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
3452
3453 if (!df)
3454 return;
3455
3456 bb_info = df_scan_get_bb_info (bb_index);
3457
3458 /* Need to make sure that there is a record in the basic block info. */
3459 if (!bb_info)
3460 {
3461 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3462 df_scan_set_bb_info (bb_index, bb_info);
3463 bb_info->artificial_defs = NULL;
3464 bb_info->artificial_uses = NULL;
3465 }
3466
3467 if (scan_insns)
3468 /* Scan the block an insn at a time from beginning to end. */
3469 FOR_BB_INSNS (bb, insn)
3470 {
3471 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3472 gcc_assert (!insn_info);
3473
3474 df_insn_create_insn_record (insn);
3475 if (INSN_P (insn))
3476 {
3477 /* Record refs within INSN. */
3478 DF_INSN_LUID (insn) = luid++;
3479 df_insn_refs_collect (&collection_rec, bb, insn);
3480 df_refs_add_to_chains (&collection_rec, bb, insn);
3481 }
3482 DF_INSN_LUID (insn) = luid;
3483 }
3484
3485 /* Other block level artificial refs. */
3486 df_bb_refs_collect (&collection_rec, bb);
3487 df_refs_add_to_chains (&collection_rec, bb, NULL);
3488
3489 /* Now that the block has been processed, set the block as dirty so
3490 LR and LIVE will get it processed. */
3491 df_set_bb_dirty (bb);
3492 }
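/* Illustrative sketch (editorial addition): a pass that has just
   created NEW_BB and filled it with insns can hand the whole block to
   the scanner.  Passing true for SCAN_INSNS gives every insn a fresh
   record and luid in addition to the block-level artificial refs.  */

static void
example_scan_new_block (basic_block new_bb)
{
  df_bb_refs_record (new_bb->index, true);
}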
3493
3494
3495 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3496 block. */
3497
3498 static void
3499 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3500 {
3501 bitmap_clear (regular_block_artificial_uses);
3502
3503 if (reload_completed)
3504 {
3505 if (frame_pointer_needed)
3506 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3507 }
3508 else
3509 /* Before reload, there are a few registers that must be forced
3510 live everywhere -- which might not already be the case for
3511 blocks within infinite loops. */
3512 {
3513 /* Any reference to any pseudo before reload is a potential
3514 reference of the frame pointer. */
3515 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3516
3517 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3518 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3519 #endif
3520
3521 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3522 /* Pseudos with argument area equivalences may require
3523 reloading via the argument pointer. */
3524 if (fixed_regs[ARG_POINTER_REGNUM])
3525 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3526 #endif
3527
3528 /* Any constant, or pseudo with constant equivalences, may
3529 require reloading from memory using the pic register. */
3530 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3531 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3532 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3533 }
3534 /* The all-important stack pointer must always be live. */
3535 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3536 }
3537
3538
3539 /* Get the artificial use set for an eh block. */
3540
3541 static void
3542 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3543 {
3544 bitmap_clear (eh_block_artificial_uses);
3545
3546 /* The following code (down through the arg_pointer setting)
3547 APPEARS to be necessary because there is nothing that actually
3548 describes what the exception handling code may actually need
3549 to keep alive. */
3550 if (reload_completed)
3551 {
3552 if (frame_pointer_needed)
3553 {
3554 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3555 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3556 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3557 #endif
3558 }
3559 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3560 if (fixed_regs[ARG_POINTER_REGNUM])
3561 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3562 #endif
3563 }
3564 }
3565
3566
3567 \f
3568 /*----------------------------------------------------------------------------
3569 Specialized hard register scanning functions.
3570 ----------------------------------------------------------------------------*/
3571
3572
3573 /* Mark a register in SET. Hard registers in large modes get all
3574 of their component registers set as well. */
3575
3576 static void
3577 df_mark_reg (rtx reg, void *vset)
3578 {
3579 bitmap set = (bitmap) vset;
3580 int regno = REGNO (reg);
3581
3582 gcc_assert (GET_MODE (reg) != BLKmode);
3583
3584 bitmap_set_bit (set, regno);
3585 if (regno < FIRST_PSEUDO_REGISTER)
3586 {
3587 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3588 while (--n > 0)
3589 bitmap_set_bit (set, regno + n);
3590 }
3591 }
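/* Illustrative sketch (editorial addition): marking a multi-word hard
   register sets one bit per component register, so a DImode value in
   hard reg 0 on a 32-bit target marks regnos 0 and 1.  The register
   number is hypothetical.  */

static void
example_mark_wide_reg (bitmap set)
{
  df_mark_reg (gen_rtx_REG (DImode, 0), set);
}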
3592
3593
3594 /* Set the bit for regs that are considered to be defined at the entry. */
3595
3596 static void
3597 df_get_entry_block_def_set (bitmap entry_block_defs)
3598 {
3599 rtx r;
3600 int i;
3601
3602 bitmap_clear (entry_block_defs);
3603
3604 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3605 {
3606 if (FUNCTION_ARG_REGNO_P (i))
3607 #ifdef INCOMING_REGNO
3608 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3609 #else
3610 bitmap_set_bit (entry_block_defs, i);
3611 #endif
3612 }
3613
3614 /* Once the prologue has been generated, all of these registers
3615 should just show up in the first regular block. */
3616 if (HAVE_prologue && epilogue_completed)
3617 {
3618 /* Defs for the callee saved registers are inserted so that the
3619 pushes have some defining location. */
3620 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3621 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3622 bitmap_set_bit (entry_block_defs, i);
3623 }
3624 else
3625 {
3626 /* The always important stack pointer. */
3627 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3628
3629 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3630 only STATIC_CHAIN_REGNUM is defined. If they are different,
3631 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3632 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3633 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3634 #else
3635 #ifdef STATIC_CHAIN_REGNUM
3636 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3637 #endif
3638 #endif
3639 }
3640
3641 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3642 if (r && REG_P (r))
3643 bitmap_set_bit (entry_block_defs, REGNO (r));
3644
3645 if ((!reload_completed) || frame_pointer_needed)
3646 {
3647 /* Any reference to any pseudo before reload is a potential
3648 reference of the frame pointer. */
3649 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3650 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3651 /* If they are different, also mark the hard frame pointer as live. */
3652 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3653 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3654 #endif
3655 }
3656
3657 /* These registers are live everywhere. */
3658 if (!reload_completed)
3659 {
3660 #ifdef EH_USES
3661 /* The ia-64, the only machine that uses this, does not define these
3662 until after reload. */
3663 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3664 if (EH_USES (i))
3665 {
3666 bitmap_set_bit (entry_block_defs, i);
3667 }
3668 #endif
3669
3670 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3671 /* Pseudos with argument area equivalences may require
3672 reloading via the argument pointer. */
3673 if (fixed_regs[ARG_POINTER_REGNUM])
3674 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3675 #endif
3676
3677 #ifdef PIC_OFFSET_TABLE_REGNUM
3678 /* Any constant, or pseudo with constant equivalences, may
3679 require reloading from memory using the pic register. */
3680 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3681 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3682 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3683 #endif
3684 }
3685
3686 #ifdef INCOMING_RETURN_ADDR_RTX
3687 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3688 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3689 #endif
3690
3691 targetm.live_on_entry (entry_block_defs);
3692
3693 /* If the function has an incoming STATIC_CHAIN,
3694 it has to show up in the entry def set. */
3695 if (df_need_static_chain_reg (cfun))
3696 {
3697 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3698 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3699 #else
3700 #ifdef STATIC_CHAIN_REGNUM
3701 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3702 #endif
3703 #endif
3704 }
3705 }
3706
3707
3708 /* Return the (conservative) set of hard registers that are defined on
3709 entry to the function.
3710 It uses df->entry_block_defs to determine which register
3711 references to include. */
3712
3713 static void
3714 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3715 bitmap entry_block_defs)
3716 {
3717 unsigned int i;
3718 bitmap_iterator bi;
3719
3720 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3721 {
3722 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3723 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1);
3724 }
3725
3726 df_canonize_collection_rec (collection_rec);
3727 }
3728
3729
3730 /* Record the (conservative) set of hard registers that are defined on
3731 entry to the function. */
3732
3733 static void
3734 df_record_entry_block_defs (bitmap entry_block_defs)
3735 {
3736 struct df_collection_rec collection_rec;
3737 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3738 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3739
3740 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3741
3742 /* Process bb_refs chain */
3743 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3744 }
3745
3746
3747 /* Update the defs in the entry block. */
3748
3749 void
3750 df_update_entry_block_defs (void)
3751 {
3752 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3753 bool changed = false;
3754
3755 df_get_entry_block_def_set (refs);
3756 if (df->entry_block_defs)
3757 {
3758 if (!bitmap_equal_p (df->entry_block_defs, refs))
3759 {
3760 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3761 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3762 df_ref_chain_delete (bb_info->artificial_defs);
3763 bb_info->artificial_defs = NULL;
3764 changed = true;
3765 }
3766 }
3767 else
3768 {
3769 struct df_scan_problem_data *problem_data
3770 = (struct df_scan_problem_data *) df_scan->problem_data;
3771 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3772 changed = true;
3773 }
3774
3775 if (changed)
3776 {
3777 df_record_entry_block_defs (refs);
3778 bitmap_copy (df->entry_block_defs, refs);
3779 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3780 }
3781 BITMAP_FREE (refs);
3782 }
3783
3784
3785 /* Set the bit for regs that are considered to be used at the exit. */
3786
3787 static void
3788 df_get_exit_block_use_set (bitmap exit_block_uses)
3789 {
3790 unsigned int i;
3791
3792 bitmap_clear (exit_block_uses);
3793
3794 /* Stack pointer is always live at the exit. */
3795 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3796
3797 /* Mark the frame pointer if needed at the end of the function.
3798 If we end up eliminating it, it will be removed from the live
3799 list of each basic block by reload. */
3800
3801 if ((!reload_completed) || frame_pointer_needed)
3802 {
3803 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3804 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3805 /* If they are different, also mark the hard frame pointer as live. */
3806 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3807 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3808 #endif
3809 }
3810
3811 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3812 /* Many architectures have a GP register even without flag_pic.
3813 Assume the pic register is not in use, or will be handled by
3814 other means, if it is not fixed. */
3815 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3816 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3817 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3818 #endif
3819
3820 /* Mark all global registers, and all registers used by the
3821 epilogue as being live at the end of the function since they
3822 may be referenced by our caller. */
3823 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3824 if (global_regs[i] || EPILOGUE_USES (i))
3825 bitmap_set_bit (exit_block_uses, i);
3826
3827 if (HAVE_epilogue && epilogue_completed)
3828 {
3829 /* Mark all call-saved registers that we actually used. */
3830 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3831 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3832 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3833 bitmap_set_bit (exit_block_uses, i);
3834 }
3835
3836 #ifdef EH_RETURN_DATA_REGNO
3837 /* Mark the registers that will contain data for the handler. */
3838 if (reload_completed && current_function_calls_eh_return)
3839 for (i = 0; ; ++i)
3840 {
3841 unsigned regno = EH_RETURN_DATA_REGNO (i);
3842 if (regno == INVALID_REGNUM)
3843 break;
3844 bitmap_set_bit (exit_block_uses, regno);
3845 }
3846 #endif
3847
3848 #ifdef EH_RETURN_STACKADJ_RTX
3849 if ((!HAVE_epilogue || ! epilogue_completed)
3850 && current_function_calls_eh_return)
3851 {
3852 rtx tmp = EH_RETURN_STACKADJ_RTX;
3853 if (tmp && REG_P (tmp))
3854 df_mark_reg (tmp, exit_block_uses);
3855 }
3856 #endif
3857
3858 #ifdef EH_RETURN_HANDLER_RTX
3859 if ((!HAVE_epilogue || ! epilogue_completed)
3860 && current_function_calls_eh_return)
3861 {
3862 rtx tmp = EH_RETURN_HANDLER_RTX;
3863 if (tmp && REG_P (tmp))
3864 df_mark_reg (tmp, exit_block_uses);
3865 }
3866 #endif
3867
3868 /* Mark function return value. */
3869 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3870 }
3871
3872
3873 /* Collect the refs of hard registers that are used in the exit block.
3874 EXIT_BLOCK_USES determines which registers to include. */
3875
3876 static void
3877 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3878 {
3879 unsigned int i;
3880 bitmap_iterator bi;
3881
3882 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3883 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3884 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1);
3885
3886 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3887 /* It is deliberate that this is not put in the exit block uses,
3888 but it is not clear why. */
3889 if (reload_completed
3890 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3891 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3892 && fixed_regs[ARG_POINTER_REGNUM])
3893 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3894 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1);
3895 #endif
3896
3897 df_canonize_collection_rec (collection_rec);
3898 }
3899
3900
3901 /* Record the set of hard registers that are used in the exit block.
3902 EXIT_BLOCK_USES determines which registers to include. */
3903
3904 static void
3905 df_record_exit_block_uses (bitmap exit_block_uses)
3906 {
3907 struct df_collection_rec collection_rec;
3908 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3909 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3910
3911 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3912
3913 /* Process the bb_refs chain.  */
3914 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3915 }
3916
3917
3918 /* Update the uses in the exit block. */
3919
3920 void
3921 df_update_exit_block_uses (void)
3922 {
3923 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3924 bool changed = false;
3925
3926 df_get_exit_block_use_set (refs);
3927 if (df->exit_block_uses)
3928 {
3929 if (!bitmap_equal_p (df->exit_block_uses, refs))
3930 {
3931 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3932 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3933 df_ref_chain_delete (bb_info->artificial_uses);
3934 bb_info->artificial_uses = NULL;
3935 changed = true;
3936 }
3937 }
3938 else
3939 {
3940 struct df_scan_problem_data *problem_data
3941 = (struct df_scan_problem_data *) df_scan->problem_data;
3942 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3943 changed = true;
3944 }
3945
3946 if (changed)
3947 {
3948 df_record_exit_block_uses (refs);
3949 bitmap_copy (df->exit_block_uses, refs);
3950 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3951 }
3952 BITMAP_FREE (refs);
3953 }
3954
3955 static bool initialized = false;
3956
3957
3958 /* Initialize some platform specific structures. */
3959
3960 void
3961 df_hard_reg_init (void)
3962 {
3963 int i;
3964 #ifdef ELIMINABLE_REGS
3965 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3966 #endif
3967 if (initialized)
3968 return;
3969
3970 bitmap_obstack_initialize (&persistent_obstack);
3971
3972 /* Record which registers will be eliminated. We use this in
3973 mark_used_regs. */
3974 CLEAR_HARD_REG_SET (elim_reg_set);
3975
3976 #ifdef ELIMINABLE_REGS
3977 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3978 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3979 #else
3980 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3981 #endif
3982
3983 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
3984
3985 /* Inconveniently, this is only readily available in hard reg set
3986 form. */
3987 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3988 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3989 bitmap_set_bit (df_invalidated_by_call, i);
3990
3991 initialized = true;
3992 }
3993
3994
3995 /* Recompute the parts of scanning that are based on regs_ever_live
3996 because something changed in that array. */
3997
3998 void
3999 df_update_entry_exit_and_calls (void)
4000 {
4001 basic_block bb;
4002
4003 df_update_entry_block_defs ();
4004 df_update_exit_block_uses ();
4005
4006 /* The call insns need to be rescanned because there may be changes
4007 in the set of registers clobbered across the call. */
4008 FOR_EACH_BB (bb)
4009 {
4010 rtx insn;
4011 FOR_BB_INSNS (bb, insn)
4012 {
4013 if (INSN_P (insn) && CALL_P (insn))
4014 df_insn_rescan (insn);
4015 }
4016 }
4017 }
4018
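/* Illustrative usage sketch; the helper and REGNO are hypothetical.
   A transformation that starts using a hard register would record
   that fact and then refresh the boundary refs and call insn scans.  */
#if 0
static void
start_using_hard_reg_example (unsigned int regno)
{
  df_set_regs_ever_live (regno, true);
  df_update_entry_exit_and_calls ();
}
#endif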
4019
4020 /* Return true if hard REG is actually used in some instruction.
4021 There are a fair number of conditions that affect the setting of
4022 this array. See the comment in df.h for df->hard_regs_live_count
4023 for the conditions under which this array is set. */
4024
4025 bool
4026 df_hard_reg_used_p (unsigned int reg)
4027 {
4028 gcc_assert (df);
4029 return df->hard_regs_live_count[reg] != 0;
4030 }
4031
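/* A sketch of a typical query (hypothetical helper, illustration
   only): find a call-clobbered hard register with no occurrences in
   the current function, e.g. as a scratch candidate.  Returns
   INVALID_REGNUM if every candidate is used somewhere.  */
#if 0
static unsigned int
find_unused_call_clobbered_reg_example (void)
{
  unsigned int i;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
        && !fixed_regs[i]
        && !global_regs[i]
        && !df_hard_reg_used_p (i))
      return i;
  return INVALID_REGNUM;
}
#endif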
4032
4033 /* Return a count of the number of times REG is actually used in some
4034 instruction. There are a fair number of conditions that affect the
4035 setting of this array. See the comment in df.h for
4036 df->hard_regs_live_count for the conditions under which this array
4037 is set. */
4038
4040 unsigned int
4041 df_hard_reg_used_count (unsigned int reg)
4042 {
4043 gcc_assert (df);
4044 return df->hard_regs_live_count[reg];
4045 }
4046
4047
4048 /* Get the value of regs_ever_live[REGNO]. */
4049
4050 bool
4051 df_regs_ever_live_p (unsigned int regno)
4052 {
4053 return regs_ever_live[regno];
4054 }
4055
4056
4057 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
4058 to change, schedule that change for the next update. */
4059
4060 void
4061 df_set_regs_ever_live (unsigned int regno, bool value)
4062 {
4063 if (regs_ever_live[regno] == value)
4064 return;
4065
4066 regs_ever_live[regno] = value;
4067 if (df)
4068 df->redo_entry_and_exit = true;
4069 }
4070
4071
4072 /* Compute "regs_ever_live" information from the underlying df
4073 information. Set the vector to all false if RESET. */
4074
4075 void
4076 df_compute_regs_ever_live (bool reset)
4077 {
4078 unsigned int i;
4079 bool changed = df->redo_entry_and_exit;
4080
4081 if (reset)
4082 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4083
4084 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4085 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4086 {
4087 regs_ever_live[i] = true;
4088 changed = true;
4089 }
4090 if (changed)
4091 df_update_entry_exit_and_calls ();
4092 df->redo_entry_and_exit = false;
4093 }
4094
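/* Sketch of how the three regs_ever_live entry points above interact
   (hypothetical fragment; register number 0 is only a placeholder):
   setting a bit merely schedules the entry/exit recomputation, which
   df_compute_regs_ever_live then carries out.  */
#if 0
static void
ever_live_update_example (void)
{
  if (!df_regs_ever_live_p (0))
    df_set_regs_ever_live (0, true);   /* Schedules redo_entry_and_exit.  */
  df_compute_regs_ever_live (false);   /* Performs the scheduled update.  */
}
#endif
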
4095 \f
4096 /*----------------------------------------------------------------------------
4097 Dataflow ref information verification functions.
4098
4099 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4100 df_reg_chain_verify_unmarked (refs)
4101 df_refs_verify (ref*, ref*, bool)
4102 df_mws_verify (mw*, mw*, bool)
4103 df_insn_refs_verify (collection_rec, bb, insn, bool)
4104 df_bb_refs_verify (bb, refs, bool)
4105 df_bb_verify (bb)
4106 df_exit_block_bitmap_verify (bool)
4107 df_entry_block_bitmap_verify (bool)
4108 df_scan_verify ()
4109 ----------------------------------------------------------------------------*/
4110
4111
4112 /* Mark all refs in the reg chain. Verify that all of the registers
4113 are in the correct chain. */
4114
4115 static unsigned int
4116 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4117 bool is_def, bool is_eq_use)
4118 {
4119 unsigned int count = 0;
4120 struct df_ref *ref;
4121 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4122 {
4123 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4124
4125 /* If there are no def-use or use-def chains, make sure that all
4126 of the chains are clear. */
4127 if (!df_chain)
4128 gcc_assert (!DF_REF_CHAIN (ref));
4129
4130 /* Check to make sure the ref is in the correct chain. */
4131 gcc_assert (DF_REF_REGNO (ref) == regno);
4132 if (is_def)
4133 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4134 else
4135 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4136
4137 if (is_eq_use)
4138 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4139 else
4140 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4141
4142 if (ref->next_reg)
4143 gcc_assert (ref->next_reg->prev_reg == ref);
4144 count++;
4145 DF_REF_REG_MARK (ref);
4146 }
4147 return count;
4148 }
4149
4150
4151 /* Verify that all of the registers in the chain are unmarked. */
4152
4153 static void
4154 df_reg_chain_verify_unmarked (struct df_ref *refs)
4155 {
4156 struct df_ref *ref;
4157 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4158 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4159 }
4160
4161
4162 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4163
4164 static bool
4165 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4166 bool abort_if_fail)
4167 {
4168 while ((*new_rec) && (*old_rec))
4169 {
4170 if (!df_ref_equal_p (*new_rec, *old_rec))
4171 {
4172 if (abort_if_fail)
4173 gcc_assert (0);
4174 else
4175 return false;
4176 }
4177
4178 /* ABORT_IF_FAIL is set when called from the function-level verifier;
4179 in that context, record that this ref has been seen by unmarking it. */
4180 if (abort_if_fail)
4181 {
4182 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4183 DF_REF_REG_UNMARK (*old_rec);
4184 }
4185
4186 new_rec++;
4187 old_rec++;
4188 }
4189
4190 if (abort_if_fail)
4191 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4192 else
4193 return ((*new_rec == NULL) && (*old_rec == NULL));
4194 return false;
4195 }
4196
4197
4198 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4199
4200 static bool
4201 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4202 bool abort_if_fail)
4203 {
4204 while ((*new_rec) && (*old_rec))
4205 {
4206 if (!df_mw_equal_p (*new_rec, *old_rec))
4207 {
4208 if (abort_if_fail)
4209 gcc_assert (0);
4210 else
4211 return false;
4212 }
4213 new_rec++;
4214 old_rec++;
4215 }
4216
4217 if (abort_if_fail)
4218 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4219 else
4220 return ((*new_rec == NULL) && (*old_rec == NULL));
4221 return false;
4222 }
4223
4224
4225 /* Return true if the existing insn refs information for INSN is
4226 complete and correct. Otherwise (i.e. if there are any missing or
4227 extra refs), the freshly collected refs are left in COLLECTION_REC.
4228
4229 If ABORT_IF_FAIL, this is the whole-function verification: each
4230 verified ref must already be marked by df_reg_chain_mark and has
4231 its mark cleared here. If it is false, this is a per-insn
4232 verification and the marks are left untouched.
4233
4234 If ABORT_IF_FAIL is set, this function never returns false. */
4235
4236 static bool
4237 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4238 basic_block bb,
4239 rtx insn,
4240 bool abort_if_fail)
4241 {
4242 bool ret1, ret2, ret3, ret4;
4243 unsigned int uid = INSN_UID (insn);
4244
4245 df_insn_refs_collect (collection_rec, bb, insn);
4246
4247 if (!DF_INSN_UID_DEFS (uid))
4248 {
4249 /* The insn_rec was created but it was never filled out. */
4250 if (abort_if_fail)
4251 gcc_assert (0);
4252 else
4253 return false;
4254 }
4255
4256 /* Unfortunately we cannot bail out early if one of these checks
4257 fails, because the marks would not get cleared. */
4258 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4259 abort_if_fail);
4260 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4261 abort_if_fail);
4262 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4263 abort_if_fail);
4264 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4265 abort_if_fail);
4266 return (ret1 && ret2 && ret3 && ret4);
4267 }
4268
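/* Sketch of the non-aborting mode (hypothetical caller, mirroring the
   scratch-vector setup used by df_bb_verify below): collect into a
   temporary rec, compare without aborting, and free the rec.  */
#if 0
static bool
insn_refs_up_to_date_example (basic_block bb, rtx insn)
{
  struct df_collection_rec rec;
  bool ok;
  memset (&rec, 0, sizeof (struct df_collection_rec));
  rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
  rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
  rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
  rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
  ok = df_insn_refs_verify (&rec, bb, insn, false);
  df_free_collection_rec (&rec);
  return ok;
}
#endif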
4269
4270 /* Return true if all refs in the basic block are correct and complete.
4271 Due to the abort mode of df_refs_verify, all refs
4272 that are verified will have their DF_REF_MARK bit cleared. */
4273
4274 static bool
4275 df_bb_verify (basic_block bb)
4276 {
4277 rtx insn;
4278 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4279 struct df_collection_rec collection_rec;
4280
4281 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
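/* Fixed-size scratch vectors; the verification code assumes no insn
   or block yields more refs than will fit here.  */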
4282 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
4283 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
4284 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
4285 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
4286
4287 gcc_assert (bb_info);
4288
4289 /* Scan the block, an insn at a time, from end to beginning. */
4290 FOR_BB_INSNS_REVERSE (bb, insn)
4291 {
4292 if (!INSN_P (insn))
4293 continue;
4294 df_insn_refs_verify (&collection_rec, bb, insn, true);
4295 df_free_collection_rec (&collection_rec);
4296 }
4297
4298 /* Do the artificial defs and uses. */
4299 df_bb_refs_collect (&collection_rec, bb);
4300 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4301 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4302 df_free_collection_rec (&collection_rec);
4303
4304 return true;
4305 }
4306
4307
4308 /* Return true if the entry block has a correct and complete df_ref set.
4309 If not, abort if ABORT_IF_FAIL is true, otherwise return false. */
4310
4311 static bool
4312 df_entry_block_bitmap_verify (bool abort_if_fail)
4313 {
4314 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4315 bool is_eq;
4316
4317 df_get_entry_block_def_set (entry_block_defs);
4318
4319 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4320
4321 if (!is_eq && abort_if_fail)
4322 {
4323 print_current_pass (stderr);
4324 fprintf (stderr, "entry_block_defs = ");
4325 df_print_regset (stderr, entry_block_defs);
4326 fprintf (stderr, "df->entry_block_defs = ");
4327 df_print_regset (stderr, df->entry_block_defs);
4328 gcc_assert (0);
4329 }
4330
4331 BITMAP_FREE (entry_block_defs);
4332
4333 return is_eq;
4334 }
4335
4336
4337 /* Return true if the exit block has a correct and complete df_ref set.
4338 If not, abort if ABORT_IF_FAIL is true, otherwise return false. */
4339
4340 static bool
4341 df_exit_block_bitmap_verify (bool abort_if_fail)
4342 {
4343 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4344 bool is_eq;
4345
4346 df_get_exit_block_use_set (exit_block_uses);
4347
4348 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4349
4350 if (!is_eq && abort_if_fail)
4351 {
4352 print_current_pass (stderr);
4353 fprintf (stderr, "exit_block_uses = ");
4354 df_print_regset (stderr, exit_block_uses);
4355 fprintf (stderr, "df->exit_block_uses = ");
4356 df_print_regset (stderr, df->exit_block_uses);
4357 gcc_assert (0);
4358 }
4359
4360 BITMAP_FREE (exit_block_uses);
4361
4362 return is_eq;
4363 }
4364
4365
4366 /* Verify that the df_ref information for all insns in all blocks is
4367 correct and complete, aborting on the first inconsistency
4368 found. */
4369
4370 void
4371 df_scan_verify (void)
4372 {
4373 unsigned int i;
4374 basic_block bb;
4375 bitmap regular_block_artificial_uses;
4376 bitmap eh_block_artificial_uses;
4377
4378 if (!df)
4379 return;
4380
4381 /* Verification is a four step process. */
4382
4383 /* (1) All of the refs are marked by going through the reg chains. */
4384 for (i = 0; i < DF_REG_SIZE (df); i++)
4385 {
4386 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4387 == DF_REG_DEF_COUNT(i));
4388 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4389 == DF_REG_USE_COUNT(i));
4390 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4391 == DF_REG_EQ_USE_COUNT(i));
4392 }
4393
4394 /* (2) There are various bitmaps whose value may change over the
4395 course of the compilation. This step recomputes them to make
4396 sure that they have not slipped out of date. */
4397 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4398 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4399
4400 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4401 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4402
4403 bitmap_ior_into (eh_block_artificial_uses,
4404 regular_block_artificial_uses);
4405
4406 /* Check that the artificial_uses bitmaps did not change. */
4407 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4408 df->regular_block_artificial_uses));
4409 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4410 df->eh_block_artificial_uses));
4411
4412 BITMAP_FREE (regular_block_artificial_uses);
4413 BITMAP_FREE (eh_block_artificial_uses);
4414
4415 /* Verify the entry block and exit block. These only verify the
4416 bitmaps; the refs themselves are verified in df_bb_verify. */
4417 df_entry_block_bitmap_verify (true);
4418 df_exit_block_bitmap_verify (true);
4419
4420 /* (3) All of the insns in all of the blocks are traversed and the
4421 marks are cleared both in the artificial refs attached to the
4422 blocks and the real refs inside the insns. It is a failure to
4423 clear a mark that has not been set as this means that the ref in
4424 the block or insn was not in the reg chain. */
4425
4426 FOR_ALL_BB (bb)
4427 df_bb_verify (bb);
4428
4429 /* (4) All reg chains are traversed a second time. This time
4430 a check is made that the marks are clear. A set mark would be
4431 from a ref that is not in any insn or basic block. */
4432
4433 for (i = 0; i < DF_REG_SIZE (df); i++)
4434 {
4435 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4436 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4437 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4438 }
4439 }
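

/* Illustrative sketch (hypothetical, and only meaningful in checking
   builds): a debugging hook that runs the full verifier after a
   rescan.  */
#if 0
static void
rescan_and_verify_example (rtx insn)
{
  df_insn_rescan (insn);
#ifdef ENABLE_CHECKING
  df_scan_verify ();
#endif
}
#endif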