gcc/df-scan.c
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999-2014 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "alloc-pool.h"
35 #include "flags.h"
36 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "sbitmap.h"
39 #include "bitmap.h"
40 #include "dumpfile.h"
41 #include "tree.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "df.h"
45 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
46
47
48 typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
49
50
51 #ifndef HAVE_epilogue
52 #define HAVE_epilogue 0
53 #endif
54 #ifndef HAVE_prologue
55 #define HAVE_prologue 0
56 #endif
57 #ifndef HAVE_sibcall_epilogue
58 #define HAVE_sibcall_epilogue 0
59 #endif
60
61 #ifndef EPILOGUE_USES
62 #define EPILOGUE_USES(REGNO) 0
63 #endif
64
 65 /* The following two macros free the vecs that hold either the refs or
 66    the mw refs.  They are a little tricky because a vec that has 0
 67    elements is special and is not to be freed.  */
68 #define df_scan_free_ref_vec(V) \
69 do { \
70 if (V && *V) \
71 free (V); \
72 } while (0)
73
74 #define df_scan_free_mws_vec(V) \
75 do { \
76 if (V && *V) \
77 free (V); \
78 } while (0)
79
80 /* The set of hard registers in eliminables[i].from. */
81
82 static HARD_REG_SET elim_reg_set;
83
 84 /* A collection of refs (and multiword hardregs) gathered while scanning
 85    an insn or basic block, before being installed in the df tables.  */
86
87 struct df_collection_rec
88 {
89 auto_vec<df_ref, 128> def_vec;
90 auto_vec<df_ref, 32> use_vec;
91 auto_vec<df_ref, 32> eq_use_vec;
92 auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
93 };
94
95 static df_ref df_null_ref_rec[1];
96 static struct df_mw_hardreg * df_null_mw_rec[1];
97
98 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
99 rtx, rtx *,
100 basic_block, struct df_insn_info *,
101 enum df_ref_type, int ref_flags);
102 static void df_def_record_1 (struct df_collection_rec *, rtx *,
103 basic_block, struct df_insn_info *,
104 int ref_flags);
105 static void df_defs_record (struct df_collection_rec *, rtx,
106 basic_block, struct df_insn_info *,
107 int ref_flags);
108 static void df_uses_record (struct df_collection_rec *,
109 rtx *, enum df_ref_type,
110 basic_block, struct df_insn_info *,
111 int ref_flags);
112
113 static void df_install_ref_incremental (df_ref);
114 static df_ref df_ref_create_structure (enum df_ref_class,
115 struct df_collection_rec *, rtx, rtx *,
116 basic_block, struct df_insn_info *,
117 enum df_ref_type, int ref_flags);
118 static void df_insn_refs_collect (struct df_collection_rec*,
119 basic_block, struct df_insn_info *);
120 static void df_canonize_collection_rec (struct df_collection_rec *);
121
122 static void df_get_regular_block_artificial_uses (bitmap);
123 static void df_get_eh_block_artificial_uses (bitmap);
124
125 static void df_record_entry_block_defs (bitmap);
126 static void df_record_exit_block_uses (bitmap);
127 static void df_get_exit_block_use_set (bitmap);
128 static void df_get_entry_block_def_set (bitmap);
129 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
130 static void df_ref_chain_delete_du_chain (df_ref *);
131 static void df_ref_chain_delete (df_ref *);
132
133 static void df_refs_add_to_chains (struct df_collection_rec *,
134 basic_block, rtx, unsigned int);
135
136 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
137 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
138 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
139 static void df_install_ref (df_ref, struct df_reg_info *,
140 struct df_ref_info *, bool);
141
142 static int df_ref_compare (const void *, const void *);
143 static int df_mw_compare (const void *, const void *);
144
145 static void df_insn_info_delete (unsigned int);
146
147 /* Indexed by hardware reg number, is true if that register is ever
148 used in the current function.
149
150 In df-scan.c, this is set up to record the hard regs used
151 explicitly. Reload adds in the hard regs used for holding pseudo
152 regs. Final uses it to generate the code in the function prologue
153 and epilogue to save and restore registers as needed. */
154
155 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
156
157 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
158 static const unsigned int copy_defs = 0x1;
159 static const unsigned int copy_uses = 0x2;
160 static const unsigned int copy_eq_uses = 0x4;
161 static const unsigned int copy_mw = 0x8;
162 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
163 | copy_mw;
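/* Usage sketch (illustrative addition, not in the original source):
   callers combine these flags to tell df_refs_add_to_chains which of
   the collected vectors to install, e.g.

     df_refs_add_to_chains (&collection_rec, bb, insn, copy_defs | copy_uses);

   where collection_rec, bb and insn are hypothetical locals.  */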
164 \f
165 /*----------------------------------------------------------------------------
166 SCANNING DATAFLOW PROBLEM
167
168 There are several ways in which scanning looks just like the other
169    dataflow problems.  It shares all of the mechanisms for local info
170 as well as basic block info. Where it differs is when and how often
171 it gets run. It also has no need for the iterative solver.
172 ----------------------------------------------------------------------------*/
173
174 /* Problem data for the scanning dataflow function. */
175 struct df_scan_problem_data
176 {
177 alloc_pool ref_base_pool;
178 alloc_pool ref_artificial_pool;
179 alloc_pool ref_regular_pool;
180 alloc_pool insn_pool;
181 alloc_pool reg_pool;
182 alloc_pool mw_reg_pool;
183 bitmap_obstack reg_bitmaps;
184 bitmap_obstack insn_bitmaps;
185 };
186
187 typedef struct df_scan_bb_info *df_scan_bb_info_t;
188
189
190 /* Internal function to shut down the scanning problem. */
191 static void
192 df_scan_free_internal (void)
193 {
194 struct df_scan_problem_data *problem_data
195 = (struct df_scan_problem_data *) df_scan->problem_data;
196 unsigned int i;
197 basic_block bb;
198
199 /* The vectors that hold the refs are not pool allocated because
200 they come in many sizes. This makes them impossible to delete
201 all at once. */
202 for (i = 0; i < DF_INSN_SIZE (); i++)
203 {
204 struct df_insn_info *insn_info = DF_INSN_UID_GET (i);
205 /* Skip the insns that have no insn_info or have been
206 deleted. */
207 if (insn_info)
208 {
209 df_scan_free_ref_vec (insn_info->defs);
210 df_scan_free_ref_vec (insn_info->uses);
211 df_scan_free_ref_vec (insn_info->eq_uses);
212 df_scan_free_mws_vec (insn_info->mw_hardregs);
213 }
214 }
215
216 FOR_ALL_BB_FN (bb, cfun)
217 {
218 unsigned int bb_index = bb->index;
219 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
220 if (bb_info)
221 {
222 df_scan_free_ref_vec (bb_info->artificial_defs);
223 df_scan_free_ref_vec (bb_info->artificial_uses);
224 }
225 }
226
227 free (df->def_info.refs);
228 free (df->def_info.begin);
229 free (df->def_info.count);
230 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
231
232 free (df->use_info.refs);
233 free (df->use_info.begin);
234 free (df->use_info.count);
235 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
236
237 free (df->def_regs);
238 df->def_regs = NULL;
239 free (df->use_regs);
240 df->use_regs = NULL;
241 free (df->eq_use_regs);
242 df->eq_use_regs = NULL;
243 df->regs_size = 0;
244 DF_REG_SIZE (df) = 0;
245
246 free (df->insns);
247 df->insns = NULL;
248 DF_INSN_SIZE () = 0;
249
250 free (df_scan->block_info);
251 df_scan->block_info = NULL;
252 df_scan->block_info_size = 0;
253
254 bitmap_clear (&df->hardware_regs_used);
255 bitmap_clear (&df->regular_block_artificial_uses);
256 bitmap_clear (&df->eh_block_artificial_uses);
257 BITMAP_FREE (df->entry_block_defs);
258 BITMAP_FREE (df->exit_block_uses);
259 bitmap_clear (&df->insns_to_delete);
260 bitmap_clear (&df->insns_to_rescan);
261 bitmap_clear (&df->insns_to_notes_rescan);
262
263 free_alloc_pool (problem_data->ref_base_pool);
264 free_alloc_pool (problem_data->ref_artificial_pool);
265 free_alloc_pool (problem_data->ref_regular_pool);
266 free_alloc_pool (problem_data->insn_pool);
267 free_alloc_pool (problem_data->reg_pool);
268 free_alloc_pool (problem_data->mw_reg_pool);
269 bitmap_obstack_release (&problem_data->reg_bitmaps);
270 bitmap_obstack_release (&problem_data->insn_bitmaps);
271 free (df_scan->problem_data);
272 }
273
274
275 /* Free basic block info. */
276
277 static void
278 df_scan_free_bb_info (basic_block bb, void *vbb_info)
279 {
280 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
281 unsigned int bb_index = bb->index;
282
283 /* See if bb_info is initialized. */
284 if (bb_info->artificial_defs)
285 {
286 rtx insn;
287 FOR_BB_INSNS (bb, insn)
288 {
289 if (INSN_P (insn))
290 df_insn_info_delete (INSN_UID (insn));
291 }
292
293 if (bb_index < df_scan->block_info_size)
294 bb_info = df_scan_get_bb_info (bb_index);
295
296 /* Get rid of any artificial uses or defs. */
297 if (bb_info->artificial_defs)
298 {
299 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
300 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
301 df_ref_chain_delete (bb_info->artificial_defs);
302 df_ref_chain_delete (bb_info->artificial_uses);
303 bb_info->artificial_defs = NULL;
304 bb_info->artificial_uses = NULL;
305 }
306 }
307 }
308
309
310 /* Allocate the problem data for the scanning problem. This should be
311 called when the problem is created or when the entire function is to
312 be rescanned. */
313 void
314 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
315 {
316 struct df_scan_problem_data *problem_data;
317 unsigned int insn_num = get_max_uid () + 1;
318 unsigned int block_size = 512;
319 basic_block bb;
320
321 /* Given the number of pools, this is really faster than tearing
322 everything apart. */
323 if (df_scan->problem_data)
324 df_scan_free_internal ();
325
326 problem_data = XNEW (struct df_scan_problem_data);
327 df_scan->problem_data = problem_data;
328 df_scan->computed = true;
329
330 problem_data->ref_base_pool
331 = create_alloc_pool ("df_scan ref base",
332 sizeof (struct df_base_ref), block_size);
333 problem_data->ref_artificial_pool
334 = create_alloc_pool ("df_scan ref artificial",
335 sizeof (struct df_artificial_ref), block_size);
336 problem_data->ref_regular_pool
337 = create_alloc_pool ("df_scan ref regular",
338 sizeof (struct df_regular_ref), block_size);
339 problem_data->insn_pool
340 = create_alloc_pool ("df_scan insn",
341 sizeof (struct df_insn_info), block_size);
342 problem_data->reg_pool
343 = create_alloc_pool ("df_scan reg",
344 sizeof (struct df_reg_info), block_size);
345 problem_data->mw_reg_pool
346 = create_alloc_pool ("df_scan mw_reg",
347 sizeof (struct df_mw_hardreg), block_size / 16);
348
349 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
350 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
351
352 insn_num += insn_num / 4;
353 df_grow_reg_info ();
354
355 df_grow_insn_info ();
356 df_grow_bb_info (df_scan);
357
358 FOR_ALL_BB_FN (bb, cfun)
359 {
360 unsigned int bb_index = bb->index;
361 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
362 bb_info->artificial_defs = NULL;
363 bb_info->artificial_uses = NULL;
364 }
365
366 bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
367 bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
368 bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
369 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
370 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
371 bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
372 bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
373 bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
374 df_scan->optional_p = false;
375 }
376
377
378 /* Free all of the data associated with the scan problem. */
379
380 static void
381 df_scan_free (void)
382 {
383 if (df_scan->problem_data)
384 df_scan_free_internal ();
385
386 if (df->blocks_to_analyze)
387 {
388 BITMAP_FREE (df->blocks_to_analyze);
389 df->blocks_to_analyze = NULL;
390 }
391
392 free (df_scan);
393 }
394
395 /* Dump the preamble for DF_SCAN dump. */
396 static void
397 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
398 {
399 int i;
400 int dcount = 0;
401 int ucount = 0;
402 int ecount = 0;
403 int icount = 0;
404 int ccount = 0;
405 basic_block bb;
406 rtx insn;
407
408 fprintf (file, ";; invalidated by call \t");
409 df_print_regset (file, regs_invalidated_by_call_regset);
410 fprintf (file, ";; hardware regs used \t");
411 df_print_regset (file, &df->hardware_regs_used);
412 fprintf (file, ";; regular block artificial uses \t");
413 df_print_regset (file, &df->regular_block_artificial_uses);
414 fprintf (file, ";; eh block artificial uses \t");
415 df_print_regset (file, &df->eh_block_artificial_uses);
416 fprintf (file, ";; entry block defs \t");
417 df_print_regset (file, df->entry_block_defs);
418 fprintf (file, ";; exit block uses \t");
419 df_print_regset (file, df->exit_block_uses);
420 fprintf (file, ";; regs ever live \t");
421 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
422 if (df_regs_ever_live_p (i))
423 fprintf (file, " %d[%s]", i, reg_names[i]);
424 fprintf (file, "\n;; ref usage \t");
425
426 for (i = 0; i < (int)df->regs_inited; i++)
427 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
428 {
429 const char * sep = "";
430
431 fprintf (file, "r%d={", i);
432 if (DF_REG_DEF_COUNT (i))
433 {
434 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
435 sep = ",";
436 dcount += DF_REG_DEF_COUNT (i);
437 }
438 if (DF_REG_USE_COUNT (i))
439 {
440 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
441 sep = ",";
442 ucount += DF_REG_USE_COUNT (i);
443 }
444 if (DF_REG_EQ_USE_COUNT (i))
445 {
446 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
447 ecount += DF_REG_EQ_USE_COUNT (i);
448 }
449 fprintf (file, "} ");
450 }
451
452 FOR_EACH_BB_FN (bb, cfun)
453 FOR_BB_INSNS (bb, insn)
454 if (INSN_P (insn))
455 {
456 if (CALL_P (insn))
457 ccount++;
458 else
459 icount++;
460 }
461
462 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
463 " in %d{%d regular + %d call} insns.\n",
464 dcount + ucount + ecount, dcount, ucount, ecount,
465 icount + ccount, icount, ccount);
466 }
467
468 /* Dump the bb_info for a given basic block. */
469 static void
470 df_scan_start_block (basic_block bb, FILE *file)
471 {
472 struct df_scan_bb_info *bb_info
473 = df_scan_get_bb_info (bb->index);
474
475 if (bb_info)
476 {
477 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
478 df_refs_chain_dump (bb_info->artificial_defs, true, file);
479 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
480 df_refs_chain_dump (bb_info->artificial_uses, true, file);
481 fprintf (file, "\n");
482 }
483 #if 0
484 {
485 rtx insn;
486 FOR_BB_INSNS (bb, insn)
487 if (INSN_P (insn))
488 df_insn_debug (insn, false, file);
489 }
490 #endif
491 }
492
493 static struct df_problem problem_SCAN =
494 {
495 DF_SCAN, /* Problem id. */
496 DF_NONE, /* Direction. */
497 df_scan_alloc, /* Allocate the problem specific data. */
498 NULL, /* Reset global information. */
499 df_scan_free_bb_info, /* Free basic block info. */
500 NULL, /* Local compute function. */
501 NULL, /* Init the solution specific data. */
502 NULL, /* Iterative solver. */
503 NULL, /* Confluence operator 0. */
504 NULL, /* Confluence operator n. */
505 NULL, /* Transfer function. */
506 NULL, /* Finalize function. */
507 df_scan_free, /* Free all of the problem information. */
508 NULL, /* Remove this problem from the stack of dataflow problems. */
509 df_scan_start_dump, /* Debugging. */
510 df_scan_start_block, /* Debugging start block. */
511 NULL, /* Debugging end block. */
512 NULL, /* Debugging start insn. */
513 NULL, /* Debugging end insn. */
514 NULL, /* Incremental solution verify start. */
515 NULL, /* Incremental solution verify end. */
516 NULL, /* Dependent problem. */
517 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
518 TV_DF_SCAN, /* Timing variable. */
519 false /* Reset blocks on dropping out of blocks_to_analyze. */
520 };
521
522
523 /* Create a new DATAFLOW instance and add it to an existing instance
524 of DF. The returned structure is what is used to get at the
525 solution. */
526
527 void
528 df_scan_add_problem (void)
529 {
530 df_add_problem (&problem_SCAN);
531 }
532
533 \f
534 /*----------------------------------------------------------------------------
535 Storage Allocation Utilities
536 ----------------------------------------------------------------------------*/
537
538
539 /* First, grow the reg_info information. If the current size is less than
540 the number of pseudos, grow to 25% more than the number of
541 pseudos.
542
543    Second, ensure that all of the slots up to max_reg_num have been
544 filled with reg_info structures. */
545
546 void
547 df_grow_reg_info (void)
548 {
549 unsigned int max_reg = max_reg_num ();
550 unsigned int new_size = max_reg;
551 struct df_scan_problem_data *problem_data
552 = (struct df_scan_problem_data *) df_scan->problem_data;
553 unsigned int i;
554
555 if (df->regs_size < new_size)
556 {
557 new_size += new_size / 4;
558 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
559 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
560 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
561 new_size);
562 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
563 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
564 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
565 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
566 df->regs_size = new_size;
567 }
568
569 for (i = df->regs_inited; i < max_reg; i++)
570 {
571 struct df_reg_info *reg_info;
572
573 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
574 memset (reg_info, 0, sizeof (struct df_reg_info));
575 df->def_regs[i] = reg_info;
576 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
577 memset (reg_info, 0, sizeof (struct df_reg_info));
578 df->use_regs[i] = reg_info;
579 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
580 memset (reg_info, 0, sizeof (struct df_reg_info));
581 df->eq_use_regs[i] = reg_info;
582 df->def_info.begin[i] = 0;
583 df->def_info.count[i] = 0;
584 df->use_info.begin[i] = 0;
585 df->use_info.count[i] = 0;
586 }
587
588 df->regs_inited = max_reg;
589 }
590
591
592 /* Grow the ref information. */
593
594 static void
595 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
596 {
597 if (ref_info->refs_size < new_size)
598 {
599 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
600 memset (ref_info->refs + ref_info->refs_size, 0,
601 (new_size - ref_info->refs_size) *sizeof (df_ref));
602 ref_info->refs_size = new_size;
603 }
604 }
605
606
607 /* Check and grow the ref information if necessary. This routine
608    guarantees that the refs array has at least total_size + BITMAP_ADDEND
609    entries.  It updates ref_info->refs_size only and does not change
610 ref_info->total_size. */
611
612 static void
613 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
614 unsigned bitmap_addend)
615 {
616 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
617 {
618 int new_size = ref_info->total_size + bitmap_addend;
619 new_size += ref_info->total_size / 4;
620 df_grow_ref_info (ref_info, new_size);
621 }
622 }
623
624
625 /* Grow the insn information.  If the current size is less than the
626 number of instructions, grow to 25% more than the number of
627 instructions. */
628
629 void
630 df_grow_insn_info (void)
631 {
632 unsigned int new_size = get_max_uid () + 1;
633 if (DF_INSN_SIZE () < new_size)
634 {
635 new_size += new_size / 4;
636 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
637 memset (df->insns + df->insns_size, 0,
638 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
639 DF_INSN_SIZE () = new_size;
640 }
641 }
642
643
644
645 \f
646 /*----------------------------------------------------------------------------
647 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
648 ----------------------------------------------------------------------------*/
649
650 /* Rescan all of the blocks in blocks_to_analyze, or all of the blocks
651    in the function if blocks_to_analyze is NULL.  */
652
653 void
654 df_scan_blocks (void)
655 {
656 basic_block bb;
657
658 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
659 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
660
661 df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
662 df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
663
664 bitmap_ior_into (&df->eh_block_artificial_uses,
665 &df->regular_block_artificial_uses);
666
667 /* ENTRY and EXIT blocks have special defs/uses. */
668 df_get_entry_block_def_set (df->entry_block_defs);
669 df_record_entry_block_defs (df->entry_block_defs);
670 df_get_exit_block_use_set (df->exit_block_uses);
671 df_record_exit_block_uses (df->exit_block_uses);
672 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
673 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
674
675 /* Regular blocks */
676 FOR_EACH_BB_FN (bb, cfun)
677 {
678 unsigned int bb_index = bb->index;
679 df_bb_refs_record (bb_index, true);
680 }
681 }
682
683 /* Create new refs under address LOC within INSN. This function is
684 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
685 depending on whether LOC is inside PATTERN (INSN) or a note. */
686
687 void
688 df_uses_create (rtx *loc, rtx insn, int ref_flags)
689 {
690 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
691 df_uses_record (NULL, loc, DF_REF_REG_USE,
692 BLOCK_FOR_INSN (insn),
693 DF_INSN_INFO_GET (insn),
694 ref_flags);
695 }
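/* Usage sketch (illustrative addition, not in the original source):
   a pass that has just attached a REG_EQUAL note and wants the uses
   inside that note recorded could do the following, where insn is a
   hypothetical local:

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       df_uses_create (&XEXP (note, 0), insn, DF_REF_IN_NOTE);  */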
696
697 /* Create a new ref of type DF_REF_TYPE for register REG at address
698 LOC within INSN of BB. This function is only used externally. */
699
700 df_ref
701 df_ref_create (rtx reg, rtx *loc, rtx insn,
702 basic_block bb,
703 enum df_ref_type ref_type,
704 int ref_flags)
705 {
706 enum df_ref_class cl;
707
708 df_grow_reg_info ();
709
710 /* You cannot hack artificial refs. */
711 gcc_assert (insn);
712
713 if (loc)
714 cl = DF_REF_REGULAR;
715 else
716 cl = DF_REF_BASE;
717
718 return df_ref_create_structure (cl, NULL, reg, loc, bb,
719 DF_INSN_INFO_GET (insn),
720 ref_type, ref_flags);
721 }
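/* Usage sketch (illustrative addition, not in the original source):
   recording an extra use of a register when there is no location for
   it inside the insn pattern, so LOC is NULL and the ref gets class
   DF_REF_BASE.  insn, bb and regno are hypothetical locals:

     df_ref_create (regno_reg_rtx[regno], NULL, insn, bb,
                    DF_REF_REG_USE, 0);  */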
722
723 static void
724 df_install_ref_incremental (df_ref ref)
725 {
726 struct df_reg_info **reg_info;
727 struct df_ref_info *ref_info;
728 df_ref *ref_rec;
729 df_ref **ref_rec_ptr;
730 unsigned int count = 0;
731 bool add_to_table;
732
733 rtx insn = DF_REF_INSN (ref);
734 basic_block bb = BLOCK_FOR_INSN (insn);
735
736 if (DF_REF_REG_DEF_P (ref))
737 {
738 reg_info = df->def_regs;
739 ref_info = &df->def_info;
740 ref_rec_ptr = &DF_INSN_DEFS (insn);
741 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
742 }
743 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
744 {
745 reg_info = df->eq_use_regs;
746 ref_info = &df->use_info;
747 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
748 switch (ref_info->ref_order)
749 {
750 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
751 case DF_REF_ORDER_BY_REG_WITH_NOTES:
752 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
753 add_to_table = true;
754 break;
755 default:
756 add_to_table = false;
757 break;
758 }
759 }
760 else
761 {
762 reg_info = df->use_regs;
763 ref_info = &df->use_info;
764 ref_rec_ptr = &DF_INSN_USES (insn);
765 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
766 }
767
768 /* Do not add if ref is not in the right blocks. */
769 if (add_to_table && df->analyze_subset)
770 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
771
772 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
773
774 if (add_to_table)
775 switch (ref_info->ref_order)
776 {
777 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
778 case DF_REF_ORDER_BY_REG_WITH_NOTES:
779 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
780 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
781 break;
782 default:
783 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
784 break;
785 }
786
787 ref_rec = *ref_rec_ptr;
788 while (*ref_rec)
789 {
790 count++;
791 ref_rec++;
792 }
793
794 ref_rec = *ref_rec_ptr;
795 if (count)
796 {
797 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
798 *ref_rec_ptr = ref_rec;
799 ref_rec[count] = ref;
800 ref_rec[count+1] = NULL;
801 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
802 }
803 else
804 {
805 df_ref *ref_rec = XNEWVEC (df_ref, 2);
806 ref_rec[0] = ref;
807 ref_rec[1] = NULL;
808 *ref_rec_ptr = ref_rec;
809 }
810
811 #if 0
812 if (dump_file)
813 {
814 fprintf (dump_file, "adding ref ");
815 df_ref_debug (ref, dump_file);
816 }
817 #endif
818   /* By adding the ref directly, df_insn_rescan may not find any
819 differences even though the block will have changed. So we need
820 to mark the block dirty ourselves. */
821 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
822 df_set_bb_dirty (bb);
823 }
824
825
826 \f
827 /*----------------------------------------------------------------------------
828 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
829 ----------------------------------------------------------------------------*/
830
831 static void
832 df_free_ref (df_ref ref)
833 {
834 struct df_scan_problem_data *problem_data
835 = (struct df_scan_problem_data *) df_scan->problem_data;
836
837 switch (DF_REF_CLASS (ref))
838 {
839 case DF_REF_BASE:
840 pool_free (problem_data->ref_base_pool, ref);
841 break;
842
843 case DF_REF_ARTIFICIAL:
844 pool_free (problem_data->ref_artificial_pool, ref);
845 break;
846
847 case DF_REF_REGULAR:
848 pool_free (problem_data->ref_regular_pool, ref);
849 break;
850 }
851 }
852
853
854 /* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
855 Also delete the def-use or use-def chain if it exists. */
856
857 static void
858 df_reg_chain_unlink (df_ref ref)
859 {
860 df_ref next = DF_REF_NEXT_REG (ref);
861 df_ref prev = DF_REF_PREV_REG (ref);
862 int id = DF_REF_ID (ref);
863 struct df_reg_info *reg_info;
864 df_ref *refs = NULL;
865
866 if (DF_REF_REG_DEF_P (ref))
867 {
868 int regno = DF_REF_REGNO (ref);
869 reg_info = DF_REG_DEF_GET (regno);
870 refs = df->def_info.refs;
871 }
872 else
873 {
874 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
875 {
876 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
877 switch (df->use_info.ref_order)
878 {
879 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
880 case DF_REF_ORDER_BY_REG_WITH_NOTES:
881 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
882 refs = df->use_info.refs;
883 break;
884 default:
885 break;
886 }
887 }
888 else
889 {
890 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
891 refs = df->use_info.refs;
892 }
893 }
894
895 if (refs)
896 {
897 if (df->analyze_subset)
898 {
899 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
900 refs[id] = NULL;
901 }
902 else
903 refs[id] = NULL;
904 }
905
906 /* Delete any def-use or use-def chains that start here. It is
907 possible that there is trash in this field. This happens for
908 insns that have been deleted when rescanning has been deferred
909 and the chain problem has also been deleted. The chain tear down
910 code skips deleted insns. */
911 if (df_chain && DF_REF_CHAIN (ref))
912 df_chain_unlink (ref);
913
914 reg_info->n_refs--;
915 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
916 {
917 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
918 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
919 }
920
921 /* Unlink from the reg chain. If there is no prev, this is the
922 first of the list. If not, just join the next and prev. */
923 if (prev)
924 DF_REF_NEXT_REG (prev) = next;
925 else
926 {
927 gcc_assert (reg_info->reg_chain == ref);
928 reg_info->reg_chain = next;
929 }
930 if (next)
931 DF_REF_PREV_REG (next) = prev;
932
933 df_free_ref (ref);
934 }
935
936
937 /* Remove REF from VEC. */
938
939 static void
940 df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
941 {
942 df_ref *vec = *vec_ptr;
943
944 if (vec[1])
945 {
946 while (*vec && *vec != ref)
947 vec++;
948
949 while (*vec)
950 {
951 *vec = *(vec+1);
952 vec++;
953 }
954 }
955 else
956 {
957 free (vec);
958 *vec_ptr = df_null_ref_rec;
959 }
960 }
961
962
963 /* Unlink REF from all def-use/use-def chains, etc. */
964
965 void
966 df_ref_remove (df_ref ref)
967 {
968 #if 0
969 if (dump_file)
970 {
971 fprintf (dump_file, "removing ref ");
972 df_ref_debug (ref, dump_file);
973 }
974 #endif
975
976 if (DF_REF_REG_DEF_P (ref))
977 {
978 if (DF_REF_IS_ARTIFICIAL (ref))
979 {
980 struct df_scan_bb_info *bb_info
981 = df_scan_get_bb_info (DF_REF_BBNO (ref));
982 df_ref_compress_rec (&bb_info->artificial_defs, ref);
983 }
984 else
985 {
986 unsigned int uid = DF_REF_INSN_UID (ref);
987 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
988 df_ref_compress_rec (&insn_rec->defs, ref);
989 }
990 }
991 else
992 {
993 if (DF_REF_IS_ARTIFICIAL (ref))
994 {
995 struct df_scan_bb_info *bb_info
996 = df_scan_get_bb_info (DF_REF_BBNO (ref));
997 df_ref_compress_rec (&bb_info->artificial_uses, ref);
998 }
999 else
1000 {
1001 unsigned int uid = DF_REF_INSN_UID (ref);
1002 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1003
1004 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1005 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1006 else
1007 df_ref_compress_rec (&insn_rec->uses, ref);
1008 }
1009 }
1010
1011   /* By deleting the ref directly, df_insn_rescan may not find any
1012 differences even though the block will have changed. So we need
1013 to mark the block dirty ourselves. */
1014 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
1015 df_set_bb_dirty (DF_REF_BB (ref));
1016 df_reg_chain_unlink (ref);
1017 }
1018
1019
1020 /* Create the insn record for INSN. If there was one there, zero it
1021 out. */
1022
1023 struct df_insn_info *
1024 df_insn_create_insn_record (rtx insn)
1025 {
1026 struct df_scan_problem_data *problem_data
1027 = (struct df_scan_problem_data *) df_scan->problem_data;
1028 struct df_insn_info *insn_rec;
1029
1030 df_grow_insn_info ();
1031 insn_rec = DF_INSN_INFO_GET (insn);
1032 if (!insn_rec)
1033 {
1034 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1035 DF_INSN_INFO_SET (insn, insn_rec);
1036 }
1037 memset (insn_rec, 0, sizeof (struct df_insn_info));
1038 insn_rec->insn = insn;
1039 return insn_rec;
1040 }
1041
1042
1043 /* Delete the du chains (DF_REF_CHAIN ()) of all refs in the ref chain. */
1044
1045 static void
1046 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1047 {
1048 while (*ref_rec)
1049 {
1050 df_ref ref = *ref_rec;
1051 /* CHAIN is allocated by DF_CHAIN. So make sure to
1052 pass df_scan instance for the problem. */
1053 if (DF_REF_CHAIN (ref))
1054 df_chain_unlink (ref);
1055 ref_rec++;
1056 }
1057 }
1058
1059
1060 /* Delete all refs in the ref chain. */
1061
1062 static void
1063 df_ref_chain_delete (df_ref *ref_rec)
1064 {
1065 df_ref *start = ref_rec;
1066 while (*ref_rec)
1067 {
1068 df_reg_chain_unlink (*ref_rec);
1069 ref_rec++;
1070 }
1071
1072 /* If the list is empty, it has a special shared element that is not
1073 to be deleted. */
1074 if (*start)
1075 free (start);
1076 }
1077
1078
1079 /* Delete the hardreg chain. */
1080
1081 static void
1082 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1083 {
1084 struct df_scan_problem_data *problem_data;
1085
1086 if (!hardregs)
1087 return;
1088
1089 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1090
1091 while (*hardregs)
1092 {
1093 pool_free (problem_data->mw_reg_pool, *hardregs);
1094 hardregs++;
1095 }
1096 }
1097
1098
1099 /* Delete all of the refs information from the insn with UID.
1100 Internal helper for df_insn_delete, df_insn_rescan, and other
1101 df-scan routines that don't have to work in deferred mode
1102 and do not have to mark basic blocks for re-processing. */
1103
1104 static void
1105 df_insn_info_delete (unsigned int uid)
1106 {
1107 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1108
1109 bitmap_clear_bit (&df->insns_to_delete, uid);
1110 bitmap_clear_bit (&df->insns_to_rescan, uid);
1111 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1112 if (insn_info)
1113 {
1114 struct df_scan_problem_data *problem_data
1115 = (struct df_scan_problem_data *) df_scan->problem_data;
1116
1117 /* In general, notes do not have the insn_info fields
1118 initialized. However, combine deletes insns by changing them
1119 to notes. How clever. So we cannot just check if it is a
1120 valid insn before short circuiting this code, we need to see
1121 if we actually initialized it. */
1122 if (insn_info->defs)
1123 {
1124 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1125
1126 if (df_chain)
1127 {
1128 df_ref_chain_delete_du_chain (insn_info->defs);
1129 df_ref_chain_delete_du_chain (insn_info->uses);
1130 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1131 }
1132
1133 df_ref_chain_delete (insn_info->defs);
1134 df_ref_chain_delete (insn_info->uses);
1135 df_ref_chain_delete (insn_info->eq_uses);
1136 df_scan_free_mws_vec (insn_info->mw_hardregs);
1137 }
1138 pool_free (problem_data->insn_pool, insn_info);
1139 DF_INSN_UID_SET (uid, NULL);
1140 }
1141 }
1142
1143 /* Delete all of the refs information from INSN, either right now
1144 or marked for later in deferred mode. */
1145
1146 void
1147 df_insn_delete (rtx insn)
1148 {
1149 unsigned int uid;
1150 basic_block bb;
1151
1152 gcc_checking_assert (INSN_P (insn));
1153
1154 if (!df)
1155 return;
1156
1157 uid = INSN_UID (insn);
1158 bb = BLOCK_FOR_INSN (insn);
1159
1160 /* ??? bb can be NULL after pass_free_cfg. At that point, DF should
1161 not exist anymore (as mentioned in df-core.c: "The only requirement
1162 [for DF] is that there be a correct control flow graph." Clearly
1163 that isn't the case after pass_free_cfg. But DF is freed much later
1164 because some back-ends want to use DF info even though the CFG is
1165 already gone. It's not clear to me whether that is safe, actually.
1166 In any case, we expect BB to be non-NULL at least up to register
1167 allocation, so disallow a non-NULL BB up to there. Not perfect
1168 but better than nothing... */
1169 gcc_checking_assert (bb != NULL || reload_completed);
1170
1171 df_grow_bb_info (df_scan);
1172 df_grow_reg_info ();
1173
1174 /* The block must be marked as dirty now, rather than later as in
1175 df_insn_rescan and df_notes_rescan because it may not be there at
1176 rescanning time and the mark would blow up.
1177 DEBUG_INSNs do not make a block's data flow solution dirty (at
1178 worst the LUIDs are no longer contiguous). */
1179 if (bb != NULL && NONDEBUG_INSN_P (insn))
1180 df_set_bb_dirty (bb);
1181
1182 /* The client has deferred rescanning. */
1183 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1184 {
1185 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1186 if (insn_info)
1187 {
1188 bitmap_clear_bit (&df->insns_to_rescan, uid);
1189 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1190 bitmap_set_bit (&df->insns_to_delete, uid);
1191 }
1192 if (dump_file)
1193 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1194 return;
1195 }
1196
1197 if (dump_file)
1198 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1199
1200 df_insn_info_delete (uid);
1201 }
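/* Usage sketch (illustrative addition, not in the original source):
   a pass that is about to remove insn from the stream while df is live
   first lets df release (or, in deferred mode, queue) the insn's ref
   information.  insn is a hypothetical local:

     if (INSN_P (insn))
       df_insn_delete (insn);  */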
1202
1203
1204 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1205
1206 static void
1207 df_free_collection_rec (struct df_collection_rec *collection_rec)
1208 {
1209 unsigned int ix;
1210 struct df_scan_problem_data *problem_data
1211 = (struct df_scan_problem_data *) df_scan->problem_data;
1212 df_ref ref;
1213 struct df_mw_hardreg *mw;
1214
1215 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
1216 df_free_ref (ref);
1217 FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
1218 df_free_ref (ref);
1219 FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
1220 df_free_ref (ref);
1221 FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1222 pool_free (problem_data->mw_reg_pool, mw);
1223
1224 collection_rec->def_vec.release ();
1225 collection_rec->use_vec.release ();
1226 collection_rec->eq_use_vec.release ();
1227 collection_rec->mw_vec.release ();
1228 }
1229
1230 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1231
1232 bool
1233 df_insn_rescan (rtx insn)
1234 {
1235 unsigned int uid = INSN_UID (insn);
1236 struct df_insn_info *insn_info = NULL;
1237 basic_block bb = BLOCK_FOR_INSN (insn);
1238 struct df_collection_rec collection_rec;
1239
1240 if ((!df) || (!INSN_P (insn)))
1241 return false;
1242
1243 if (!bb)
1244 {
1245 if (dump_file)
1246 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1247 return false;
1248 }
1249
1250 /* The client has disabled rescanning and plans to do it itself. */
1251 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1252 return false;
1253
1254 df_grow_bb_info (df_scan);
1255 df_grow_reg_info ();
1256
1257 insn_info = DF_INSN_UID_SAFE_GET (uid);
1258
1259 /* The client has deferred rescanning. */
1260 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1261 {
1262 if (!insn_info)
1263 {
1264 insn_info = df_insn_create_insn_record (insn);
1265 insn_info->defs = df_null_ref_rec;
1266 insn_info->uses = df_null_ref_rec;
1267 insn_info->eq_uses = df_null_ref_rec;
1268 insn_info->mw_hardregs = df_null_mw_rec;
1269 }
1270 if (dump_file)
1271 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1272
1273 bitmap_clear_bit (&df->insns_to_delete, uid);
1274 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1275 bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
1276 return false;
1277 }
1278
1279 bitmap_clear_bit (&df->insns_to_delete, uid);
1280 bitmap_clear_bit (&df->insns_to_rescan, uid);
1281 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1282 if (insn_info)
1283 {
1284 int luid;
1285 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1286 /* If there's no change, return false. */
1287 if (the_same)
1288 {
1289 df_free_collection_rec (&collection_rec);
1290 if (dump_file)
1291 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1292 return false;
1293 }
1294 if (dump_file)
1295 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1296
1297 /* There's change - we need to delete the existing info.
1298 Since the insn isn't moved, we can salvage its LUID. */
1299 luid = DF_INSN_LUID (insn);
1300 df_insn_info_delete (uid);
1301 df_insn_create_insn_record (insn);
1302 DF_INSN_LUID (insn) = luid;
1303 }
1304 else
1305 {
1306 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1307 df_insn_refs_collect (&collection_rec, bb, insn_info);
1308 if (dump_file)
1309 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1310 }
1311
1312 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
1313 if (!DEBUG_INSN_P (insn))
1314 df_set_bb_dirty (bb);
1315
1316 return true;
1317 }
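/* Usage sketch (illustrative addition, not in the original source):
   a pass that has edited the pattern of an insn in place can bring the
   cached refs back in sync with an explicit rescan.  insn and new_src
   are hypothetical locals:

     if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
       df_insn_rescan (insn);  */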
1318
1319 /* Same as df_insn_rescan, but don't mark the basic block as
1320 dirty. */
1321
1322 bool
1323 df_insn_rescan_debug_internal (rtx insn)
1324 {
1325 unsigned int uid = INSN_UID (insn);
1326 struct df_insn_info *insn_info;
1327
1328 gcc_assert (DEBUG_INSN_P (insn)
1329 && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1330
1331 if (!df)
1332 return false;
1333
1334 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1335 if (!insn_info)
1336 return false;
1337
1338 if (dump_file)
1339 fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
1340
1341 bitmap_clear_bit (&df->insns_to_delete, uid);
1342 bitmap_clear_bit (&df->insns_to_rescan, uid);
1343 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1344
1345 if (!insn_info->defs)
1346 return false;
1347
1348 if (insn_info->defs == df_null_ref_rec
1349 && insn_info->uses == df_null_ref_rec
1350 && insn_info->eq_uses == df_null_ref_rec
1351 && insn_info->mw_hardregs == df_null_mw_rec)
1352 return false;
1353
1354 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1355
1356 if (df_chain)
1357 {
1358 df_ref_chain_delete_du_chain (insn_info->defs);
1359 df_ref_chain_delete_du_chain (insn_info->uses);
1360 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1361 }
1362
1363 df_ref_chain_delete (insn_info->defs);
1364 df_ref_chain_delete (insn_info->uses);
1365 df_ref_chain_delete (insn_info->eq_uses);
1366 df_scan_free_mws_vec (insn_info->mw_hardregs);
1367
1368 insn_info->defs = df_null_ref_rec;
1369 insn_info->uses = df_null_ref_rec;
1370 insn_info->eq_uses = df_null_ref_rec;
1371 insn_info->mw_hardregs = df_null_mw_rec;
1372
1373 return true;
1374 }
1375
1376
1377 /* Rescan all of the insns in the function. Note that the artificial
1378 uses and defs are not touched. This function will destroy def-use
1379 or use-def chains. */
1380
1381 void
1382 df_insn_rescan_all (void)
1383 {
1384 bool no_insn_rescan = false;
1385 bool defer_insn_rescan = false;
1386 basic_block bb;
1387 bitmap_iterator bi;
1388 unsigned int uid;
1389 bitmap_head tmp;
1390
1391 bitmap_initialize (&tmp, &df_bitmap_obstack);
1392
1393 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1394 {
1395 df_clear_flags (DF_NO_INSN_RESCAN);
1396 no_insn_rescan = true;
1397 }
1398
1399 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1400 {
1401 df_clear_flags (DF_DEFER_INSN_RESCAN);
1402 defer_insn_rescan = true;
1403 }
1404
1405 bitmap_copy (&tmp, &df->insns_to_delete);
1406 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1407 {
1408 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1409 if (insn_info)
1410 df_insn_info_delete (uid);
1411 }
1412
1413 bitmap_clear (&tmp);
1414 bitmap_clear (&df->insns_to_delete);
1415 bitmap_clear (&df->insns_to_rescan);
1416 bitmap_clear (&df->insns_to_notes_rescan);
1417
1418 FOR_EACH_BB_FN (bb, cfun)
1419 {
1420 rtx insn;
1421 FOR_BB_INSNS (bb, insn)
1422 {
1423 df_insn_rescan (insn);
1424 }
1425 }
1426
1427 if (no_insn_rescan)
1428 df_set_flags (DF_NO_INSN_RESCAN);
1429 if (defer_insn_rescan)
1430 df_set_flags (DF_DEFER_INSN_RESCAN);
1431 }
1432
1433
1434 /* Process all of the deferred rescans or deletions. */
1435
1436 void
1437 df_process_deferred_rescans (void)
1438 {
1439 bool no_insn_rescan = false;
1440 bool defer_insn_rescan = false;
1441 bitmap_iterator bi;
1442 unsigned int uid;
1443 bitmap_head tmp;
1444
1445 bitmap_initialize (&tmp, &df_bitmap_obstack);
1446
1447 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1448 {
1449 df_clear_flags (DF_NO_INSN_RESCAN);
1450 no_insn_rescan = true;
1451 }
1452
1453 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1454 {
1455 df_clear_flags (DF_DEFER_INSN_RESCAN);
1456 defer_insn_rescan = true;
1457 }
1458
1459 if (dump_file)
1460 fprintf (dump_file, "starting the processing of deferred insns\n");
1461
1462 bitmap_copy (&tmp, &df->insns_to_delete);
1463 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1464 {
1465 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1466 if (insn_info)
1467 df_insn_info_delete (uid);
1468 }
1469
1470 bitmap_copy (&tmp, &df->insns_to_rescan);
1471 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1472 {
1473 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1474 if (insn_info)
1475 df_insn_rescan (insn_info->insn);
1476 }
1477
1478 bitmap_copy (&tmp, &df->insns_to_notes_rescan);
1479 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1480 {
1481 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1482 if (insn_info)
1483 df_notes_rescan (insn_info->insn);
1484 }
1485
1486 if (dump_file)
1487 fprintf (dump_file, "ending the processing of deferred insns\n");
1488
1489 bitmap_clear (&tmp);
1490 bitmap_clear (&df->insns_to_delete);
1491 bitmap_clear (&df->insns_to_rescan);
1492 bitmap_clear (&df->insns_to_notes_rescan);
1493
1494 if (no_insn_rescan)
1495 df_set_flags (DF_NO_INSN_RESCAN);
1496 if (defer_insn_rescan)
1497 df_set_flags (DF_DEFER_INSN_RESCAN);
1498
1499 /* If someone changed regs_ever_live during this pass, fix up the
1500 entry and exit blocks. */
1501 if (df->redo_entry_and_exit)
1502 {
1503 df_update_entry_exit_and_calls ();
1504 df->redo_entry_and_exit = false;
1505 }
1506 }
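/* Usage sketch (illustrative addition, not in the original source):
   batching rescans across many edits instead of paying for each one
   immediately.

     df_set_flags (DF_DEFER_INSN_RESCAN);
     ... create, edit or delete insns; df_insn_rescan and df_insn_delete
         only queue work while the flag is set ...
     df_process_deferred_rescans ();
     df_clear_flags (DF_DEFER_INSN_RESCAN);  */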
1507
1508
1509 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1510 the uses if INCLUDE_USES. Include the eq_uses if
1511 INCLUDE_EQ_USES. */
1512
1513 static unsigned int
1514 df_count_refs (bool include_defs, bool include_uses,
1515 bool include_eq_uses)
1516 {
1517 unsigned int regno;
1518 int size = 0;
1519 unsigned int m = df->regs_inited;
1520
1521 for (regno = 0; regno < m; regno++)
1522 {
1523 if (include_defs)
1524 size += DF_REG_DEF_COUNT (regno);
1525 if (include_uses)
1526 size += DF_REG_USE_COUNT (regno);
1527 if (include_eq_uses)
1528 size += DF_REG_EQ_USE_COUNT (regno);
1529 }
1530 return size;
1531 }
1532
1533
1534 /* Build the ref table for either the uses or defs from the reg-use
1535    or reg-def chains.  This version processes the refs in reg order,
1536 which is likely to be best if processing the whole function. */
1537
1538 static void
1539 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1540 bool include_defs,
1541 bool include_uses,
1542 bool include_eq_uses)
1543 {
1544 unsigned int m = df->regs_inited;
1545 unsigned int regno;
1546 unsigned int offset = 0;
1547 unsigned int start;
1548
1549 if (df->changeable_flags & DF_NO_HARD_REGS)
1550 {
1551 start = FIRST_PSEUDO_REGISTER;
1552 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1553 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1554 }
1555 else
1556 start = 0;
1557
1558 ref_info->total_size
1559 = df_count_refs (include_defs, include_uses, include_eq_uses);
1560
1561 df_check_and_grow_ref_info (ref_info, 1);
1562
1563 for (regno = start; regno < m; regno++)
1564 {
1565 int count = 0;
1566 ref_info->begin[regno] = offset;
1567 if (include_defs)
1568 {
1569 df_ref ref = DF_REG_DEF_CHAIN (regno);
1570 while (ref)
1571 {
1572 ref_info->refs[offset] = ref;
1573 DF_REF_ID (ref) = offset++;
1574 count++;
1575 ref = DF_REF_NEXT_REG (ref);
1576 gcc_checking_assert (offset < ref_info->refs_size);
1577 }
1578 }
1579 if (include_uses)
1580 {
1581 df_ref ref = DF_REG_USE_CHAIN (regno);
1582 while (ref)
1583 {
1584 ref_info->refs[offset] = ref;
1585 DF_REF_ID (ref) = offset++;
1586 count++;
1587 ref = DF_REF_NEXT_REG (ref);
1588 gcc_checking_assert (offset < ref_info->refs_size);
1589 }
1590 }
1591 if (include_eq_uses)
1592 {
1593 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1594 while (ref)
1595 {
1596 ref_info->refs[offset] = ref;
1597 DF_REF_ID (ref) = offset++;
1598 count++;
1599 ref = DF_REF_NEXT_REG (ref);
1600 gcc_checking_assert (offset < ref_info->refs_size);
1601 }
1602 }
1603 ref_info->count[regno] = count;
1604 }
1605
1606 /* The bitmap size is not decremented when refs are deleted. So
1607 reset it now that we have squished out all of the empty
1608 slots. */
1609 ref_info->table_size = offset;
1610 }
1611
1612
1613 /* Build the ref table for either the uses or defs from the reg-use
1614    or reg-def chains.  This version processes the refs in insn order,
1615 which is likely to be best if processing some segment of the
1616 function. */
1617
1618 static void
1619 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1620 bool include_defs,
1621 bool include_uses,
1622 bool include_eq_uses)
1623 {
1624 bitmap_iterator bi;
1625 unsigned int bb_index;
1626 unsigned int m = df->regs_inited;
1627 unsigned int offset = 0;
1628 unsigned int r;
1629 unsigned int start
1630 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1631
1632 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1633 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1634
1635 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1636 df_check_and_grow_ref_info (ref_info, 1);
1637
1638 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1639 {
1640 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1641 rtx insn;
1642 df_ref *ref_rec;
1643
1644 if (include_defs)
1645 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1646 {
1647 unsigned int regno = DF_REF_REGNO (*ref_rec);
1648 ref_info->count[regno]++;
1649 }
1650 if (include_uses)
1651 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1652 {
1653 unsigned int regno = DF_REF_REGNO (*ref_rec);
1654 ref_info->count[regno]++;
1655 }
1656
1657 FOR_BB_INSNS (bb, insn)
1658 {
1659 if (INSN_P (insn))
1660 {
1661 unsigned int uid = INSN_UID (insn);
1662
1663 if (include_defs)
1664 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1665 {
1666 unsigned int regno = DF_REF_REGNO (*ref_rec);
1667 ref_info->count[regno]++;
1668 }
1669 if (include_uses)
1670 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1671 {
1672 unsigned int regno = DF_REF_REGNO (*ref_rec);
1673 ref_info->count[regno]++;
1674 }
1675 if (include_eq_uses)
1676 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1677 {
1678 unsigned int regno = DF_REF_REGNO (*ref_rec);
1679 ref_info->count[regno]++;
1680 }
1681 }
1682 }
1683 }
1684
1685 for (r = start; r < m; r++)
1686 {
1687 ref_info->begin[r] = offset;
1688 offset += ref_info->count[r];
1689 ref_info->count[r] = 0;
1690 }
1691
1692 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1693 {
1694 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1695 rtx insn;
1696 df_ref *ref_rec;
1697
1698 if (include_defs)
1699 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1700 {
1701 df_ref ref = *ref_rec;
1702 unsigned int regno = DF_REF_REGNO (ref);
1703 if (regno >= start)
1704 {
1705 unsigned int id
1706 = ref_info->begin[regno] + ref_info->count[regno]++;
1707 DF_REF_ID (ref) = id;
1708 ref_info->refs[id] = ref;
1709 }
1710 }
1711 if (include_uses)
1712 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1713 {
1714 df_ref ref = *ref_rec;
1715 unsigned int regno = DF_REF_REGNO (ref);
1716 if (regno >= start)
1717 {
1718 unsigned int id
1719 = ref_info->begin[regno] + ref_info->count[regno]++;
1720 DF_REF_ID (ref) = id;
1721 ref_info->refs[id] = ref;
1722 }
1723 }
1724
1725 FOR_BB_INSNS (bb, insn)
1726 {
1727 if (INSN_P (insn))
1728 {
1729 unsigned int uid = INSN_UID (insn);
1730
1731 if (include_defs)
1732 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1733 {
1734 df_ref ref = *ref_rec;
1735 unsigned int regno = DF_REF_REGNO (ref);
1736 if (regno >= start)
1737 {
1738 unsigned int id
1739 = ref_info->begin[regno] + ref_info->count[regno]++;
1740 DF_REF_ID (ref) = id;
1741 ref_info->refs[id] = ref;
1742 }
1743 }
1744 if (include_uses)
1745 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1746 {
1747 df_ref ref = *ref_rec;
1748 unsigned int regno = DF_REF_REGNO (ref);
1749 if (regno >= start)
1750 {
1751 unsigned int id
1752 = ref_info->begin[regno] + ref_info->count[regno]++;
1753 DF_REF_ID (ref) = id;
1754 ref_info->refs[id] = ref;
1755 }
1756 }
1757 if (include_eq_uses)
1758 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1759 {
1760 df_ref ref = *ref_rec;
1761 unsigned int regno = DF_REF_REGNO (ref);
1762 if (regno >= start)
1763 {
1764 unsigned int id
1765 = ref_info->begin[regno] + ref_info->count[regno]++;
1766 DF_REF_ID (ref) = id;
1767 ref_info->refs[id] = ref;
1768 }
1769 }
1770 }
1771 }
1772 }
1773
1774 /* The bitmap size is not decremented when refs are deleted. So
1775 reset it now that we have squished out all of the empty
1776 slots. */
1777
1778 ref_info->table_size = offset;
1779 }
1780
1781 /* Build the ref table for either the uses or defs from the reg-use
1782 or reg-def chains. */
1783
1784 static void
1785 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1786 bool include_defs,
1787 bool include_uses,
1788 bool include_eq_uses)
1789 {
1790 if (df->analyze_subset)
1791 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1792 include_uses, include_eq_uses);
1793 else
1794 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1795 include_uses, include_eq_uses);
1796 }
1797
1798
1799 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1800 static unsigned int
1801 df_add_refs_to_table (unsigned int offset,
1802 struct df_ref_info *ref_info,
1803 df_ref *ref_vec)
1804 {
1805 while (*ref_vec)
1806 {
1807 df_ref ref = *ref_vec;
1808 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1809 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1810 {
1811 ref_info->refs[offset] = ref;
1812 DF_REF_ID (*ref_vec) = offset++;
1813 }
1814 ref_vec++;
1815 }
1816 return offset;
1817 }
1818
1819
1820 /* Count the number of refs in all of the insns of BB. Include the
1821 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1822 eq_uses if INCLUDE_EQ_USES. */
1823
1824 static unsigned int
1825 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1826 struct df_ref_info *ref_info,
1827 bool include_defs, bool include_uses,
1828 bool include_eq_uses)
1829 {
1830 rtx insn;
1831
1832 if (include_defs)
1833 offset = df_add_refs_to_table (offset, ref_info,
1834 df_get_artificial_defs (bb->index));
1835 if (include_uses)
1836 offset = df_add_refs_to_table (offset, ref_info,
1837 df_get_artificial_uses (bb->index));
1838
1839 FOR_BB_INSNS (bb, insn)
1840 if (INSN_P (insn))
1841 {
1842 unsigned int uid = INSN_UID (insn);
1843 if (include_defs)
1844 offset = df_add_refs_to_table (offset, ref_info,
1845 DF_INSN_UID_DEFS (uid));
1846 if (include_uses)
1847 offset = df_add_refs_to_table (offset, ref_info,
1848 DF_INSN_UID_USES (uid));
1849 if (include_eq_uses)
1850 offset = df_add_refs_to_table (offset, ref_info,
1851 DF_INSN_UID_EQ_USES (uid));
1852 }
1853 return offset;
1854 }
1855
1856
1857 /* Organize the refs by insn into the table in REF_INFO. If
1858 blocks_to_analyze is defined, use that set, otherwise the entire
1859 program. Include the defs if INCLUDE_DEFS. Include the uses if
1860 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1861
1862 static void
1863 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1864 bool include_defs, bool include_uses,
1865 bool include_eq_uses)
1866 {
1867 basic_block bb;
1868 unsigned int offset = 0;
1869
1870 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1871 df_check_and_grow_ref_info (ref_info, 1);
1872 if (df->blocks_to_analyze)
1873 {
1874 bitmap_iterator bi;
1875 unsigned int index;
1876
1877 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1878 {
1879 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1880 index),
1881 offset, ref_info,
1882 include_defs, include_uses,
1883 include_eq_uses);
1884 }
1885
1886 ref_info->table_size = offset;
1887 }
1888 else
1889 {
1890 FOR_ALL_BB_FN (bb, cfun)
1891 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1892 include_defs, include_uses,
1893 include_eq_uses);
1894 ref_info->table_size = offset;
1895 }
1896 }
1897
1898
1899 /* If the use refs in DF are not organized, reorganize them. */
1900
1901 void
1902 df_maybe_reorganize_use_refs (enum df_ref_order order)
1903 {
1904 if (order == df->use_info.ref_order)
1905 return;
1906
1907 switch (order)
1908 {
1909 case DF_REF_ORDER_BY_REG:
1910 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1911 break;
1912
1913 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1914 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1915 break;
1916
1917 case DF_REF_ORDER_BY_INSN:
1918 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1919 break;
1920
1921 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1922 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1923 break;
1924
1925 case DF_REF_ORDER_NO_TABLE:
1926 free (df->use_info.refs);
1927 df->use_info.refs = NULL;
1928 df->use_info.refs_size = 0;
1929 break;
1930
1931 case DF_REF_ORDER_UNORDERED:
1932 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1933 gcc_unreachable ();
1934 break;
1935 }
1936
1937 df->use_info.ref_order = order;
1938 }
1939
1940
1941 /* If the def refs in DF are not already organized as ORDER, reorganize them. */
1942
1943 void
1944 df_maybe_reorganize_def_refs (enum df_ref_order order)
1945 {
1946 if (order == df->def_info.ref_order)
1947 return;
1948
1949 switch (order)
1950 {
1951 case DF_REF_ORDER_BY_REG:
1952 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1953 break;
1954
1955 case DF_REF_ORDER_BY_INSN:
1956 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1957 break;
1958
1959 case DF_REF_ORDER_NO_TABLE:
1960 free (df->def_info.refs);
1961 df->def_info.refs = NULL;
1962 df->def_info.refs_size = 0;
1963 break;
1964
1965 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1966 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1967 case DF_REF_ORDER_UNORDERED:
1968 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1969 gcc_unreachable ();
1970 break;
1971 }
1972
1973 df->def_info.ref_order = order;
1974 }
1975
1976
1977 /* Set the basic block of INSN to NEW_BB and update the dataflow
1978 information accordingly. This function is called from routines that move
1979 instructions from one block to another. */
1980
1981 void
1982 df_insn_change_bb (rtx insn, basic_block new_bb)
1983 {
1984 basic_block old_bb = BLOCK_FOR_INSN (insn);
1985 struct df_insn_info *insn_info;
1986 unsigned int uid = INSN_UID (insn);
1987
1988 if (old_bb == new_bb)
1989 return;
1990
1991 set_block_for_insn (insn, new_bb);
1992
1993 if (!df)
1994 return;
1995
1996 if (dump_file)
1997 fprintf (dump_file, "changing bb of uid %d\n", uid);
1998
1999 insn_info = DF_INSN_UID_SAFE_GET (uid);
2000 if (insn_info == NULL)
2001 {
2002 if (dump_file)
2003 fprintf (dump_file, " unscanned insn\n");
2004 df_insn_rescan (insn);
2005 return;
2006 }
2007
2008 if (!INSN_P (insn))
2009 return;
2010
2011 df_set_bb_dirty (new_bb);
2012 if (old_bb)
2013 {
2014 if (dump_file)
2015 fprintf (dump_file, " from %d to %d\n",
2016 old_bb->index, new_bb->index);
2017 df_set_bb_dirty (old_bb);
2018 }
2019 else
2020 if (dump_file)
2021 fprintf (dump_file, " to %d\n", new_bb->index);
2022 }
2023
2024
2025 /* Helper function for df_ref_change_reg_with_loc. */
2026
2027 static void
2028 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
2029 struct df_reg_info *new_df,
2030 int new_regno, rtx loc)
2031 {
2032 df_ref the_ref = old_df->reg_chain;
2033
2034 while (the_ref)
2035 {
2036 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
2037 && DF_REF_LOC (the_ref)
2038 && (*DF_REF_LOC (the_ref) == loc))
2039 {
2040 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
2041 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
2042 df_ref *ref_vec, *ref_vec_t;
2043 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
2044 unsigned int count = 0;
2045
2046 DF_REF_REGNO (the_ref) = new_regno;
2047 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
2048
2049 /* Pull the_ref out of the old regno chain. */
2050 if (prev_ref)
2051 DF_REF_NEXT_REG (prev_ref) = next_ref;
2052 else
2053 old_df->reg_chain = next_ref;
2054 if (next_ref)
2055 DF_REF_PREV_REG (next_ref) = prev_ref;
2056 old_df->n_refs--;
2057
2058 /* Put the ref into the new regno chain. */
2059 DF_REF_PREV_REG (the_ref) = NULL;
2060 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
2061 if (new_df->reg_chain)
2062 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
2063 new_df->reg_chain = the_ref;
2064 new_df->n_refs++;
2065 if (DF_REF_BB (the_ref))
2066 df_set_bb_dirty (DF_REF_BB (the_ref));
2067
2068 /* The ref vector that the ref lives in must be re-sorted,
2069 because the regno is a sorting key. First, find the right
2070 vector. */
2071 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2072 ref_vec = insn_info->eq_uses;
2073 else
2074 ref_vec = insn_info->uses;
2075 if (dump_file)
2076 fprintf (dump_file, "changing reg in insn %d\n",
2077 DF_REF_INSN_UID (the_ref));
2078
2079 ref_vec_t = ref_vec;
2080
2081 /* Find the length. */
2082 while (*ref_vec_t)
2083 {
2084 count++;
2085 ref_vec_t++;
2086 }
2087 qsort (ref_vec, count, sizeof (df_ref), df_ref_compare);
2088
2089 the_ref = next_ref;
2090 }
2091 else
2092 the_ref = DF_REF_NEXT_REG (the_ref);
2093 }
2094 }
2095
2096
2097 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2098 NEW_REGNO. Refs that do not match LOC are not changed which means
2099 that artificial refs are not changed since they have no loc. This
2100 call is to support the SET_REGNO macro. */
2101
2102 void
2103 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2104 {
2105 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2106 return;
2107
2108 df_grow_reg_info ();
2109
2110 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2111 DF_REG_DEF_GET (new_regno), new_regno, loc);
2112 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2113 DF_REG_USE_GET (new_regno), new_regno, loc);
2114 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2115 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2116 }
2117
2118
2119 /* Delete the mw_hardregs that point into the eq_notes. */
2120
2121 static unsigned int
2122 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2123 {
2124 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2125 unsigned int deleted = 0;
2126 unsigned int count = 0;
2127 struct df_scan_problem_data *problem_data
2128 = (struct df_scan_problem_data *) df_scan->problem_data;
2129
2130 if (!*mw_vec)
2131 return 0;
2132
2133 while (*mw_vec)
2134 {
2135 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2136 {
2137 struct df_mw_hardreg **temp_vec = mw_vec;
2138
2139 pool_free (problem_data->mw_reg_pool, *mw_vec);
2140
2141 /* Shove the remaining ones down one to fill the gap. While
2142 this looks n**2, it is highly unusual to have any mw regs
2143 in eq_notes and the chances of more than one are almost
2144 non-existent. */
2145 while (*temp_vec)
2146 {
2147 *temp_vec = *(temp_vec + 1);
2148 temp_vec++;
2149 }
2150 deleted++;
2151 }
2152 else
2153 {
2154 mw_vec++;
2155 count++;
2156 }
2157 }
2158
2159 if (count == 0)
2160 {
2161 df_scan_free_mws_vec (insn_info->mw_hardregs);
2162 insn_info->mw_hardregs = df_null_mw_rec;
2163 return 0;
2164 }
2165 return deleted;
2166 }
2167
2168
2169 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2170
2171 void
2172 df_notes_rescan (rtx insn)
2173 {
2174 struct df_insn_info *insn_info;
2175 unsigned int uid = INSN_UID (insn);
2176
2177 if (!df)
2178 return;
2179
2180 /* The client has disabled rescanning and plans to do it itself. */
2181 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2182 return;
2183
2184 /* Do nothing if the insn hasn't been emitted yet. */
2185 if (!BLOCK_FOR_INSN (insn))
2186 return;
2187
2188 df_grow_bb_info (df_scan);
2189 df_grow_reg_info ();
2190
2191 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
2192
2193 /* The client has deferred rescanning. */
2194 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2195 {
2196 if (!insn_info)
2197 {
2198 insn_info = df_insn_create_insn_record (insn);
2199 insn_info->defs = df_null_ref_rec;
2200 insn_info->uses = df_null_ref_rec;
2201 insn_info->eq_uses = df_null_ref_rec;
2202 insn_info->mw_hardregs = df_null_mw_rec;
2203 }
2204
2205 bitmap_clear_bit (&df->insns_to_delete, uid);
2206 /* If the insn is set to be rescanned, it does not need to also
2207 be notes rescanned. */
2208 if (!bitmap_bit_p (&df->insns_to_rescan, uid))
2209 bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
2210 return;
2211 }
2212
2213 bitmap_clear_bit (&df->insns_to_delete, uid);
2214 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
2215
2216 if (insn_info)
2217 {
2218 basic_block bb = BLOCK_FOR_INSN (insn);
2219 rtx note;
2220 struct df_collection_rec collection_rec;
2221 unsigned int num_deleted;
2222 unsigned int mw_len;
2223
2224 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2225 df_ref_chain_delete (insn_info->eq_uses);
2226 insn_info->eq_uses = NULL;
2227
2228 /* Process REG_EQUIV/REG_EQUAL notes */
2229 for (note = REG_NOTES (insn); note;
2230 note = XEXP (note, 1))
2231 {
2232 switch (REG_NOTE_KIND (note))
2233 {
2234 case REG_EQUIV:
2235 case REG_EQUAL:
2236 df_uses_record (&collection_rec,
2237 &XEXP (note, 0), DF_REF_REG_USE,
2238 bb, insn_info, DF_REF_IN_NOTE);
2239 default:
2240 break;
2241 }
2242 }
2243
2244 /* Find some place to put any new mw_hardregs. */
2245 df_canonize_collection_rec (&collection_rec);
2246 mw_len = collection_rec.mw_vec.length ();
2247 if (mw_len)
2248 {
2249 unsigned int count = 0;
2250 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2251 while (*mw_rec)
2252 {
2253 count++;
2254 mw_rec++;
2255 }
2256
2257 if (count)
2258 {
2259 /* Append to the end of the existing record after
2260 expanding it if necessary. */
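/* The slots vacated by the deleted eq-note entries can be reused,
so the vector only needs to grow when more entries are added than
were deleted.  */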
2261 if (mw_len > num_deleted)
2262 {
2263 insn_info->mw_hardregs =
2264 XRESIZEVEC (struct df_mw_hardreg *,
2265 insn_info->mw_hardregs,
2266 count + 1 + mw_len);
2267 }
2268 memcpy (&insn_info->mw_hardregs[count],
2269 collection_rec.mw_vec.address (),
2270 mw_len * sizeof (struct df_mw_hardreg *));
2271 insn_info->mw_hardregs[count + mw_len] = NULL;
2272 qsort (insn_info->mw_hardregs, count + mw_len,
2273 sizeof (struct df_mw_hardreg *), df_mw_compare);
2274 }
2275 else
2276 {
2277 /* No vector there. */
2278 insn_info->mw_hardregs
2279 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2280 memcpy (insn_info->mw_hardregs,
2281 collection_rec.mw_vec.address (),
2282 mw_len * sizeof (struct df_mw_hardreg *));
2283 insn_info->mw_hardregs[mw_len] = NULL;
2284 }
2285 }
2286 df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
2287 }
2288 else
2289 df_insn_rescan (insn);
2290
2291 }
2292
2293 \f
2294 /*----------------------------------------------------------------------------
2295 Hard core instruction scanning code. No external interfaces here,
2296 just a lot of routines that look inside insns.
2297 ----------------------------------------------------------------------------*/
2298
2299
2300 /* Return true if the contents of two df_refs are identical,
2301 ignoring the DF_REF_REG_MARKER and DF_REF_MW_HARDREG flags. */
2302
2303 static bool
2304 df_ref_equal_p (df_ref ref1, df_ref ref2)
2305 {
2306 if (!ref2)
2307 return false;
2308
2309 if (ref1 == ref2)
2310 return true;
2311
2312 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2313 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2314 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2315 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2316 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2317 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2318 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2319 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2320 return false;
2321
2322 switch (DF_REF_CLASS (ref1))
2323 {
2324 case DF_REF_ARTIFICIAL:
2325 case DF_REF_BASE:
2326 return true;
2327
2328 case DF_REF_REGULAR:
2329 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2330
2331 default:
2332 gcc_unreachable ();
2333 }
2334 return false;
2335 }
2336
2337
2338 /* Compare REF1 and REF2 for sorting. This is only called from places
2339 where all of the refs are of the same type, in the same insn, and
2340 have the same bb. So these fields are not checked. */
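/* Roughly: refs are ordered by ref class, then register number, then
   ref type, with the ref creation order (DF_REF_ORDER) breaking most
   remaining ties; among otherwise equal refs, one belonging to a
   multiword hardreg sorts first.  */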
2341
2342 static int
2343 df_ref_compare (const void *r1, const void *r2)
2344 {
2345 const df_ref ref1 = *(const df_ref *)r1;
2346 const df_ref ref2 = *(const df_ref *)r2;
2347
2348 if (ref1 == ref2)
2349 return 0;
2350
2351 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2352 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2353
2354 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2355 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2356
2357 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2358 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2359
2360 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2361 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2362
2363 /* Cannot look at the LOC field on artificial refs. */
2364 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2365 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2366 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2367
2368 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2369 {
2370 /* If two refs are identical except that one of them is from
2371 a mw and one is not, we need to have the one with the mw
2372 first. */
2373 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2374 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2375 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2376 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2377 return -1;
2378 else
2379 return 1;
2380 }
2381
2382 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2383 }
2384
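/* Swap the refs at positions I and J in *REF_VEC.  */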
2385 static void
2386 df_swap_refs (vec<df_ref, va_heap> *ref_vec, int i, int j)
2387 {
2388 df_ref tmp = (*ref_vec)[i];
2389 (*ref_vec)[i] = (*ref_vec)[j];
2390 (*ref_vec)[j] = tmp;
2391 }
2392
2393 /* Sort and compress a set of refs. */
2394
2395 static void
2396 df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
2397 {
2398 unsigned int count;
2399 unsigned int i;
2400 unsigned int dist = 0;
2401
2402 count = ref_vec->length ();
2403
2404 /* If there are 1 or 0 elements, there is nothing to do. */
2405 if (count < 2)
2406 return;
2407 else if (count == 2)
2408 {
2409 df_ref r0 = (*ref_vec)[0];
2410 df_ref r1 = (*ref_vec)[1];
2411 if (df_ref_compare (&r0, &r1) > 0)
2412 df_swap_refs (ref_vec, 0, 1);
2413 }
2414 else
2415 {
2416 for (i = 0; i < count - 1; i++)
2417 {
2418 df_ref r0 = (*ref_vec)[i];
2419 df_ref r1 = (*ref_vec)[i + 1];
2420 if (df_ref_compare (&r0, &r1) >= 0)
2421 break;
2422 }
2423 /* If the array is already strictly ordered,
2424 which is the most common case when COUNT is large
2425 (as it is for CALL insns),
2426 there is no need to sort and filter out duplicates;
2427 simply return.
2428 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2429 of DF_REF_COMPARE. */
2430 if (i == count - 1)
2431 return;
2432 ref_vec->qsort (df_ref_compare);
2433 }
2434
2435 for (i = 0; i < count - dist; i++)
2436 {
2437 /* Find the next ref that is not equal to the current ref. */
2438 while (i + dist + 1 < count
2439 && df_ref_equal_p ((*ref_vec)[i],
2440 (*ref_vec)[i + dist + 1]))
2441 {
2442 df_free_ref ((*ref_vec)[i + dist + 1]);
2443 dist++;
2444 }
2445 /* Copy it down to the next position. */
2446 if (dist && i + dist + 1 < count)
2447 (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
2448 }
2449
2450 count -= dist;
2451 ref_vec->truncate (count);
2452 }
2453
2454
2455 /* Return true if the contents of two df_mw_hardregs are
2456 identical. */
2457
2458 static bool
2459 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2460 {
2461 if (!mw2)
2462 return false;
2463 return (mw1 == mw2) ||
2464 (mw1->mw_reg == mw2->mw_reg
2465 && mw1->type == mw2->type
2466 && mw1->flags == mw2->flags
2467 && mw1->start_regno == mw2->start_regno
2468 && mw1->end_regno == mw2->end_regno);
2469 }
2470
2471
2472 /* Compare MW1 and MW2 for sorting. */
2473
2474 static int
2475 df_mw_compare (const void *m1, const void *m2)
2476 {
2477 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2478 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2479
2480 if (mw1 == mw2)
2481 return 0;
2482
2483 if (mw1->type != mw2->type)
2484 return mw1->type - mw2->type;
2485
2486 if (mw1->flags != mw2->flags)
2487 return mw1->flags - mw2->flags;
2488
2489 if (mw1->start_regno != mw2->start_regno)
2490 return mw1->start_regno - mw2->start_regno;
2491
2492 if (mw1->end_regno != mw2->end_regno)
2493 return mw1->end_regno - mw2->end_regno;
2494
2495 if (mw1->mw_reg != mw2->mw_reg)
2496 return mw1->mw_order - mw2->mw_order;
2497
2498 return 0;
2499 }
2500
2501
2502 /* Sort and compress a set of mw_hardregs. */
2503
2504 static void
2505 df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
2506 {
2507 unsigned int count;
2508 struct df_scan_problem_data *problem_data
2509 = (struct df_scan_problem_data *) df_scan->problem_data;
2510 unsigned int i;
2511 unsigned int dist = 0;
2512
2513 count = mw_vec->length ();
2514 if (count < 2)
2515 return;
2516 else if (count == 2)
2517 {
2518 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2519 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2520 if (df_mw_compare (&m0, &m1) > 0)
2521 {
2522 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2523 (*mw_vec)[0] = (*mw_vec)[1];
2524 (*mw_vec)[1] = tmp;
2525 }
2526 }
2527 else
2528 mw_vec->qsort (df_mw_compare);
2529
2530 for (i = 0; i < count - dist; i++)
2531 {
2532 /* Find the next ref that is not equal to the current ref. */
2533 while (i + dist + 1 < count
2534 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2535 {
2536 pool_free (problem_data->mw_reg_pool,
2537 (*mw_vec)[i + dist + 1]);
2538 dist++;
2539 }
2540 /* Copy it down to the next position. */
2541 if (dist && i + dist + 1 < count)
2542 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2543 }
2544
2545 count -= dist;
2546 mw_vec->truncate (count);
2547 }
2548
2549
2550 /* Sort and remove duplicates from the COLLECTION_REC. */
2551
2552 static void
2553 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2554 {
2555 df_sort_and_compress_refs (&collection_rec->def_vec);
2556 df_sort_and_compress_refs (&collection_rec->use_vec);
2557 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2558 df_sort_and_compress_mws (&collection_rec->mw_vec);
2559 }
2560
2561
2562 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2563
2564 static void
2565 df_install_ref (df_ref this_ref,
2566 struct df_reg_info *reg_info,
2567 struct df_ref_info *ref_info,
2568 bool add_to_table)
2569 {
2570 unsigned int regno = DF_REF_REGNO (this_ref);
2571 /* Add the ref to the reg_{def,use,eq_use} chain. */
2572 df_ref head = reg_info->reg_chain;
2573
2574 reg_info->reg_chain = this_ref;
2575 reg_info->n_refs++;
2576
2577 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2578 {
2579 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2580 df->hard_regs_live_count[regno]++;
2581 }
2582
2583 gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
2584 && DF_REF_PREV_REG (this_ref) == NULL);
2585
2586 DF_REF_NEXT_REG (this_ref) = head;
2587
2588 /* The chain's head pointer lives in REG_INFO, not in a ref, so there is no previous ref to link back to. */
2589 DF_REF_PREV_REG (this_ref) = NULL;
2590
2591 if (head)
2592 DF_REF_PREV_REG (head) = this_ref;
2593
2594 if (add_to_table)
2595 {
2596 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2597 df_check_and_grow_ref_info (ref_info, 1);
2598 DF_REF_ID (this_ref) = ref_info->table_size;
2599 /* Add the ref to the big array of defs. */
2600 ref_info->refs[ref_info->table_size] = this_ref;
2601 ref_info->table_size++;
2602 }
2603 else
2604 DF_REF_ID (this_ref) = -1;
2605
2606 ref_info->total_size++;
2607 }
2608
2609
2610 /* This function takes one of the groups of refs (defs, uses or
2611 eq_uses) and installs the entire group into the insn. It also adds
2612 each of these refs into the appropriate chains. */
2613
2614 static df_ref *
2615 df_install_refs (basic_block bb,
2616 const vec<df_ref, va_heap> *old_vec,
2617 struct df_reg_info **reg_info,
2618 struct df_ref_info *ref_info,
2619 bool is_notes)
2620 {
2621 unsigned int count = old_vec->length ();
2622 if (count)
2623 {
2624 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
2625 bool add_to_table;
2626 df_ref this_ref;
2627 unsigned int ix;
2628
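/* Appending refs invalidates any by-reg or by-insn ordering, so
downgrade the table's order to UNORDERED and decide whether these
refs belong in the flat ref table at all.  */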
2629 switch (ref_info->ref_order)
2630 {
2631 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2632 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2633 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2634 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2635 add_to_table = true;
2636 break;
2637 case DF_REF_ORDER_UNORDERED:
2638 case DF_REF_ORDER_BY_REG:
2639 case DF_REF_ORDER_BY_INSN:
2640 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2641 add_to_table = !is_notes;
2642 break;
2643 default:
2644 add_to_table = false;
2645 break;
2646 }
2647
2648 /* Do not add if ref is not in the right blocks. */
2649 if (add_to_table && df->analyze_subset)
2650 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2651
2652 FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
2653 {
2654 new_vec[ix] = this_ref;
2655 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2656 ref_info, add_to_table);
2657 }
2658
2659 new_vec[count] = NULL;
2660 return new_vec;
2661 }
2662 else
2663 return df_null_ref_rec;
2664 }
2665
2666
2667 /* This function takes the mws and installs the entire group into
2668 the insn. */
2669
2670 static struct df_mw_hardreg **
2671 df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
2672 {
2673 unsigned int count = old_vec->length ();
2674 if (count)
2675 {
2676 struct df_mw_hardreg **new_vec
2677 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2678 memcpy (new_vec, old_vec->address (),
2679 sizeof (struct df_mw_hardreg*) * count);
2680 new_vec[count] = NULL;
2681 return new_vec;
2682 }
2683 else
2684 return df_null_mw_rec;
2685 }
2686
2687
2688 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2689 chains and update other necessary information. */
2690
2691 static void
2692 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2693 basic_block bb, rtx insn, unsigned int flags)
2694 {
2695 if (insn)
2696 {
2697 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2698 /* If there is a vector in the collection rec, add it to the
2699 insn. A null rec is a signal that the caller will handle the
2700 chain specially. */
2701 if (flags & copy_defs)
2702 {
2703 df_scan_free_ref_vec (insn_rec->defs);
2704 insn_rec->defs
2705 = df_install_refs (bb, &collection_rec->def_vec,
2706 df->def_regs,
2707 &df->def_info, false);
2708 }
2709 if (flags & copy_uses)
2710 {
2711 df_scan_free_ref_vec (insn_rec->uses);
2712 insn_rec->uses
2713 = df_install_refs (bb, &collection_rec->use_vec,
2714 df->use_regs,
2715 &df->use_info, false);
2716 }
2717 if (flags & copy_eq_uses)
2718 {
2719 df_scan_free_ref_vec (insn_rec->eq_uses);
2720 insn_rec->eq_uses
2721 = df_install_refs (bb, &collection_rec->eq_use_vec,
2722 df->eq_use_regs,
2723 &df->use_info, true);
2724 }
2725 if (flags & copy_mw)
2726 {
2727 df_scan_free_mws_vec (insn_rec->mw_hardregs);
2728 insn_rec->mw_hardregs
2729 = df_install_mws (&collection_rec->mw_vec);
2730 }
2731 }
2732 else
2733 {
2734 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2735
2736 df_scan_free_ref_vec (bb_info->artificial_defs);
2737 bb_info->artificial_defs
2738 = df_install_refs (bb, &collection_rec->def_vec,
2739 df->def_regs,
2740 &df->def_info, false);
2741 df_scan_free_ref_vec (bb_info->artificial_uses);
2742 bb_info->artificial_uses
2743 = df_install_refs (bb, &collection_rec->use_vec,
2744 df->use_regs,
2745 &df->use_info, false);
2746 }
2747 }
2748
2749
2750 /* Allocate a ref and initialize its fields. */
2751
2752 static df_ref
2753 df_ref_create_structure (enum df_ref_class cl,
2754 struct df_collection_rec *collection_rec,
2755 rtx reg, rtx *loc,
2756 basic_block bb, struct df_insn_info *info,
2757 enum df_ref_type ref_type,
2758 int ref_flags)
2759 {
2760 df_ref this_ref = NULL;
2761 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2762 struct df_scan_problem_data *problem_data
2763 = (struct df_scan_problem_data *) df_scan->problem_data;
2764
2765 switch (cl)
2766 {
2767 case DF_REF_BASE:
2768 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2769 gcc_checking_assert (loc == NULL);
2770 break;
2771
2772 case DF_REF_ARTIFICIAL:
2773 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2774 this_ref->artificial_ref.bb = bb;
2775 gcc_checking_assert (loc == NULL);
2776 break;
2777
2778 case DF_REF_REGULAR:
2779 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2780 this_ref->regular_ref.loc = loc;
2781 gcc_checking_assert (loc);
2782 break;
2783 }
2784
2785 DF_REF_CLASS (this_ref) = cl;
2786 DF_REF_ID (this_ref) = -1;
2787 DF_REF_REG (this_ref) = reg;
2788 DF_REF_REGNO (this_ref) = regno;
2789 DF_REF_TYPE (this_ref) = ref_type;
2790 DF_REF_INSN_INFO (this_ref) = info;
2791 DF_REF_CHAIN (this_ref) = NULL;
2792 DF_REF_FLAGS (this_ref) = ref_flags;
2793 DF_REF_NEXT_REG (this_ref) = NULL;
2794 DF_REF_PREV_REG (this_ref) = NULL;
2795 DF_REF_ORDER (this_ref) = df->ref_order++;
2796
2797 /* We need to clear this bit because fwprop, and in the future
2798 possibly other optimizations, sometimes create new refs using old
2799 refs as the model. */
2800 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2801
2802 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2803 if (regno < FIRST_PSEUDO_REGISTER
2804 && !DF_REF_IS_ARTIFICIAL (this_ref)
2805 && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
2806 {
2807 if (DF_REF_REG_DEF_P (this_ref))
2808 {
2809 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2810 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2811 }
2812 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2813 && (regno == FRAME_POINTER_REGNUM
2814 || regno == ARG_POINTER_REGNUM)))
2815 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2816 }
2817
2818 if (collection_rec)
2819 {
2820 if (DF_REF_REG_DEF_P (this_ref))
2821 collection_rec->def_vec.safe_push (this_ref);
2822 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2823 collection_rec->eq_use_vec.safe_push (this_ref);
2824 else
2825 collection_rec->use_vec.safe_push (this_ref);
2826 }
2827 else
2828 df_install_ref_incremental (this_ref);
2829
2830 return this_ref;
2831 }
2832
2833
2834 /* Create new references of type DF_REF_TYPE for each part of register REG
2835 at address LOC within INSN of BB. */
2836
2837
2838 static void
2839 df_ref_record (enum df_ref_class cl,
2840 struct df_collection_rec *collection_rec,
2841 rtx reg, rtx *loc,
2842 basic_block bb, struct df_insn_info *insn_info,
2843 enum df_ref_type ref_type,
2844 int ref_flags)
2845 {
2846 unsigned int regno;
2847
2848 gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2849
2850 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2851 if (regno < FIRST_PSEUDO_REGISTER)
2852 {
2853 struct df_mw_hardreg *hardreg = NULL;
2854 struct df_scan_problem_data *problem_data
2855 = (struct df_scan_problem_data *) df_scan->problem_data;
2856 unsigned int i;
2857 unsigned int endregno;
2858 df_ref ref;
2859
2860 if (GET_CODE (reg) == SUBREG)
2861 {
2862 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2863 SUBREG_BYTE (reg), GET_MODE (reg));
2864 endregno = regno + subreg_nregs (reg);
2865 }
2866 else
2867 endregno = END_HARD_REGNO (reg);
2868
2869 /* If this is a multiword hardreg, we create some extra
2870 datastructures that will enable us to easily build REG_DEAD
2871 and REG_UNUSED notes. */
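/* For example, a double-word value living in the hard register pair
{N, N+1} gets one df_mw_hardreg covering N..N+1 plus an individual
df_ref for each of N and N+1.  */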
2872 if (collection_rec
2873 && (endregno != regno + 1) && insn_info)
2874 {
2875 /* Sets to a subreg of a multiword register are partial.
2876 Sets to a non-subreg of a multiword register are not. */
2877 if (GET_CODE (reg) == SUBREG)
2878 ref_flags |= DF_REF_PARTIAL;
2879 ref_flags |= DF_REF_MW_HARDREG;
2880
2881 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2882 hardreg->type = ref_type;
2883 hardreg->flags = ref_flags;
2884 hardreg->mw_reg = reg;
2885 hardreg->start_regno = regno;
2886 hardreg->end_regno = endregno - 1;
2887 hardreg->mw_order = df->ref_order++;
2888 collection_rec->mw_vec.safe_push (hardreg);
2889 }
2890
2891 for (i = regno; i < endregno; i++)
2892 {
2893 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2894 bb, insn_info, ref_type, ref_flags);
2895
2896 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2897 }
2898 }
2899 else
2900 {
2901 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2902 ref_type, ref_flags);
2903 }
2904 }
2905
2906
2907 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2908 covered by the outer mode is smaller than that covered by the inner mode,
2909 is a read-modify-write operation.
2910 This function returns true iff the SUBREG X is such a SUBREG. */
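/* For example, on a typical 32-bit target a store to
   (subreg:SI (reg:DI R) 0) only overwrites half of R, so the old value
   of R is still partially live and the set is treated as a
   read-modify-write; this function returns true for such a SUBREG.  */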
2911
2912 bool
2913 df_read_modify_subreg_p (rtx x)
2914 {
2915 unsigned int isize, osize;
2916 if (GET_CODE (x) != SUBREG)
2917 return false;
2918 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2919 osize = GET_MODE_SIZE (GET_MODE (x));
2920 return isize > osize
2921 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2922 }
2923
2924
2925 /* Process all the registers defined in the rtx pointed to by LOC.
2926 Autoincrement/decrement definitions will be picked up by df_uses_record.
2927 Any change here has to be matched in df_find_hard_reg_defs_1. */
2928
2929 static void
2930 df_def_record_1 (struct df_collection_rec *collection_rec,
2931 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
2932 int flags)
2933 {
2934 rtx dst = *loc;
2935
2936 /* It is legal to have a set destination be a parallel. */
2937 if (GET_CODE (dst) == PARALLEL)
2938 {
2939 int i;
2940 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2941 {
2942 rtx temp = XVECEXP (dst, 0, i);
2943 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2944 df_def_record_1 (collection_rec, &XEXP (temp, 0),
2945 bb, insn_info, flags);
2946 }
2947 return;
2948 }
2949
2950 if (GET_CODE (dst) == STRICT_LOW_PART)
2951 {
2952 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2953
2954 loc = &XEXP (dst, 0);
2955 dst = *loc;
2956 }
2957
2958 if (GET_CODE (dst) == ZERO_EXTRACT)
2959 {
2960 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2961
2962 loc = &XEXP (dst, 0);
2963 dst = *loc;
2964 }
2965
2966 /* At this point if we do not have a reg or a subreg, just return. */
2967 if (REG_P (dst))
2968 {
2969 df_ref_record (DF_REF_REGULAR, collection_rec,
2970 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2971
2972 /* We want to keep sp alive everywhere - by making all
2973 writes to sp also uses of sp. */
2974 if (REGNO (dst) == STACK_POINTER_REGNUM)
2975 df_ref_record (DF_REF_BASE, collection_rec,
2976 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2977 }
2978 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2979 {
2980 if (df_read_modify_subreg_p (dst))
2981 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2982
2983 flags |= DF_REF_SUBREG;
2984
2985 df_ref_record (DF_REF_REGULAR, collection_rec,
2986 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2987 }
2988 }
2989
2990
2991 /* Process all the registers defined in the pattern rtx, X. Any change
2992 here has to be matched in df_find_hard_reg_defs. */
2993
2994 static void
2995 df_defs_record (struct df_collection_rec *collection_rec,
2996 rtx x, basic_block bb, struct df_insn_info *insn_info,
2997 int flags)
2998 {
2999 RTX_CODE code = GET_CODE (x);
3000 int i;
3001
3002 switch (code)
3003 {
3004 case SET:
3005 df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
3006 break;
3007
3008 case CLOBBER:
3009 flags |= DF_REF_MUST_CLOBBER;
3010 df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
3011 break;
3012
3013 case COND_EXEC:
3014 df_defs_record (collection_rec, COND_EXEC_CODE (x),
3015 bb, insn_info, DF_REF_CONDITIONAL);
3016 break;
3017
3018 case PARALLEL:
3019 for (i = 0; i < XVECLEN (x, 0); i++)
3020 df_defs_record (collection_rec, XVECEXP (x, 0, i),
3021 bb, insn_info, flags);
3022 break;
3023 default:
3024 /* No DEFs to record in other cases */
3025 break;
3026 }
3027 }
3028
3029 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
3030 destination of a set or clobber. This has to match the logic in
3031 df_def_record_1. */
3032
3033 static void
3034 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
3035 {
3036 /* It is legal to have a set destination be a parallel. */
3037 if (GET_CODE (dst) == PARALLEL)
3038 {
3039 int i;
3040 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
3041 {
3042 rtx temp = XVECEXP (dst, 0, i);
3043 gcc_assert (GET_CODE (temp) == EXPR_LIST);
3044 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
3045 }
3046 return;
3047 }
3048
3049 if (GET_CODE (dst) == STRICT_LOW_PART)
3050 dst = XEXP (dst, 0);
3051
3052 if (GET_CODE (dst) == ZERO_EXTRACT)
3053 dst = XEXP (dst, 0);
3054
3055 /* At this point if we do not have a reg or a subreg, just return. */
3056 if (REG_P (dst) && HARD_REGISTER_P (dst))
3057 SET_HARD_REG_BIT (*defs, REGNO (dst));
3058 else if (GET_CODE (dst) == SUBREG
3059 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
3060 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
3061 }
3062
3063 /* Set bits in *DEFS for hard registers defined in the pattern X. This
3064 has to match the logic in df_defs_record. */
3065
3066 static void
3067 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
3068 {
3069 RTX_CODE code = GET_CODE (x);
3070 int i;
3071
3072 switch (code)
3073 {
3074 case SET:
3075 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
3076 break;
3077
3078 case CLOBBER:
3079 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
3080 break;
3081
3082 case COND_EXEC:
3083 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
3084 break;
3085
3086 case PARALLEL:
3087 for (i = 0; i < XVECLEN (x, 0); i++)
3088 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
3089 break;
3090 default:
3091 /* No DEFs to record in other cases */
3092 break;
3093 }
3094 }
3095
3096
3097 /* Process all the registers used in the rtx at address LOC. */
3098
3099 static void
3100 df_uses_record (struct df_collection_rec *collection_rec,
3101 rtx *loc, enum df_ref_type ref_type,
3102 basic_block bb, struct df_insn_info *insn_info,
3103 int flags)
3104 {
3105 RTX_CODE code;
3106 rtx x;
3107
3108 retry:
3109 x = *loc;
3110 if (!x)
3111 return;
3112 code = GET_CODE (x);
3113 switch (code)
3114 {
3115 case LABEL_REF:
3116 case SYMBOL_REF:
3117 case CONST:
3118 CASE_CONST_ANY:
3119 case PC:
3120 case CC0:
3121 case ADDR_VEC:
3122 case ADDR_DIFF_VEC:
3123 return;
3124
3125 case CLOBBER:
3126 /* If we are clobbering a MEM, mark any registers inside the address
3127 as being used. */
3128 if (MEM_P (XEXP (x, 0)))
3129 df_uses_record (collection_rec,
3130 &XEXP (XEXP (x, 0), 0),
3131 DF_REF_REG_MEM_STORE,
3132 bb, insn_info,
3133 flags);
3134
3135 /* If we're clobbering a REG then we have a def so ignore. */
3136 return;
3137
3138 case MEM:
3139 df_uses_record (collection_rec,
3140 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3141 bb, insn_info, flags & DF_REF_IN_NOTE);
3142 return;
3143
3144 case SUBREG:
3145 /* A use of a SUBREG only reads part of the register. */
3146 flags |= DF_REF_PARTIAL;
3147 /* If the SUBREG is not of a REG, recurse into the inner expression. */
3148 if (!REG_P (SUBREG_REG (x)))
3149 {
3150 loc = &SUBREG_REG (x);
3151 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
3152 return;
3153 }
3154 /* ... Fall through ... */
3155
3156 case REG:
3157 df_ref_record (DF_REF_REGULAR, collection_rec,
3158 x, loc, bb, insn_info,
3159 ref_type, flags);
3160 return;
3161
3162 case SIGN_EXTRACT:
3163 case ZERO_EXTRACT:
3164 {
3165 df_uses_record (collection_rec,
3166 &XEXP (x, 1), ref_type, bb, insn_info, flags);
3167 df_uses_record (collection_rec,
3168 &XEXP (x, 2), ref_type, bb, insn_info, flags);
3169
3170 /* If the parameters to the zero or sign extract are
3171 constants, strip them off and recurse, otherwise there is
3172 no information that we can gain from this operation. */
3173 if (code == ZERO_EXTRACT)
3174 flags |= DF_REF_ZERO_EXTRACT;
3175 else
3176 flags |= DF_REF_SIGN_EXTRACT;
3177
3178 df_uses_record (collection_rec,
3179 &XEXP (x, 0), ref_type, bb, insn_info, flags);
3180 return;
3181 }
3182 break;
3183
3184 case SET:
3185 {
3186 rtx dst = SET_DEST (x);
3187 gcc_assert (!(flags & DF_REF_IN_NOTE));
3188 df_uses_record (collection_rec,
3189 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
3190
3191 switch (GET_CODE (dst))
3192 {
3193 case SUBREG:
3194 if (df_read_modify_subreg_p (dst))
3195 {
3196 df_uses_record (collection_rec, &SUBREG_REG (dst),
3197 DF_REF_REG_USE, bb, insn_info,
3198 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
3199 break;
3200 }
3201 /* Fall through. */
3202 case REG:
3203 case PARALLEL:
3204 case SCRATCH:
3205 case PC:
3206 case CC0:
3207 break;
3208 case MEM:
3209 df_uses_record (collection_rec, &XEXP (dst, 0),
3210 DF_REF_REG_MEM_STORE, bb, insn_info, flags);
3211 break;
3212 case STRICT_LOW_PART:
3213 {
3214 rtx *temp = &XEXP (dst, 0);
3215 /* A strict_low_part uses the whole REG and not just the
3216 SUBREG. */
3217 dst = XEXP (dst, 0);
3218 df_uses_record (collection_rec,
3219 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3220 DF_REF_REG_USE, bb, insn_info,
3221 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
3222 }
3223 break;
3224 case ZERO_EXTRACT:
3225 {
3226 df_uses_record (collection_rec, &XEXP (dst, 1),
3227 DF_REF_REG_USE, bb, insn_info, flags);
3228 df_uses_record (collection_rec, &XEXP (dst, 2),
3229 DF_REF_REG_USE, bb, insn_info, flags);
3230 if (GET_CODE (XEXP (dst,0)) == MEM)
3231 df_uses_record (collection_rec, &XEXP (dst, 0),
3232 DF_REF_REG_USE, bb, insn_info,
3233 flags);
3234 else
3235 df_uses_record (collection_rec, &XEXP (dst, 0),
3236 DF_REF_REG_USE, bb, insn_info,
3237 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
3238 }
3239 break;
3240
3241 default:
3242 gcc_unreachable ();
3243 }
3244 return;
3245 }
3246
3247 case RETURN:
3248 case SIMPLE_RETURN:
3249 break;
3250
3251 case ASM_OPERANDS:
3252 case UNSPEC_VOLATILE:
3253 case TRAP_IF:
3254 case ASM_INPUT:
3255 {
3256 /* Traditional and volatile asm instructions must be
3257 considered to use and clobber all hard registers, all
3258 pseudo-registers and all of memory. So must TRAP_IF and
3259 UNSPEC_VOLATILE operations.
3260
3261 Consider for instance a volatile asm that changes the fpu
3262 rounding mode. An insn should not be moved across this
3263 even if it only uses pseudo-regs because it might give an
3264 incorrectly rounded result.
3265
3266 However, flow.c's liveness computation did *not* do this,
3267 giving the reasoning as " ?!? Unfortunately, marking all
3268 hard registers as live causes massive problems for the
3269 register allocator and marking all pseudos as live creates
3270 mountains of uninitialized variable warnings."
3271
3272 In order to maintain the status quo with regard to liveness
3273 and uses, we do what flow.c did and just mark any regs we
3274 can find in ASM_OPERANDS as used. In global asm insns are
3275 scanned and regs_asm_clobbered is filled out.
3276
3277 For all ASM_OPERANDS, we must traverse the vector of input
3278 operands. We cannot just fall through here since then we
3279 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3280 which does not indicate a traditional asm, unlike its normal
3281 usage. */
3282 if (code == ASM_OPERANDS)
3283 {
3284 int j;
3285
3286 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3287 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3288 DF_REF_REG_USE, bb, insn_info, flags);
3289 return;
3290 }
3291 break;
3292 }
3293
3294 case VAR_LOCATION:
3295 df_uses_record (collection_rec,
3296 &PAT_VAR_LOCATION_LOC (x),
3297 DF_REF_REG_USE, bb, insn_info, flags);
3298 return;
3299
3300 case PRE_DEC:
3301 case POST_DEC:
3302 case PRE_INC:
3303 case POST_INC:
3304 case PRE_MODIFY:
3305 case POST_MODIFY:
3306 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3307 /* Catch the def of the register being modified. */
3308 df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3309 bb, insn_info,
3310 DF_REF_REG_DEF,
3311 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3312
3313 /* ... Fall through to handle uses ... */
3314
3315 default:
3316 break;
3317 }
3318
3319 /* Recursively scan the operands of this expression. */
3320 {
3321 const char *fmt = GET_RTX_FORMAT (code);
3322 int i;
3323
3324 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3325 {
3326 if (fmt[i] == 'e')
3327 {
3328 /* Tail recursive case: save a function call level. */
3329 if (i == 0)
3330 {
3331 loc = &XEXP (x, 0);
3332 goto retry;
3333 }
3334 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3335 bb, insn_info, flags);
3336 }
3337 else if (fmt[i] == 'E')
3338 {
3339 int j;
3340 for (j = 0; j < XVECLEN (x, i); j++)
3341 df_uses_record (collection_rec,
3342 &XVECEXP (x, i, j), ref_type,
3343 bb, insn_info, flags);
3344 }
3345 }
3346 }
3347
3348 return;
3349 }
3350
3351
3352 /* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3353
3354 static void
3355 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3356 {
3357 unsigned int ix;
3358 df_ref ref;
3359
3360 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3361 {
3362 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3363 {
3364 df_ref use;
3365
3366 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3367 DF_REF_LOC (ref), DF_REF_BB (ref),
3368 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3369 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3370 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3371 }
3372 }
3373 }
3374
3375
3376 /* Get call's extra defs and uses (track caller-saved registers). */
3377
3378 static void
3379 df_get_call_refs (struct df_collection_rec *collection_rec,
3380 basic_block bb,
3381 struct df_insn_info *insn_info,
3382 int flags)
3383 {
3384 rtx note;
3385 bool is_sibling_call;
3386 unsigned int i;
3387 HARD_REG_SET defs_generated;
3388
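/* Collect the hard registers already defined or clobbered by the
call pattern itself, so that duplicate clobber defs are not added
for them below.  */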
3389 CLEAR_HARD_REG_SET (defs_generated);
3390 df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
3391 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3392
3393 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3394 {
3395 if (i == STACK_POINTER_REGNUM)
3396 /* The stack ptr is used (honorarily) by a CALL insn. */
3397 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3398 NULL, bb, insn_info, DF_REF_REG_USE,
3399 DF_REF_CALL_STACK_USAGE | flags);
3400 else if (global_regs[i])
3401 {
3402 /* Calls to const functions cannot access any global registers and
3403 calls to pure functions cannot set them. All other calls may
3404 reference any of the global registers, so they are recorded as
3405 used. */
3406 if (!RTL_CONST_CALL_P (insn_info->insn))
3407 {
3408 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3409 NULL, bb, insn_info, DF_REF_REG_USE, flags);
3410 if (!RTL_PURE_CALL_P (insn_info->insn))
3411 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3412 NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3413 }
3414 }
3415 else if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
3416 /* no clobbers for regs that are the result of the call */
3417 && !TEST_HARD_REG_BIT (defs_generated, i)
3418 && (!is_sibling_call
3419 || !bitmap_bit_p (df->exit_block_uses, i)
3420 || refers_to_regno_p (i, i+1,
3421 crtl->return_rtx, NULL)))
3422 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3423 NULL, bb, insn_info, DF_REF_REG_DEF,
3424 DF_REF_MAY_CLOBBER | flags);
3425 }
3426
3427 /* Record the registers used to pass arguments, and those
3428 explicitly noted as clobbered. */
3429 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3430 note = XEXP (note, 1))
3431 {
3432 if (GET_CODE (XEXP (note, 0)) == USE)
3433 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3434 DF_REF_REG_USE, bb, insn_info, flags);
3435 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3436 {
3437 if (REG_P (XEXP (XEXP (note, 0), 0)))
3438 {
3439 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3440 if (!TEST_HARD_REG_BIT (defs_generated, regno))
3441 df_defs_record (collection_rec, XEXP (note, 0), bb,
3442 insn_info, flags);
3443 }
3444 else
3445 df_uses_record (collection_rec, &XEXP (note, 0),
3446 DF_REF_REG_USE, bb, insn_info, flags);
3447 }
3448 }
3449
3450 return;
3451 }
3452
3453 /* Collect all refs in the INSN. This function is free of any
3454 side effects - it will create and return lists of df_refs in the
3455 COLLECTION_REC without putting those refs into the existing ref
3456 chains and reg chains. */
3457
3458 static void
3459 df_insn_refs_collect (struct df_collection_rec *collection_rec,
3460 basic_block bb, struct df_insn_info *insn_info)
3461 {
3462 rtx note;
3463 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3464
3465 /* Clear out the collection record. */
3466 collection_rec->def_vec.truncate (0);
3467 collection_rec->use_vec.truncate (0);
3468 collection_rec->eq_use_vec.truncate (0);
3469 collection_rec->mw_vec.truncate (0);
3470
3471 /* Process REG_EQUIV/REG_EQUAL notes. */
3472 for (note = REG_NOTES (insn_info->insn); note;
3473 note = XEXP (note, 1))
3474 {
3475 switch (REG_NOTE_KIND (note))
3476 {
3477 case REG_EQUIV:
3478 case REG_EQUAL:
3479 df_uses_record (collection_rec,
3480 &XEXP (note, 0), DF_REF_REG_USE,
3481 bb, insn_info, DF_REF_IN_NOTE);
3482 break;
3483 case REG_NON_LOCAL_GOTO:
3484 /* The frame ptr is used by a non-local goto. */
3485 df_ref_record (DF_REF_BASE, collection_rec,
3486 regno_reg_rtx[FRAME_POINTER_REGNUM],
3487 NULL, bb, insn_info,
3488 DF_REF_REG_USE, 0);
3489 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3490 df_ref_record (DF_REF_BASE, collection_rec,
3491 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3492 NULL, bb, insn_info,
3493 DF_REF_REG_USE, 0);
3494 #endif
3495 break;
3496 default:
3497 break;
3498 }
3499 }
3500
3501 /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
3502 uses from CALL_INSN_FUNCTION_USAGE. */
3503 if (CALL_P (insn_info->insn))
3504 df_get_call_refs (collection_rec, bb, insn_info,
3505 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3506
3507 /* Record other defs. These should be mostly for DF_REF_REGULAR, so
3508 that a qsort on the defs is unnecessary in most cases. */
3509 df_defs_record (collection_rec,
3510 PATTERN (insn_info->insn), bb, insn_info, 0);
3511
3512 /* Record the register uses. */
3513 df_uses_record (collection_rec,
3514 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3515
3516 /* DF_REF_CONDITIONAL needs corresponding USES. */
3517 if (is_cond_exec)
3518 df_get_conditional_uses (collection_rec);
3519
3520 df_canonize_collection_rec (collection_rec);
3521 }
3522
3523 /* Recompute the luids for the insns in BB. */
3524
3525 void
3526 df_recompute_luids (basic_block bb)
3527 {
3528 rtx insn;
3529 int luid = 0;
3530
3531 df_grow_insn_info ();
3532
3533 /* Scan the block an insn at a time from beginning to end. */
3534 FOR_BB_INSNS (bb, insn)
3535 {
3536 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3537 /* Inserting labels does not always trigger the incremental
3538 rescanning. */
3539 if (!insn_info)
3540 {
3541 gcc_assert (!INSN_P (insn));
3542 insn_info = df_insn_create_insn_record (insn);
3543 }
3544
3545 DF_INSN_INFO_LUID (insn_info) = luid;
3546 if (INSN_P (insn))
3547 luid++;
3548 }
3549 }
3550
3551
3552 /* Collect all artificial refs at the block level for BB and add them
3553 to COLLECTION_REC. */
3554
3555 static void
3556 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3557 {
3558 collection_rec->def_vec.truncate (0);
3559 collection_rec->use_vec.truncate (0);
3560 collection_rec->eq_use_vec.truncate (0);
3561 collection_rec->mw_vec.truncate (0);
3562
3563 if (bb->index == ENTRY_BLOCK)
3564 {
3565 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3566 return;
3567 }
3568 else if (bb->index == EXIT_BLOCK)
3569 {
3570 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3571 return;
3572 }
3573
3574 #ifdef EH_RETURN_DATA_REGNO
3575 if (bb_has_eh_pred (bb))
3576 {
3577 unsigned int i;
3578 /* Mark the registers that will contain data for the handler. */
3579 for (i = 0; ; ++i)
3580 {
3581 unsigned regno = EH_RETURN_DATA_REGNO (i);
3582 if (regno == INVALID_REGNUM)
3583 break;
3584 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3585 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3586 }
3587 }
3588 #endif
3589
3590 /* Add the hard_frame_pointer if this block is the target of a
3591 non-local goto. */
3592 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3593 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3594 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3595
3596 /* Add the artificial uses. */
3597 if (bb->index >= NUM_FIXED_BLOCKS)
3598 {
3599 bitmap_iterator bi;
3600 unsigned int regno;
3601 bitmap au = bb_has_eh_pred (bb)
3602 ? &df->eh_block_artificial_uses
3603 : &df->regular_block_artificial_uses;
3604
3605 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3606 {
3607 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3608 bb, NULL, DF_REF_REG_USE, 0);
3609 }
3610 }
3611
3612 df_canonize_collection_rec (collection_rec);
3613 }
3614
3615
3616 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3617
3618 void
3619 df_bb_refs_record (int bb_index, bool scan_insns)
3620 {
3621 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3622 rtx insn;
3623 int luid = 0;
3624
3625 if (!df)
3626 return;
3627
3628 df_collection_rec collection_rec;
3629 df_grow_bb_info (df_scan);
3630 if (scan_insns)
3631 /* Scan the block an insn at a time from beginning to end. */
3632 FOR_BB_INSNS (bb, insn)
3633 {
3634 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3635 gcc_assert (!insn_info);
3636
3637 insn_info = df_insn_create_insn_record (insn);
3638 if (INSN_P (insn))
3639 {
3640 /* Record refs within INSN. */
3641 DF_INSN_INFO_LUID (insn_info) = luid++;
3642 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3643 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
3644 }
3645 DF_INSN_INFO_LUID (insn_info) = luid;
3646 }
3647
3648 /* Other block level artificial refs */
3649 df_bb_refs_collect (&collection_rec, bb);
3650 df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
3651
3652 /* Now that the block has been processed, set the block as dirty so
3653 LR and LIVE will get it processed. */
3654 df_set_bb_dirty (bb);
3655 }
3656
3657
3658 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3659 block. */
3660
3661 static void
3662 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3663 {
3664 #ifdef EH_USES
3665 unsigned int i;
3666 #endif
3667
3668 bitmap_clear (regular_block_artificial_uses);
3669
3670 if (reload_completed)
3671 {
3672 if (frame_pointer_needed)
3673 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3674 }
3675 else
3676 /* Before reload, there are a few registers that must be forced
3677 live everywhere -- which might not already be the case for
3678 blocks within infinite loops. */
3679 {
3680 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3681
3682 /* Any reference to any pseudo before reload is a potential
3683 reference of the frame pointer. */
3684 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3685
3686 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3687 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3688 #endif
3689
3690 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3691 /* Pseudos with argument area equivalences may require
3692 reloading via the argument pointer. */
3693 if (fixed_regs[ARG_POINTER_REGNUM])
3694 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3695 #endif
3696
3697 /* Any constant, or pseudo with constant equivalences, may
3698 require reloading from memory using the pic register. */
3699 if (picreg != INVALID_REGNUM
3700 && fixed_regs[picreg])
3701 bitmap_set_bit (regular_block_artificial_uses, picreg);
3702 }
3703 /* The all-important stack pointer must always be live. */
3704 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3705
3706 #ifdef EH_USES
3707 /* EH_USES registers are used:
3708 1) at all insns that might throw (calls or with -fnon-call-exceptions
3709 trapping insns)
3710 2) in all EH edges
3711 3) to support backtraces and/or debugging, anywhere between their
3712 initialization and where the saved registers are restored
3713 from them, including the cases where we don't reach the epilogue
3714 (noreturn call or infinite loop). */
3715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3716 if (EH_USES (i))
3717 bitmap_set_bit (regular_block_artificial_uses, i);
3718 #endif
3719 }
3720
3721
3722 /* Get the artificial use set for an eh block. */
3723
3724 static void
3725 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3726 {
3727 bitmap_clear (eh_block_artificial_uses);
3728
3729 /* The following code (down through the arg_pointer setting) APPEARS
3730 to be necessary because there is nothing that actually
3731 describes what the exception handling code may actually need
3732 to keep alive. */
3733 if (reload_completed)
3734 {
3735 if (frame_pointer_needed)
3736 {
3737 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3738 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3739 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3740 #endif
3741 }
3742 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3743 if (fixed_regs[ARG_POINTER_REGNUM])
3744 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3745 #endif
3746 }
3747 }
3748
3749
3750 \f
3751 /*----------------------------------------------------------------------------
3752 Specialized hard register scanning functions.
3753 ----------------------------------------------------------------------------*/
3754
3755
3756 /* Mark a register in SET. Hard registers in large modes get all
3757 of their component registers set as well. */
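/* E.g. a DImode value in hard register N normally occupies registers
   N and N+1 on a 32-bit target, so both bits are set.  */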
3758
3759 static void
3760 df_mark_reg (rtx reg, void *vset)
3761 {
3762 bitmap set = (bitmap) vset;
3763 int regno = REGNO (reg);
3764
3765 gcc_assert (GET_MODE (reg) != BLKmode);
3766
3767 if (regno < FIRST_PSEUDO_REGISTER)
3768 {
3769 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3770 bitmap_set_range (set, regno, n);
3771 }
3772 else
3773 bitmap_set_bit (set, regno);
3774 }
3775
3776
3777 /* Set the bit for regs that are considered being defined at the entry. */
3778
3779 static void
3780 df_get_entry_block_def_set (bitmap entry_block_defs)
3781 {
3782 rtx r;
3783 int i;
3784
3785 bitmap_clear (entry_block_defs);
3786
3787 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3788 {
3789 if (global_regs[i])
3790 bitmap_set_bit (entry_block_defs, i);
3791 if (FUNCTION_ARG_REGNO_P (i))
3792 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3793 }
3794
3795 /* The always important stack pointer. */
3796 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3797
3798 /* Once the prologue has been generated, all of these registers
3799 should just show up in the first regular block. */
3800 if (HAVE_prologue && epilogue_completed)
3801 {
3802 /* Defs for the callee saved registers are inserted so that the
3803 pushes have some defining location. */
3804 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3805 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3806 bitmap_set_bit (entry_block_defs, i);
3807 }
3808
3809 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3810 if (r && REG_P (r))
3811 bitmap_set_bit (entry_block_defs, REGNO (r));
3812
3813 /* If the function has an incoming STATIC_CHAIN, it has to show up
3814 in the entry def set. */
3815 r = targetm.calls.static_chain (current_function_decl, true);
3816 if (r && REG_P (r))
3817 bitmap_set_bit (entry_block_defs, REGNO (r));
3818
3819 if ((!reload_completed) || frame_pointer_needed)
3820 {
3821 /* Any reference to any pseudo before reload is a potential
3822 reference of the frame pointer. */
3823 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3824 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3825 /* If they are different, also mark the hard frame pointer as live. */
3826 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3827 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3828 #endif
3829 }
3830
3831 /* These registers are live everywhere. */
3832 if (!reload_completed)
3833 {
3834 #ifdef PIC_OFFSET_TABLE_REGNUM
3835 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3836 #endif
3837
3838 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3839 /* Pseudos with argument area equivalences may require
3840 reloading via the argument pointer. */
3841 if (fixed_regs[ARG_POINTER_REGNUM])
3842 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3843 #endif
3844
3845 #ifdef PIC_OFFSET_TABLE_REGNUM
3846 /* Any constant, or pseudo with constant equivalences, may
3847 require reloading from memory using the pic register. */
3848 if (picreg != INVALID_REGNUM
3849 && fixed_regs[picreg])
3850 bitmap_set_bit (entry_block_defs, picreg);
3851 #endif
3852 }
3853
3854 #ifdef INCOMING_RETURN_ADDR_RTX
3855 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3856 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3857 #endif
3858
3859 targetm.extra_live_on_entry (entry_block_defs);
3860 }
3861
3862
3863 /* Collect the refs for the (conservative) set of hard registers that
3864 are defined on entry to the function.
3865 ENTRY_BLOCK_DEFS determines which register references to
3866 include. */
3867
3868 static void
3869 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3870 bitmap entry_block_defs)
3871 {
3872 unsigned int i;
3873 bitmap_iterator bi;
3874
3875 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3876 {
3877 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3878 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3879 }
3880
3881 df_canonize_collection_rec (collection_rec);
3882 }
3883
3884
3885 /* Record the (conservative) set of hard registers that are defined on
3886 entry to the function. */
3887
3888 static void
3889 df_record_entry_block_defs (bitmap entry_block_defs)
3890 {
3891 struct df_collection_rec collection_rec;
3892 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3893
3894 /* Process bb_refs chain. */
3895 df_refs_add_to_chains (&collection_rec,
3896 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3897 NULL,
3898 copy_defs);
3899 }
3900
3901
3902 /* Update the defs in the entry block. */
3903
3904 void
3905 df_update_entry_block_defs (void)
3906 {
3907 bitmap_head refs;
3908 bool changed = false;
3909
3910 bitmap_initialize (&refs, &df_bitmap_obstack);
3911 df_get_entry_block_def_set (&refs);
3912 if (df->entry_block_defs)
3913 {
3914 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3915 {
3916 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
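          /* Drop the def-use/use-def links hanging off the stale
             artificial defs before freeing the refs themselves; the
             entry block defs are rebuilt below.  */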
3917 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3918 df_ref_chain_delete (bb_info->artificial_defs);
3919 bb_info->artificial_defs = NULL;
3920 changed = true;
3921 }
3922 }
3923 else
3924 {
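      /* The entry block defs bitmap is normally allocated when the
         df_scan problem is set up, so this branch should be unreachable;
         the allocation below is purely defensive.  */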
3925 struct df_scan_problem_data *problem_data
3926 = (struct df_scan_problem_data *) df_scan->problem_data;
3927 gcc_unreachable ();
3928 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3929 changed = true;
3930 }
3931
3932 if (changed)
3933 {
3934 df_record_entry_block_defs (&refs);
3935 bitmap_copy (df->entry_block_defs, &refs);
3936 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3937 }
3938 bitmap_clear (&refs);
3939 }
3940
3941
3942 /* Set the bits for regs that are considered to be used at the exit. */
3943
3944 static void
3945 df_get_exit_block_use_set (bitmap exit_block_uses)
3946 {
3947 unsigned int i;
3948 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3949
3950 bitmap_clear (exit_block_uses);
3951
3952 /* Stack pointer is always live at the exit. */
3953 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3954
3955 /* Mark the frame pointer if needed at the end of the function.
3956 If we end up eliminating it, it will be removed from the live
3957 list of each basic block by reload. */
3958
3959 if ((!reload_completed) || frame_pointer_needed)
3960 {
3961 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3962 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3963 /* If they are different, also mark the hard frame pointer as live. */
3964 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3965 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3966 #endif
3967 }
3968
3969 /* Many architectures have a GP register even without flag_pic.
3970 Assume the pic register is not in use, or will be handled by
3971 other means, if it is not fixed. */
3972 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3973 && picreg != INVALID_REGNUM
3974 && fixed_regs[picreg])
3975 bitmap_set_bit (exit_block_uses, picreg);
3976
3977 /* Mark all global registers, and all registers used by the
3978 epilogue as being live at the end of the function since they
3979 may be referenced by our caller. */
3980 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3981 if (global_regs[i] || EPILOGUE_USES (i))
3982 bitmap_set_bit (exit_block_uses, i);
3983
3984 if (HAVE_epilogue && epilogue_completed)
3985 {
3986 /* Mark all call-saved registers that we actually used. */
3987 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3988 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3989 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3990 bitmap_set_bit (exit_block_uses, i);
3991 }
3992
3993 #ifdef EH_RETURN_DATA_REGNO
3994 /* Mark the registers that will contain data for the handler. */
3995 if (reload_completed && crtl->calls_eh_return)
3996 for (i = 0; ; ++i)
3997 {
3998 unsigned regno = EH_RETURN_DATA_REGNO (i);
3999 if (regno == INVALID_REGNUM)
4000 break;
4001 bitmap_set_bit (exit_block_uses, regno);
4002 }
4003 #endif
4004
4005 #ifdef EH_RETURN_STACKADJ_RTX
4006 if ((!HAVE_epilogue || ! epilogue_completed)
4007 && crtl->calls_eh_return)
4008 {
4009 rtx tmp = EH_RETURN_STACKADJ_RTX;
4010 if (tmp && REG_P (tmp))
4011 df_mark_reg (tmp, exit_block_uses);
4012 }
4013 #endif
4014
4015 #ifdef EH_RETURN_HANDLER_RTX
4016 if ((!HAVE_epilogue || ! epilogue_completed)
4017 && crtl->calls_eh_return)
4018 {
4019 rtx tmp = EH_RETURN_HANDLER_RTX;
4020 if (tmp && REG_P (tmp))
4021 df_mark_reg (tmp, exit_block_uses);
4022 }
4023 #endif
4024
4025 /* Mark function return value. */
4026 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4027 }
4028
4029
4030 /* Collect the refs of the hard registers that are used in the exit
4031 block. EXIT_BLOCK_USES determines which registers to include. */
4032
4033 static void
4034 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4035 {
4036 unsigned int i;
4037 bitmap_iterator bi;
4038
4039 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4040 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4041 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
4042
4043 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4044 /* It is deliberate that this is not put in the exit block uses but
4045 I do not know why. */
4046 if (reload_completed
4047 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4048 && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
4049 && fixed_regs[ARG_POINTER_REGNUM])
4050 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4051 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
4052 #endif
4053
4054 df_canonize_collection_rec (collection_rec);
4055 }
4056
4057
4058 /* Record the set of hard registers that are used in the exit block.
4059 EXIT_BLOCK_USES determines which bits to include. */
4060
4061 static void
4062 df_record_exit_block_uses (bitmap exit_block_uses)
4063 {
4064 struct df_collection_rec collection_rec;
4065 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4066
4067 /* Process bb_refs chain. */
4068 df_refs_add_to_chains (&collection_rec,
4069 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
4070 NULL,
4071 copy_uses);
4072 }
4073
4074
4075 /* Update the uses in the exit block. */
4076
4077 void
4078 df_update_exit_block_uses (void)
4079 {
4080 bitmap_head refs;
4081 bool changed = false;
4082
4083 bitmap_initialize (&refs, &df_bitmap_obstack);
4084 df_get_exit_block_use_set (&refs);
4085 if (df->exit_block_uses)
4086 {
4087 if (!bitmap_equal_p (df->exit_block_uses, &refs))
4088 {
4089 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4090 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4091 df_ref_chain_delete (bb_info->artificial_uses);
4092 bb_info->artificial_uses = NULL;
4093 changed = true;
4094 }
4095 }
4096 else
4097 {
4098 struct df_scan_problem_data *problem_data
4099 = (struct df_scan_problem_data *) df_scan->problem_data;
4100 gcc_unreachable ();
4101 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4102 changed = true;
4103 }
4104
4105 if (changed)
4106 {
4107 df_record_exit_block_uses (&refs);
4108 bitmap_copy (df->exit_block_uses, &refs);
4109 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
4110 }
4111 bitmap_clear (&refs);
4112 }
4113
4114 static bool initialized = false;
4115
4116
4117 /* Initialize some platform-specific structures. */
4118
4119 void
4120 df_hard_reg_init (void)
4121 {
4122 #ifdef ELIMINABLE_REGS
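  /* ELIMINABLE_REGS is the target's table of (from, to) register
     elimination pairs; only the FROM registers are collected into
     elim_reg_set below.  */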
4123 int i;
4124 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4125 #endif
4126 if (initialized)
4127 return;
4128
4129 /* Record which registers will be eliminated. We use this in
4130 mark_used_regs. */
4131 CLEAR_HARD_REG_SET (elim_reg_set);
4132
4133 #ifdef ELIMINABLE_REGS
4134 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4135 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4136 #else
4137 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4138 #endif
4139
4140 initialized = true;
4141 }
4142
4143
4144 /* Recompute the parts of scanning that are based on regs_ever_live
4145 because something changed in that array. */
4146
4147 void
4148 df_update_entry_exit_and_calls (void)
4149 {
4150 basic_block bb;
4151
4152 df_update_entry_block_defs ();
4153 df_update_exit_block_uses ();
4154
4155 /* The call insns need to be rescanned because there may be changes
4156 in the set of registers clobbered across the call. */
4157 FOR_EACH_BB_FN (bb, cfun)
4158 {
4159 rtx insn;
4160 FOR_BB_INSNS (bb, insn)
4161 {
4162 if (INSN_P (insn) && CALL_P (insn))
4163 df_insn_rescan (insn);
4164 }
4165 }
4166 }
4167
4168
4169 /* Return true if hard REG is actually used in some instruction.
4170 There are a fair number of conditions that affect the setting of
4171 this array. See the comment in df.h for df->hard_regs_live_count
4172 for the conditions under which this array is set. */
4173
4174 bool
4175 df_hard_reg_used_p (unsigned int reg)
4176 {
4177 return df->hard_regs_live_count[reg] != 0;
4178 }
4179
4180
4181 /* Return a count of the number of times REG is actually used in some
4182 instruction. There are a fair number of conditions that affect the
4183 setting of this array. See the comment in df.h for
4184 df->hard_regs_live_count for the conditions under which this array
4185 is set. */
4186
4187
4188 unsigned int
4189 df_hard_reg_used_count (unsigned int reg)
4190 {
4191 return df->hard_regs_live_count[reg];
4192 }
4193
4194
4195 /* Get the value of regs_ever_live[REGNO]. */
4196
4197 bool
4198 df_regs_ever_live_p (unsigned int regno)
4199 {
4200 return regs_ever_live[regno];
4201 }
4202
4203
4204 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
4205 to change, schedule that change for the next update. */
4206
4207 void
4208 df_set_regs_ever_live (unsigned int regno, bool value)
4209 {
4210 if (regs_ever_live[regno] == value)
4211 return;
4212
4213 regs_ever_live[regno] = value;
4214 if (df)
4215 df->redo_entry_and_exit = true;
4216 }
4217
4218
4219 /* Compute "regs_ever_live" information from the underlying df
4220 information. Set the vector to all false if RESET. */
4221
4222 void
4223 df_compute_regs_ever_live (bool reset)
4224 {
4225 unsigned int i;
4226 bool changed = df->redo_entry_and_exit;
4227
4228 if (reset)
4229 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4230
4231 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4232 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4233 {
4234 regs_ever_live[i] = true;
4235 changed = true;
4236 }
4237 if (changed)
4238 df_update_entry_exit_and_calls ();
4239 df->redo_entry_and_exit = false;
4240 }
4241
4242 \f
4243 /*----------------------------------------------------------------------------
4244 Dataflow ref information verification functions.
4245
4246 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4247 df_reg_chain_verify_unmarked (refs)
4248 df_refs_verify (vec<df_ref, va_heap> *, df_ref *, bool)
4249 df_mws_verify (vec<df_mw_hardreg_ptr, va_heap> *, df_mw_hardreg **, bool)
4250 df_insn_refs_verify (collection_rec, bb, insn, bool)
4251 df_bb_refs_verify (bb, refs, bool)
4252 df_bb_verify (bb)
4253 df_exit_block_bitmap_verify (bool)
4254 df_entry_block_bitmap_verify (bool)
4255 df_scan_verify ()
4256 ----------------------------------------------------------------------------*/
4257
4258
4259 /* Mark all refs in the reg chain. Verify that all of the registers
4260 are in the correct chain. */
4261
4262 static unsigned int
4263 df_reg_chain_mark (df_ref refs, unsigned int regno,
4264 bool is_def, bool is_eq_use)
4265 {
4266 unsigned int count = 0;
4267 df_ref ref;
4268 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4269 {
4270 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4271
4272 /* If there are no def-use or use-def chains, make sure that all
4273 of the chains are clear. */
4274 if (!df_chain)
4275 gcc_assert (!DF_REF_CHAIN (ref));
4276
4277 /* Check to make sure the ref is in the correct chain. */
4278 gcc_assert (DF_REF_REGNO (ref) == regno);
4279 if (is_def)
4280 gcc_assert (DF_REF_REG_DEF_P (ref));
4281 else
4282 gcc_assert (!DF_REF_REG_DEF_P (ref));
4283
4284 if (is_eq_use)
4285 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4286 else
4287 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4288
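      /* The per-register ref chain is doubly linked; check that the
         next element points back to this one.  */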
4289 if (DF_REF_NEXT_REG (ref))
4290 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4291 count++;
4292 DF_REF_REG_MARK (ref);
4293 }
4294 return count;
4295 }
4296
4297
4298 /* Verify that all of the registers in the chain are unmarked. */
4299
4300 static void
4301 df_reg_chain_verify_unmarked (df_ref refs)
4302 {
4303 df_ref ref;
4304 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4305 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4306 }
4307
4308
4309 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4310
4311 static bool
4312 df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref *old_rec,
4313 bool abort_if_fail)
4314 {
4315 unsigned int ix;
4316 df_ref new_ref;
4317
4318 FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
4319 {
4320 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4321 {
4322 if (abort_if_fail)
4323 gcc_assert (0);
4324 else
4325 return false;
4326 }
4327
4328 /* abort_if_fail is set when this is called from the function-level
4329 verifier; in that context, mark this ref as seen by clearing its mark. */
4330 if (abort_if_fail)
4331 {
4332 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4333 DF_REF_REG_UNMARK (*old_rec);
4334 }
4335
4336 old_rec++;
4337 }
4338
4339 if (abort_if_fail)
4340 gcc_assert (*old_rec == NULL);
4341 else
4342 return *old_rec == NULL;
4343 return false;
4344 }
4345
4346
4347 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4348
4349 static bool
4350 df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
4351 struct df_mw_hardreg **old_rec,
4352 bool abort_if_fail)
4353 {
4354 unsigned int ix;
4355 struct df_mw_hardreg *new_reg;
4356
4357 FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
4358 {
4359 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4360 {
4361 if (abort_if_fail)
4362 gcc_assert (0);
4363 else
4364 return false;
4365 }
4366 old_rec++;
4367 }
4368
4369 if (abort_if_fail)
4370 gcc_assert (*old_rec == NULL);
4371 else
4372 return *old_rec == NULL;
4373 return false;
4374 }
4375
4376
4377 /* Return true if the existing insn refs information is complete and
4378 correct. Otherwise (i.e. if there are any missing or extra refs),
4379 return false; the freshly collected refs are left in COLLECTION_REC.
4380
4381 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4382 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4383 verification mode instead of the whole function, so unmark
4384 everything.
4385
4386 If ABORT_IF_FAIL is set, this function never returns false. */
4387
4388 static bool
4389 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4390 basic_block bb,
4391 rtx insn,
4392 bool abort_if_fail)
4393 {
4394 bool ret1, ret2, ret3, ret4;
4395 unsigned int uid = INSN_UID (insn);
4396 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4397
4398 df_insn_refs_collect (collection_rec, bb, insn_info);
4399
4400 if (!DF_INSN_UID_DEFS (uid))
4401 {
4402 /* The insn_rec was created but it was never filled out. */
4403 if (abort_if_fail)
4404 gcc_assert (0);
4405 else
4406 return false;
4407 }
4408
4409 /* Unfortunately we cannot opt out early if one of these is not
4410 right because the marks will not get cleared. */
4411 ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4412 abort_if_fail);
4413 ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
4414 abort_if_fail);
4415 ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4416 abort_if_fail);
4417 ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4418 abort_if_fail);
4419 return (ret1 && ret2 && ret3 && ret4);
4420 }
4421
4422
4423 /* Return true if all refs in the basic block are correct and complete.
4424 As a side effect, every ref that is verified has its DF_REF_MARK
4425 bit cleared (the bits were set earlier by df_reg_chain_mark). */
4426
4427 static bool
4428 df_bb_verify (basic_block bb)
4429 {
4430 rtx insn;
4431 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4432 struct df_collection_rec collection_rec;
4433
4434 gcc_assert (bb_info);
4435
4436 /* Scan the block, one insn at a time, from end to beginning. */
4437 FOR_BB_INSNS_REVERSE (bb, insn)
4438 {
4439 if (!INSN_P (insn))
4440 continue;
4441 df_insn_refs_verify (&collection_rec, bb, insn, true);
4442 df_free_collection_rec (&collection_rec);
4443 }
4444
4445 /* Do the artificial defs and uses. */
4446 df_bb_refs_collect (&collection_rec, bb);
4447 df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4448 df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4449 df_free_collection_rec (&collection_rec);
4450
4451 return true;
4452 }
4453
4454
4455 /* Returns true if the entry block has a correct and complete df_ref set.
4456 If not, it either aborts (if ABORT_IF_FAIL is true) or returns false. */
4457
4458 static bool
4459 df_entry_block_bitmap_verify (bool abort_if_fail)
4460 {
4461 bitmap_head entry_block_defs;
4462 bool is_eq;
4463
4464 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4465 df_get_entry_block_def_set (&entry_block_defs);
4466
4467 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4468
4469 if (!is_eq && abort_if_fail)
4470 {
4471 fprintf (stderr, "entry_block_defs = ");
4472 df_print_regset (stderr, &entry_block_defs);
4473 fprintf (stderr, "df->entry_block_defs = ");
4474 df_print_regset (stderr, df->entry_block_defs);
4475 gcc_assert (0);
4476 }
4477
4478 bitmap_clear (&entry_block_defs);
4479
4480 return is_eq;
4481 }
4482
4483
4484 /* Returns true if the exit block has a correct and complete df_ref set.
4485 If not, it either aborts (if ABORT_IF_FAIL is true) or returns false. */
4486
4487 static bool
4488 df_exit_block_bitmap_verify (bool abort_if_fail)
4489 {
4490 bitmap_head exit_block_uses;
4491 bool is_eq;
4492
4493 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4494 df_get_exit_block_use_set (&exit_block_uses);
4495
4496 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4497
4498 if (!is_eq && abort_if_fail)
4499 {
4500 fprintf (stderr, "exit_block_uses = ");
4501 df_print_regset (stderr, &exit_block_uses);
4502 fprintf (stderr, "df->exit_block_uses = ");
4503 df_print_regset (stderr, df->exit_block_uses);
4504 gcc_assert (0);
4505 }
4506
4507 bitmap_clear (&exit_block_uses);
4508
4509 return is_eq;
4510 }
4511
4512
4513 /* Verify that the df_ref information for all insns in all blocks is
4514 correct and complete. */
4515
4516 void
4517 df_scan_verify (void)
4518 {
4519 unsigned int i;
4520 basic_block bb;
4521 bitmap_head regular_block_artificial_uses;
4522 bitmap_head eh_block_artificial_uses;
4523
4524 if (!df)
4525 return;
4526
4527 /* Verification is a 4 step process. */
4528
4529 /* (1) All of the refs are marked by going through the reg chains. */
4530 for (i = 0; i < DF_REG_SIZE (df); i++)
4531 {
4532 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4533 == DF_REG_DEF_COUNT (i));
4534 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4535 == DF_REG_USE_COUNT (i));
4536 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4537 == DF_REG_EQ_USE_COUNT (i));
4538 }
4539
4540 /* (2) There are various bitmaps whose value may change over the
4541 course of the compilation. This step recomputes them to make
4542 sure that they have not slipped out of date. */
4543 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4544 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4545
4546 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4547 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4548
4549 bitmap_ior_into (&eh_block_artificial_uses,
4550 &regular_block_artificial_uses);
4551
4552 /* Check artificial_uses bitmaps didn't change. */
4553 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4554 &df->regular_block_artificial_uses));
4555 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4556 &df->eh_block_artificial_uses));
4557
4558 bitmap_clear (&regular_block_artificial_uses);
4559 bitmap_clear (&eh_block_artificial_uses);
4560
4561 /* Verify the entry block and exit block. These only verify the bitmaps;
4562 the refs are verified in df_bb_verify. */
4563 df_entry_block_bitmap_verify (true);
4564 df_exit_block_bitmap_verify (true);
4565
4566 /* (3) All of the insns in all of the blocks are traversed and the
4567 marks are cleared both in the artificial refs attached to the
4568 blocks and the real refs inside the insns. It is a failure to
4569 clear a mark that has not been set as this means that the ref in
4570 the block or insn was not in the reg chain. */
4571
4572 FOR_ALL_BB_FN (bb, cfun)
4573 df_bb_verify (bb);
4574
4575 /* (4) All of the reg chains are traversed a second time. This time
4576 a check is made that the marks are clear. A set mark would come
4577 from a ref that is not in any insn or basic block. */
4578
4579 for (i = 0; i < DF_REG_SIZE (df); i++)
4580 {
4581 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4582 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4583 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4584 }
4585 }