re PR debug/48866 (gcc hangs when -g is set)
[gcc.git] / gcc / df-problems.c
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "target.h"
43 #include "timevar.h"
44 #include "df.h"
45 #include "except.h"
46 #include "dce.h"
47 #include "vecprim.h"
48
49 /* Note that turning REG_DEAD_DEBUGGING on will cause
50 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
51 addresses in the dumps. */
52 #if 0
53 #define REG_DEAD_DEBUGGING
54 #endif
55
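/* A regno with more than this many defs is handled with the sparse
   (per-regno) kill representation described in the comment ahead of
   struct df_rd_problem_data below.  */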
56 #define DF_SPARSE_THRESHOLD 32
57
58 static bitmap_head seen_in_block;
59 static bitmap_head seen_in_insn;
60
61 \f
62 /*----------------------------------------------------------------------------
63 Public access functions for the dataflow problems.
64 ----------------------------------------------------------------------------*/
65 /* Get the live-out set for BB no matter which problem happens to be
66 defined. This function is used by the register allocators, which
67 choose different dataflow problems depending on the optimization
68 level. */
69
70 bitmap
71 df_get_live_out (basic_block bb)
72 {
73 gcc_assert (df_lr);
74
75 if (df_live)
76 return DF_LIVE_OUT (bb);
77 else
78 return DF_LR_OUT (bb);
79 }
80
81 /* Get the live-in set for BB no matter which problem happens to be
82 defined. This function is used by the register allocators, which
83 choose different dataflow problems depending on the optimization
84 level. */
85
86 bitmap
87 df_get_live_in (basic_block bb)
88 {
89 gcc_assert (df_lr);
90
91 if (df_live)
92 return DF_LIVE_IN (bb);
93 else
94 return DF_LR_IN (bb);
95 }
96
97 /*----------------------------------------------------------------------------
98 Utility functions.
99 ----------------------------------------------------------------------------*/
100
101 /* Generic versions to get the void* version of the block info. Only
102 used inside the problem instance vectors. */
103
104 /* Dump a def-use or use-def chain for REF to FILE. */
105
106 void
107 df_chain_dump (struct df_link *link, FILE *file)
108 {
109 fprintf (file, "{ ");
110 for (; link; link = link->next)
111 {
112 fprintf (file, "%c%d(bb %d insn %d) ",
113 DF_REF_REG_DEF_P (link->ref)
114 ? 'd'
115 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
116 DF_REF_ID (link->ref),
117 DF_REF_BBNO (link->ref),
118 DF_REF_IS_ARTIFICIAL (link->ref)
119 ? -1 : DF_REF_INSN_UID (link->ref));
120 }
121 fprintf (file, "}");
122 }
123
124
125 /* Print some basic block info as part of df_dump. */
126
127 void
128 df_print_bb_index (basic_block bb, FILE *file)
129 {
130 edge e;
131 edge_iterator ei;
132
133 fprintf (file, "\n( ");
134 FOR_EACH_EDGE (e, ei, bb->preds)
135 {
136 basic_block pred = e->src;
137 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
138 }
139 fprintf (file, ")->[%d]->( ", bb->index);
140 FOR_EACH_EDGE (e, ei, bb->succs)
141 {
142 basic_block succ = e->dest;
143 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
144 }
145 fprintf (file, ")\n");
146 }
147
148 \f
149 /*----------------------------------------------------------------------------
150 REACHING DEFINITIONS
151
152 Find the locations in the function where each definition site for a
153 pseudo reaches. In and out bitvectors are built for each basic
154 block. The id field in the ref is used to index into these sets.
155 See df.h for details.
156 ----------------------------------------------------------------------------*/
157
158 /* This problem plays a large number of games for the sake of
159 efficiency.
160
161 1) The order of the bits in the bitvectors. After the scanning
162 phase, all of the defs are sorted. All of the defs for the reg 0
163 are first, followed by all defs for reg 1 and so on.
164
165 2) There are two kill sets, one if the number of defs is less than
166 or equal to DF_SPARSE_THRESHOLD and another if the number of defs
167 is greater.
168
169 <= : Data is built directly in the kill set.
170
171 > : One level of indirection is used to keep from generating long
172 strings of 1 bits in the kill sets. Bitvectors that are indexed
173 by the regnum are used to represent that there is a killing def
174 for the register. The confluence and transfer functions use
175 these along with the bitmap_clear_range call to remove ranges of
176 bits without actually generating a knockout vector.
177
178 The kill and sparse_kill sets, and the dense_invalidated_by_call
179 and sparse_invalidated_by_call sets, all play this game. */
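/* Illustration (not tied to any particular testcase): if reg 42 has
   100 defs in the function, i.e. DF_DEFS_COUNT (42) == 100, which is
   above DF_SPARSE_THRESHOLD, then a block that kills reg 42 records a
   single bit 42 in sparse_kill, and the transfer function clears the
   id range [DF_DEFS_BEGIN (42), DF_DEFS_BEGIN (42) + 100) from the
   incoming set.  A reg with only 3 defs instead has those 3 def ids
   set directly in its block's kill set.  */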
180
181 /* Private data used to compute the solution for this problem. These
182 data structures are not accessible outside of this module. */
183 struct df_rd_problem_data
184 {
185 /* Regnos with more than DF_SPARSE_THRESHOLD defs whose defs are invalidated by a call. */
186 bitmap_head sparse_invalidated_by_call;
187 /* Def ids invalidated by a call, for regs with few enough defs. */
188 bitmap_head dense_invalidated_by_call;
189 /* An obstack for the bitmaps we need for this problem. */
190 bitmap_obstack rd_bitmaps;
191 };
192
193
194 /* Free basic block info. */
195
196 static void
197 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
198 void *vbb_info)
199 {
200 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
201 if (bb_info)
202 {
203 bitmap_clear (&bb_info->kill);
204 bitmap_clear (&bb_info->sparse_kill);
205 bitmap_clear (&bb_info->gen);
206 bitmap_clear (&bb_info->in);
207 bitmap_clear (&bb_info->out);
208 }
209 }
210
211
212 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
213 not touched unless the block is new. */
214
215 static void
216 df_rd_alloc (bitmap all_blocks)
217 {
218 unsigned int bb_index;
219 bitmap_iterator bi;
220 struct df_rd_problem_data *problem_data;
221
222 if (df_rd->problem_data)
223 {
224 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
225 bitmap_clear (&problem_data->sparse_invalidated_by_call);
226 bitmap_clear (&problem_data->dense_invalidated_by_call);
227 }
228 else
229 {
230 problem_data = XNEW (struct df_rd_problem_data);
231 df_rd->problem_data = problem_data;
232
233 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
234 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
235 &problem_data->rd_bitmaps);
236 bitmap_initialize (&problem_data->dense_invalidated_by_call,
237 &problem_data->rd_bitmaps);
238 }
239
240 df_grow_bb_info (df_rd);
241
242 /* Because of the clustering of all use sites for the same pseudo,
243 we have to process all of the blocks before doing the
244 analysis. */
245
246 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
247 {
248 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
249
250 /* When bitmaps are already initialized, just clear them. */
251 if (bb_info->kill.obstack)
252 {
253 bitmap_clear (&bb_info->kill);
254 bitmap_clear (&bb_info->sparse_kill);
255 bitmap_clear (&bb_info->gen);
256 }
257 else
258 {
259 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
260 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
261 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
262 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
263 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
264 }
265 }
266 df_rd->optional_p = true;
267 }
268
269
270 /* Add the effect of the top artificial defs of BB to the reaching definitions
271 bitmap LOCAL_RD. */
272
273 void
274 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
275 {
276 int bb_index = bb->index;
277 df_ref *def_rec;
278 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
279 {
280 df_ref def = *def_rec;
281 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
282 {
283 unsigned int dregno = DF_REF_REGNO (def);
284 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
285 bitmap_clear_range (local_rd,
286 DF_DEFS_BEGIN (dregno),
287 DF_DEFS_COUNT (dregno));
288 bitmap_set_bit (local_rd, DF_REF_ID (def));
289 }
290 }
291 }
292
293 /* Add the effect of the defs of INSN to the reaching definitions bitmap
294 LOCAL_RD. */
295
296 void
297 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
298 bitmap local_rd)
299 {
300 unsigned uid = INSN_UID (insn);
301 df_ref *def_rec;
302
303 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
304 {
305 df_ref def = *def_rec;
306 unsigned int dregno = DF_REF_REGNO (def);
307 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
308 || (dregno >= FIRST_PSEUDO_REGISTER))
309 {
310 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
311 bitmap_clear_range (local_rd,
312 DF_DEFS_BEGIN (dregno),
313 DF_DEFS_COUNT (dregno));
314 if (!(DF_REF_FLAGS (def)
315 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
316 bitmap_set_bit (local_rd, DF_REF_ID (def));
317 }
318 }
319 }
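/* A typical client of the two simulate routines above (a sketch, not
   code taken from any caller; BB and INSN are the caller's locals)
   keeps the set of reaching def ids up to date while walking a block
   forwards:

     bitmap_head live_rd;
     bitmap_initialize (&live_rd, &bitmap_default_obstack);
     bitmap_copy (&live_rd, &df_rd_get_bb_info (bb->index)->in);
     df_rd_simulate_artificial_defs_at_top (bb, &live_rd);
     FOR_BB_INSNS (bb, insn)
       if (INSN_P (insn))
         {
           ... query LIVE_RD here for the defs reaching INSN ...
           df_rd_simulate_one_insn (bb, insn, &live_rd);
         }
     bitmap_clear (&live_rd);  */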
320
321 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
322 more complicated than just simulating, because we must produce the
323 gen and kill sets and hence deal with the two possible representations
324 of kill sets. */
325
326 static void
327 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
328 df_ref *def_rec,
329 int top_flag)
330 {
331 while (*def_rec)
332 {
333 df_ref def = *def_rec;
334 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
335 {
336 unsigned int regno = DF_REF_REGNO (def);
337 unsigned int begin = DF_DEFS_BEGIN (regno);
338 unsigned int n_defs = DF_DEFS_COUNT (regno);
339
340 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
341 || (regno >= FIRST_PSEUDO_REGISTER))
342 {
343 /* Only the last def(s) for a regno in the block have any
344 effect. */
345 if (!bitmap_bit_p (&seen_in_block, regno))
346 {
347 /* The first def for regno in insn gets to knock out the
348 defs from other instructions. */
349 if ((!bitmap_bit_p (&seen_in_insn, regno))
350 /* If the def is to only part of the reg, it does
351 not kill the other defs that reach here. */
352 && (!(DF_REF_FLAGS (def) &
353 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
354 {
355 if (n_defs > DF_SPARSE_THRESHOLD)
356 {
357 bitmap_set_bit (&bb_info->sparse_kill, regno);
358 bitmap_clear_range (&bb_info->gen, begin, n_defs);
359 }
360 else
361 {
362 bitmap_set_range (&bb_info->kill, begin, n_defs);
363 bitmap_clear_range (&bb_info->gen, begin, n_defs);
364 }
365 }
366
367 bitmap_set_bit (&seen_in_insn, regno);
368 /* All defs for regno in the instruction may be put into
369 the gen set. */
370 if (!(DF_REF_FLAGS (def)
371 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
372 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
373 }
374 }
375 }
376 def_rec++;
377 }
378 }
379
380 /* Compute local reaching def info for basic block BB. */
381
382 static void
383 df_rd_bb_local_compute (unsigned int bb_index)
384 {
385 basic_block bb = BASIC_BLOCK (bb_index);
386 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
387 rtx insn;
388
389 bitmap_clear (&seen_in_block);
390 bitmap_clear (&seen_in_insn);
391
392 /* Artificials are only hard regs. */
393 if (!(df->changeable_flags & DF_NO_HARD_REGS))
394 df_rd_bb_local_compute_process_def (bb_info,
395 df_get_artificial_defs (bb_index),
396 0);
397
398 FOR_BB_INSNS_REVERSE (bb, insn)
399 {
400 unsigned int uid = INSN_UID (insn);
401
402 if (!INSN_P (insn))
403 continue;
404
405 df_rd_bb_local_compute_process_def (bb_info,
406 DF_INSN_UID_DEFS (uid), 0);
407
408 /* This complex dance with the two bitmaps is required because
409 instructions can assign twice to the same pseudo. This
410 generally happens with calls that will have one def for the
411 result and another def for the clobber. If only one vector
412 is used and the clobber goes first, the result will be
413 lost. */
414 bitmap_ior_into (&seen_in_block, &seen_in_insn);
415 bitmap_clear (&seen_in_insn);
416 }
417
418 /* Process the artificial defs at the top of the block last since we
419 are going backwards through the block and these are logically at
420 the start. */
421 if (!(df->changeable_flags & DF_NO_HARD_REGS))
422 df_rd_bb_local_compute_process_def (bb_info,
423 df_get_artificial_defs (bb_index),
424 DF_REF_AT_TOP);
425 }
426
427
428 /* Compute local reaching def info for each basic block within BLOCKS. */
429
430 static void
431 df_rd_local_compute (bitmap all_blocks)
432 {
433 unsigned int bb_index;
434 bitmap_iterator bi;
435 unsigned int regno;
436 struct df_rd_problem_data *problem_data
437 = (struct df_rd_problem_data *) df_rd->problem_data;
438 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
439 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
440
441 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
442 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
443
444 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
445
446 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
447 {
448 df_rd_bb_local_compute (bb_index);
449 }
450
451 /* Set up the knockout bit vectors to be applied across EH edges. */
452 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
453 {
454 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
455 bitmap_set_bit (sparse_invalidated, regno);
456 else
457 bitmap_set_range (dense_invalidated,
458 DF_DEFS_BEGIN (regno),
459 DF_DEFS_COUNT (regno));
460 }
461
462 bitmap_clear (&seen_in_block);
463 bitmap_clear (&seen_in_insn);
464 }
465
466
467 /* Initialize the solution bit vectors for the problem. */
468
469 static void
470 df_rd_init_solution (bitmap all_blocks)
471 {
472 unsigned int bb_index;
473 bitmap_iterator bi;
474
475 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
476 {
477 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
478
479 bitmap_copy (&bb_info->out, &bb_info->gen);
480 bitmap_clear (&bb_info->in);
481 }
482 }
483
484 /* Confluence: IN of the edge's destination gets the OR of OUT of its source. */
485
486 static bool
487 df_rd_confluence_n (edge e)
488 {
489 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
490 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
491 bool changed = false;
492
493 if (e->flags & EDGE_FAKE)
494 return false;
495
496 if (e->flags & EDGE_EH)
497 {
498 struct df_rd_problem_data *problem_data
499 = (struct df_rd_problem_data *) df_rd->problem_data;
500 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
501 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
502 bitmap_iterator bi;
503 unsigned int regno;
504 bitmap_head tmp;
505
506 bitmap_initialize (&tmp, &df_bitmap_obstack);
507 bitmap_copy (&tmp, op2);
508 bitmap_and_compl_into (&tmp, dense_invalidated);
509
510 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
511 {
512 bitmap_clear_range (&tmp,
513 DF_DEFS_BEGIN (regno),
514 DF_DEFS_COUNT (regno));
515 }
516 changed |= bitmap_ior_into (op1, &tmp);
517 bitmap_clear (&tmp);
518 return changed;
519 }
520 else
521 return bitmap_ior_into (op1, op2);
522 }
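/* In other words, across an EH edge the defs invalidated by a call do
   not survive: OUT (src) is first stripped of dense_invalidated_by_call
   and of every def of each regno in sparse_invalidated_by_call, and only
   the remainder is ORed into IN (dest).  On a normal edge IN (dest)
   simply gets OUT (src) ORed in.  */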
523
524
525 /* Transfer function: OUT = GEN | (IN - KILL), applying sparse kills as ranges of def ids. */
526
527 static bool
528 df_rd_transfer_function (int bb_index)
529 {
530 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
531 unsigned int regno;
532 bitmap_iterator bi;
533 bitmap in = &bb_info->in;
534 bitmap out = &bb_info->out;
535 bitmap gen = &bb_info->gen;
536 bitmap kill = &bb_info->kill;
537 bitmap sparse_kill = &bb_info->sparse_kill;
538
539 if (bitmap_empty_p (sparse_kill))
540 return bitmap_ior_and_compl (out, gen, in, kill);
541 else
542 {
543 struct df_rd_problem_data *problem_data;
544 bool changed = false;
545 bitmap_head tmp;
546
547 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
548 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
549 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
550 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
551
552 bitmap_copy (&tmp, in);
553 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
554 {
555 bitmap_clear_range (&tmp,
556 DF_DEFS_BEGIN (regno),
557 DF_DEFS_COUNT (regno));
558 }
559 bitmap_and_compl_into (&tmp, kill);
560 bitmap_ior_into (&tmp, gen);
561 changed = !bitmap_equal_p (&tmp, out);
562 if (changed)
563 {
564 bitmap_clear (out);
565 bb_info->out = tmp;
566 }
567 else
568 bitmap_clear (&tmp);
569 return changed;
570 }
571 }
572
573
574 /* Free all storage associated with the problem. */
575
576 static void
577 df_rd_free (void)
578 {
579 struct df_rd_problem_data *problem_data
580 = (struct df_rd_problem_data *) df_rd->problem_data;
581
582 if (problem_data)
583 {
584 bitmap_obstack_release (&problem_data->rd_bitmaps);
585
586 df_rd->block_info_size = 0;
587 free (df_rd->block_info);
588 df_rd->block_info = NULL;
589 free (df_rd->problem_data);
590 }
591 free (df_rd);
592 }
593
594
595 /* Debugging info. */
596
597 static void
598 df_rd_start_dump (FILE *file)
599 {
600 struct df_rd_problem_data *problem_data
601 = (struct df_rd_problem_data *) df_rd->problem_data;
602 unsigned int m = DF_REG_SIZE (df);
603 unsigned int regno;
604
605 if (!df_rd->block_info)
606 return;
607
608 fprintf (file, ";; Reaching defs:\n\n");
609
610 fprintf (file, " sparse invalidated \t");
611 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
612 fprintf (file, " dense invalidated \t");
613 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
614
615 for (regno = 0; regno < m; regno++)
616 if (DF_DEFS_COUNT (regno))
617 fprintf (file, "%d[%d,%d] ", regno,
618 DF_DEFS_BEGIN (regno),
619 DF_DEFS_COUNT (regno));
620 fprintf (file, "\n");
621
622 }
623
624
625 /* Debugging info at top of bb. */
626
627 static void
628 df_rd_top_dump (basic_block bb, FILE *file)
629 {
630 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
631 if (!bb_info)
632 return;
633
634 fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (&bb_info->in));
635 dump_bitmap (file, &bb_info->in);
636 fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (&bb_info->gen));
637 dump_bitmap (file, &bb_info->gen);
638 fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (&bb_info->kill));
639 dump_bitmap (file, &bb_info->kill);
640 }
641
642
643 /* Debugging info at bottom of bb. */
644
645 static void
646 df_rd_bottom_dump (basic_block bb, FILE *file)
647 {
648 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
649 if (!bb_info)
650 return;
651
652 fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (&bb_info->out));
653 dump_bitmap (file, &bb_info->out);
654 }
655
656 /* All of the information associated with every instance of the problem. */
657
658 static struct df_problem problem_RD =
659 {
660 DF_RD, /* Problem id. */
661 DF_FORWARD, /* Direction. */
662 df_rd_alloc, /* Allocate the problem specific data. */
663 NULL, /* Reset global information. */
664 df_rd_free_bb_info, /* Free basic block info. */
665 df_rd_local_compute, /* Local compute function. */
666 df_rd_init_solution, /* Init the solution specific data. */
667 df_worklist_dataflow, /* Worklist solver. */
668 NULL, /* Confluence operator 0. */
669 df_rd_confluence_n, /* Confluence operator n. */
670 df_rd_transfer_function, /* Transfer function. */
671 NULL, /* Finalize function. */
672 df_rd_free, /* Free all of the problem information. */
673 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
674 df_rd_start_dump, /* Debugging. */
675 df_rd_top_dump, /* Debugging start block. */
676 df_rd_bottom_dump, /* Debugging end block. */
677 NULL, /* Incremental solution verify start. */
678 NULL, /* Incremental solution verify end. */
679 NULL, /* Dependent problem. */
680 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
681 TV_DF_RD, /* Timing variable. */
682 true /* Reset blocks on dropping out of blocks_to_analyze. */
683 };
684
685
686
687 /* Create a new RD instance and add it to the existing instance
688 of DF. */
689
690 void
691 df_rd_add_problem (void)
692 {
693 df_add_problem (&problem_RD);
694 }
695
696
697 \f
698 /*----------------------------------------------------------------------------
699 LIVE REGISTERS
700
701 Find the locations in the function where any use of a pseudo can
702 reach in the backwards direction. In and out bitvectors are built
703 for each basic block. The regno is used to index into these sets.
704 See df.h for details.
705 ----------------------------------------------------------------------------*/
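/* The equations solved here are the classic backward liveness
   equations:

     IN (bb)  = USE (bb) | (OUT (bb) - DEF (bb))
     OUT (bb) = union of IN (succ) over the successors of bb

   augmented below with the hard registers that must always be live and
   with call-clobber filtering on EH edges.  */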
706
707 /* Private data used to verify the solution for this problem. */
708 struct df_lr_problem_data
709 {
710 bitmap_head *in;
711 bitmap_head *out;
712 /* An obstack for the bitmaps we need for this problem. */
713 bitmap_obstack lr_bitmaps;
714 };
715
716 /* Free basic block info. */
717
718 static void
719 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
720 void *vbb_info)
721 {
722 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
723 if (bb_info)
724 {
725 bitmap_clear (&bb_info->use);
726 bitmap_clear (&bb_info->def);
727 bitmap_clear (&bb_info->in);
728 bitmap_clear (&bb_info->out);
729 }
730 }
731
732
733 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
734 not touched unless the block is new. */
735
736 static void
737 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
738 {
739 unsigned int bb_index;
740 bitmap_iterator bi;
741 struct df_lr_problem_data *problem_data;
742
743 df_grow_bb_info (df_lr);
744 if (df_lr->problem_data)
745 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
746 else
747 {
748 problem_data = XNEW (struct df_lr_problem_data);
749 df_lr->problem_data = problem_data;
750
751 problem_data->out = NULL;
752 problem_data->in = NULL;
753 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
754 }
755
756 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
757 {
758 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
759
760 /* When bitmaps are already initialized, just clear them. */
761 if (bb_info->use.obstack)
762 {
763 bitmap_clear (&bb_info->def);
764 bitmap_clear (&bb_info->use);
765 }
766 else
767 {
768 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
769 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
770 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
771 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
772 }
773 }
774
775 df_lr->optional_p = false;
776 }
777
778
779 /* Reset the global solution for recalculation. */
780
781 static void
782 df_lr_reset (bitmap all_blocks)
783 {
784 unsigned int bb_index;
785 bitmap_iterator bi;
786
787 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
788 {
789 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
790 gcc_assert (bb_info);
791 bitmap_clear (&bb_info->in);
792 bitmap_clear (&bb_info->out);
793 }
794 }
795
796
797 /* Compute local live register info for basic block BB. */
798
799 static void
800 df_lr_bb_local_compute (unsigned int bb_index)
801 {
802 basic_block bb = BASIC_BLOCK (bb_index);
803 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
804 rtx insn;
805 df_ref *def_rec;
806 df_ref *use_rec;
807
808 /* Process the registers set in an exception handler. */
809 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
810 {
811 df_ref def = *def_rec;
812 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
813 {
814 unsigned int dregno = DF_REF_REGNO (def);
815 bitmap_set_bit (&bb_info->def, dregno);
816 bitmap_clear_bit (&bb_info->use, dregno);
817 }
818 }
819
820 /* Process the hardware registers that are always live. */
821 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
822 {
823 df_ref use = *use_rec;
824 /* Add use to set of uses in this BB. */
825 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
826 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
827 }
828
829 FOR_BB_INSNS_REVERSE (bb, insn)
830 {
831 unsigned int uid = INSN_UID (insn);
832
833 if (!NONDEBUG_INSN_P (insn))
834 continue;
835
836 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
837 {
838 df_ref def = *def_rec;
839 /* If the def is to only part of the reg, it does
840 not kill the other defs that reach here. */
841 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
842 {
843 unsigned int dregno = DF_REF_REGNO (def);
844 bitmap_set_bit (&bb_info->def, dregno);
845 bitmap_clear_bit (&bb_info->use, dregno);
846 }
847 }
848
849 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
850 {
851 df_ref use = *use_rec;
852 /* Add use to set of uses in this BB. */
853 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
854 }
855 }
856
857 /* Process the registers set in an exception handler or the hard
858 frame pointer if this block is the target of a non local
859 goto. */
860 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
861 {
862 df_ref def = *def_rec;
863 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
864 {
865 unsigned int dregno = DF_REF_REGNO (def);
866 bitmap_set_bit (&bb_info->def, dregno);
867 bitmap_clear_bit (&bb_info->use, dregno);
868 }
869 }
870
871 #ifdef EH_USES
872 /* Process the uses that are live into an exception handler. */
873 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
874 {
875 df_ref use = *use_rec;
876 /* Add use to set of uses in this BB. */
877 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
878 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
879 }
880 #endif
881
882 /* If the df_live problem is not defined, such as at -O0 and -O1, we
883 still need to keep the luids up to date. This is normally done
884 in the df_live problem since this problem has a forwards
885 scan. */
886 if (!df_live)
887 df_recompute_luids (bb);
888 }
889
890
891 /* Compute local live register info for each basic block within BLOCKS. */
892
893 static void
894 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
895 {
896 unsigned int bb_index;
897 bitmap_iterator bi;
898
899 bitmap_clear (&df->hardware_regs_used);
900
901 /* The all-important stack pointer must always be live. */
902 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
903
904 /* Before reload, there are a few registers that must be forced
905 live everywhere -- which might not already be the case for
906 blocks within infinite loops. */
907 if (!reload_completed)
908 {
909 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
910 /* Any reference to any pseudo before reload is a potential
911 reference of the frame pointer. */
912 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
913
914 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
915 /* Pseudos with argument area equivalences may require
916 reloading via the argument pointer. */
917 if (fixed_regs[ARG_POINTER_REGNUM])
918 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
919 #endif
920
921 /* Any constant, or pseudo with constant equivalences, may
922 require reloading from memory using the pic register. */
923 if (pic_offset_table_regnum != INVALID_REGNUM
924 && fixed_regs[pic_offset_table_regnum])
925 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
926 }
927
928 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
929 {
930 if (bb_index == EXIT_BLOCK)
931 {
932 /* The exit block is special for this problem and its bits are
933 computed from thin air. */
934 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
935 bitmap_copy (&bb_info->use, df->exit_block_uses);
936 }
937 else
938 df_lr_bb_local_compute (bb_index);
939 }
940
941 bitmap_clear (df_lr->out_of_date_transfer_functions);
942 }
943
944
945 /* Initialize the solution vectors. */
946
947 static void
948 df_lr_init (bitmap all_blocks)
949 {
950 unsigned int bb_index;
951 bitmap_iterator bi;
952
953 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
954 {
955 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
956 bitmap_copy (&bb_info->in, &bb_info->use);
957 bitmap_clear (&bb_info->out);
958 }
959 }
960
961
962 /* Confluence function that processes infinite loops. This might be a
963 noreturn function that throws. And even if it isn't, getting the
964 unwind info right helps debugging. */
965 static void
966 df_lr_confluence_0 (basic_block bb)
967 {
968 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
969 if (bb != EXIT_BLOCK_PTR)
970 bitmap_copy (op1, &df->hardware_regs_used);
971 }
972
973
974 /* Confluence function that ignores fake edges. */
975
976 static bool
977 df_lr_confluence_n (edge e)
978 {
979 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
980 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
981 bool changed = false;
982
983 /* Call-clobbered registers die across exception and call edges. */
984 /* ??? Abnormal call edges ignored for the moment, as this gets
985 confused by sibling call edges, which crashes reg-stack. */
986 if (e->flags & EDGE_EH)
987 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
988 else
989 changed = bitmap_ior_into (op1, op2);
990
991 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
992 return changed;
993 }
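/* In equation form: OUT (src) |= (IN (dest) - regs_invalidated_by_call)
   on EH edges, OUT (src) |= IN (dest) otherwise, and in both cases the
   always-live hard registers are ORed in as well.  */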
994
995
996 /* Transfer function. */
997
998 static bool
999 df_lr_transfer_function (int bb_index)
1000 {
1001 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1002 bitmap in = &bb_info->in;
1003 bitmap out = &bb_info->out;
1004 bitmap use = &bb_info->use;
1005 bitmap def = &bb_info->def;
1006
1007 return bitmap_ior_and_compl (in, use, out, def);
1008 }
1009
1010
1011 /* Run the fast dce as a side effect of building LR. */
1012
1013 static void
1014 df_lr_finalize (bitmap all_blocks)
1015 {
1016 df_lr->solutions_dirty = false;
1017 if (df->changeable_flags & DF_LR_RUN_DCE)
1018 {
1019 run_fast_df_dce ();
1020
1021 /* If dce deletes some instructions, we need to recompute the lr
1022 solution before proceeding further. The problem is that fast
1023 dce is a pessimistic dataflow algorithm. In the case where
1024 it deletes a statement S inside of a loop, the uses inside of
1025 S may not be deleted from the dataflow solution because they
1026 were carried around the loop. While it is conservatively
1027 correct to leave these extra bits, the standards of df
1028 require that we maintain the best possible (least fixed
1029 point) solution. The only way to do that is to redo the
1030 iteration from the beginning. See PR35805 for an
1031 example. */
1032 if (df_lr->solutions_dirty)
1033 {
1034 df_clear_flags (DF_LR_RUN_DCE);
1035 df_lr_alloc (all_blocks);
1036 df_lr_local_compute (all_blocks);
1037 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1038 df_lr_finalize (all_blocks);
1039 df_set_flags (DF_LR_RUN_DCE);
1040 }
1041 }
1042 }
1043
1044
1045 /* Free all storage associated with the problem. */
1046
1047 static void
1048 df_lr_free (void)
1049 {
1050 struct df_lr_problem_data *problem_data
1051 = (struct df_lr_problem_data *) df_lr->problem_data;
1052 if (df_lr->block_info)
1053 {
1054
1055 df_lr->block_info_size = 0;
1056 free (df_lr->block_info);
1057 df_lr->block_info = NULL;
1058 bitmap_obstack_release (&problem_data->lr_bitmaps);
1059 free (df_lr->problem_data);
1060 df_lr->problem_data = NULL;
1061 }
1062
1063 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1064 free (df_lr);
1065 }
1066
1067
1068 /* Debugging info at top of bb. */
1069
1070 static void
1071 df_lr_top_dump (basic_block bb, FILE *file)
1072 {
1073 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1074 struct df_lr_problem_data *problem_data;
1075 if (!bb_info)
1076 return;
1077
1078 fprintf (file, ";; lr in \t");
1079 df_print_regset (file, &bb_info->in);
1080 if (df_lr->problem_data)
1081 {
1082 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1083 if (problem_data->in)
1084 {
1085 fprintf (file, ";; old in \t");
1086 df_print_regset (file, &problem_data->in[bb->index]);
1087 }
1088 }
1089 fprintf (file, ";; lr use \t");
1090 df_print_regset (file, &bb_info->use);
1091 fprintf (file, ";; lr def \t");
1092 df_print_regset (file, &bb_info->def);
1093 }
1094
1095
1096 /* Debugging info at bottom of bb. */
1097
1098 static void
1099 df_lr_bottom_dump (basic_block bb, FILE *file)
1100 {
1101 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1102 struct df_lr_problem_data *problem_data;
1103 if (!bb_info)
1104 return;
1105
1106 fprintf (file, ";; lr out \t");
1107 df_print_regset (file, &bb_info->out);
1108 if (df_lr->problem_data)
1109 {
1110 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1111 if (problem_data->out)
1112 {
1113 fprintf (file, ";; old out \t");
1114 df_print_regset (file, &problem_data->out[bb->index]);
1115 }
1116 }
1117 }
1118
1119
1120 /* Build the datastructure to verify that the solution to the dataflow
1121 equations is not dirty. */
1122
1123 static void
1124 df_lr_verify_solution_start (void)
1125 {
1126 basic_block bb;
1127 struct df_lr_problem_data *problem_data;
1128 if (df_lr->solutions_dirty)
1129 return;
1130
1131 /* Set it true so that the solution is recomputed. */
1132 df_lr->solutions_dirty = true;
1133
1134 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1135 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1136 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1137
1138 FOR_ALL_BB (bb)
1139 {
1140 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1141 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1142 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1143 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1144 }
1145 }
1146
1147
1148 /* Compare the saved datastructure and the new solution to the dataflow
1149 equations. */
1150
1151 static void
1152 df_lr_verify_solution_end (void)
1153 {
1154 struct df_lr_problem_data *problem_data;
1155 basic_block bb;
1156
1157 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1158
1159 if (!problem_data->out)
1160 return;
1161
1162 if (df_lr->solutions_dirty)
1163 /* Do not check if the solution is still dirty. See the comment
1164 in df_lr_finalize for details. */
1165 df_lr->solutions_dirty = false;
1166 else
1167 FOR_ALL_BB (bb)
1168 {
1169 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1170 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1171 {
1172 /*df_dump (stderr);*/
1173 gcc_unreachable ();
1174 }
1175 }
1176
1177 /* Cannot delete them immediately because you may want to dump them
1178 if the comparison fails. */
1179 FOR_ALL_BB (bb)
1180 {
1181 bitmap_clear (&problem_data->in[bb->index]);
1182 bitmap_clear (&problem_data->out[bb->index]);
1183 }
1184
1185 free (problem_data->in);
1186 free (problem_data->out);
1187 problem_data->in = NULL;
1188 problem_data->out = NULL;
1189 }
1190
1191
1192 /* All of the information associated with every instance of the problem. */
1193
1194 static struct df_problem problem_LR =
1195 {
1196 DF_LR, /* Problem id. */
1197 DF_BACKWARD, /* Direction. */
1198 df_lr_alloc, /* Allocate the problem specific data. */
1199 df_lr_reset, /* Reset global information. */
1200 df_lr_free_bb_info, /* Free basic block info. */
1201 df_lr_local_compute, /* Local compute function. */
1202 df_lr_init, /* Init the solution specific data. */
1203 df_worklist_dataflow, /* Worklist solver. */
1204 df_lr_confluence_0, /* Confluence operator 0. */
1205 df_lr_confluence_n, /* Confluence operator n. */
1206 df_lr_transfer_function, /* Transfer function. */
1207 df_lr_finalize, /* Finalize function. */
1208 df_lr_free, /* Free all of the problem information. */
1209 NULL, /* Remove this problem from the stack of dataflow problems. */
1210 NULL, /* Debugging. */
1211 df_lr_top_dump, /* Debugging start block. */
1212 df_lr_bottom_dump, /* Debugging end block. */
1213 df_lr_verify_solution_start,/* Incremental solution verify start. */
1214 df_lr_verify_solution_end, /* Incremental solution verify end. */
1215 NULL, /* Dependent problem. */
1216 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1217 TV_DF_LR, /* Timing variable. */
1218 false /* Reset blocks on dropping out of blocks_to_analyze. */
1219 };
1220
1221
1222 /* Create a new DATAFLOW instance and add it to an existing instance
1223 of DF. The returned structure is what is used to get at the
1224 solution. */
1225
1226 void
1227 df_lr_add_problem (void)
1228 {
1229 df_add_problem (&problem_LR);
1230 /* These will be initialized when df_scan_blocks processes each
1231 block. */
1232 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1233 }
1234
1235
1236 /* Verify that all of the lr related info is consistent and
1237 correct. */
1238
1239 void
1240 df_lr_verify_transfer_functions (void)
1241 {
1242 basic_block bb;
1243 bitmap_head saved_def;
1244 bitmap_head saved_use;
1245 bitmap_head all_blocks;
1246
1247 if (!df)
1248 return;
1249
1250 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1251 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1252 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1253
1254 FOR_ALL_BB (bb)
1255 {
1256 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1257 bitmap_set_bit (&all_blocks, bb->index);
1258
1259 if (bb_info)
1260 {
1261 /* Make a copy of the transfer functions and then compute
1262 new ones to see if the transfer functions have
1263 changed. */
1264 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1265 bb->index))
1266 {
1267 bitmap_copy (&saved_def, &bb_info->def);
1268 bitmap_copy (&saved_use, &bb_info->use);
1269 bitmap_clear (&bb_info->def);
1270 bitmap_clear (&bb_info->use);
1271
1272 df_lr_bb_local_compute (bb->index);
1273 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1274 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1275 }
1276 }
1277 else
1278 {
1279 /* If we do not have basic block info, the block must be in
1280 the list of dirty blocks or else someone has added a
1281 block behind our backs. */
1282 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1283 bb->index));
1284 }
1285 /* Make sure no one created a block without following
1286 procedures. */
1287 gcc_assert (df_scan_get_bb_info (bb->index));
1288 }
1289
1290 /* Make sure there are no dirty bits in blocks that have been deleted. */
1291 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1292 &all_blocks));
1293
1294 bitmap_clear (&saved_def);
1295 bitmap_clear (&saved_use);
1296 bitmap_clear (&all_blocks);
1297 }
1298
1299
1300 \f
1301 /*----------------------------------------------------------------------------
1302 LIVE AND MUST-INITIALIZED REGISTERS.
1303
1304 This problem first computes the IN and OUT bitvectors for the
1305 must-initialized registers problems, which is a forward problem.
1306 It gives the set of registers for which we MUST have an available
1307 definition on any path from the entry block to the entry/exit of
1308 a basic block. Sets generate a definition, while clobbers kill
1309 a definition.
1310
1311 In and out bitvectors are built for each basic block and are indexed by
1312 regnum (see df.h for details). In and out bitvectors in struct
1313 df_live_bb_info actually refer to the must-initialized problem.
1314
1315 Then, the in and out sets for the LIVE problem itself are computed.
1316 These are the logical AND of the IN and OUT sets from the LR problem
1317 and the must-initialized problem.
1318 ----------------------------------------------------------------------------*/
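/* Concretely, once df_live_finalize has run:

     DF_LIVE_IN (bb)  = DF_LR_IN (bb)  & must-initialized IN (bb)
     DF_LIVE_OUT (bb) = DF_LR_OUT (bb) & must-initialized OUT (bb)

   so a register appears in the LIVE sets only where it both has an
   available definition and is still used on some path to the exit.  */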
1319
1320 /* Private data used to verify the solution for this problem. */
1321 struct df_live_problem_data
1322 {
1323 bitmap_head *in;
1324 bitmap_head *out;
1325 /* An obstack for the bitmaps we need for this problem. */
1326 bitmap_obstack live_bitmaps;
1327 };
1328
1329 /* Scratch var used by transfer functions. This is used to implement
1330 an optimization to reduce the amount of space used to compute the
1331 combined lr and live analysis. */
1332 static bitmap_head df_live_scratch;
1333
1334
1335 /* Free basic block info. */
1336
1337 static void
1338 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1339 void *vbb_info)
1340 {
1341 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1342 if (bb_info)
1343 {
1344 bitmap_clear (&bb_info->gen);
1345 bitmap_clear (&bb_info->kill);
1346 bitmap_clear (&bb_info->in);
1347 bitmap_clear (&bb_info->out);
1348 }
1349 }
1350
1351
1352 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1353 not touched unless the block is new. */
1354
1355 static void
1356 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1357 {
1358 unsigned int bb_index;
1359 bitmap_iterator bi;
1360 struct df_live_problem_data *problem_data;
1361
1362 if (df_live->problem_data)
1363 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1364 else
1365 {
1366 problem_data = XNEW (struct df_live_problem_data);
1367 df_live->problem_data = problem_data;
1368
1369 problem_data->out = NULL;
1370 problem_data->in = NULL;
1371 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1372 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1373 }
1374
1375 df_grow_bb_info (df_live);
1376
1377 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1378 {
1379 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1380
1381 /* When bitmaps are already initialized, just clear them. */
1382 if (bb_info->kill.obstack)
1383 {
1384 bitmap_clear (&bb_info->kill);
1385 bitmap_clear (&bb_info->gen);
1386 }
1387 else
1388 {
1389 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1390 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1391 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1392 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1393 }
1394 }
1395 df_live->optional_p = (optimize <= 1);
1396 }
1397
1398
1399 /* Reset the global solution for recalculation. */
1400
1401 static void
1402 df_live_reset (bitmap all_blocks)
1403 {
1404 unsigned int bb_index;
1405 bitmap_iterator bi;
1406
1407 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1408 {
1409 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1410 gcc_assert (bb_info);
1411 bitmap_clear (&bb_info->in);
1412 bitmap_clear (&bb_info->out);
1413 }
1414 }
1415
1416
1417 /* Compute local uninitialized register info for basic block BB. */
1418
1419 static void
1420 df_live_bb_local_compute (unsigned int bb_index)
1421 {
1422 basic_block bb = BASIC_BLOCK (bb_index);
1423 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1424 rtx insn;
1425 df_ref *def_rec;
1426 int luid = 0;
1427
1428 FOR_BB_INSNS (bb, insn)
1429 {
1430 unsigned int uid = INSN_UID (insn);
1431 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1432
1433 /* Inserting labels does not always trigger the incremental
1434 rescanning. */
1435 if (!insn_info)
1436 {
1437 gcc_assert (!INSN_P (insn));
1438 insn_info = df_insn_create_insn_record (insn);
1439 }
1440
1441 DF_INSN_INFO_LUID (insn_info) = luid;
1442 if (!INSN_P (insn))
1443 continue;
1444
1445 luid++;
1446 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1447 {
1448 df_ref def = *def_rec;
1449 unsigned int regno = DF_REF_REGNO (def);
1450
1451 if (DF_REF_FLAGS_IS_SET (def,
1452 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1453 /* All partial or conditional defs
1454 seen are included in the gen set. */
1455 bitmap_set_bit (&bb_info->gen, regno);
1456 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1457 /* Only must clobbers for the entire reg destroy the
1458 value. */
1459 bitmap_set_bit (&bb_info->kill, regno);
1460 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1461 bitmap_set_bit (&bb_info->gen, regno);
1462 }
1463 }
1464
1465 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1466 {
1467 df_ref def = *def_rec;
1468 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1469 }
1470 }
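/* To summarize the insn loop above: a must clobber of the whole reg
   KILLs its value; every other def except a may clobber GENs it,
   including partial and conditional defs, since those still leave an
   initialized value behind; a may clobber does neither.  */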
1471
1472
1473 /* Compute local uninitialized register info. */
1474
1475 static void
1476 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1477 {
1478 unsigned int bb_index;
1479 bitmap_iterator bi;
1480
1481 df_grow_insn_info ();
1482
1483 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1484 0, bb_index, bi)
1485 {
1486 df_live_bb_local_compute (bb_index);
1487 }
1488
1489 bitmap_clear (df_live->out_of_date_transfer_functions);
1490 }
1491
1492
1493 /* Initialize the solution vectors. */
1494
1495 static void
1496 df_live_init (bitmap all_blocks)
1497 {
1498 unsigned int bb_index;
1499 bitmap_iterator bi;
1500
1501 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1502 {
1503 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1504 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1505
1506 /* No register may reach a location where it is not used. Thus
1507 we trim the rr result to the places where it is used. */
1508 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1509 bitmap_clear (&bb_info->in);
1510 }
1511 }
1512
1513 /* Forward confluence function that ignores fake edges. */
1514
1515 static bool
1516 df_live_confluence_n (edge e)
1517 {
1518 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1519 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1520
1521 if (e->flags & EDGE_FAKE)
1522 return false;
1523
1524 return bitmap_ior_into (op1, op2);
1525 }
1526
1527
1528 /* Transfer function for the forwards must-initialized problem. */
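/* In equation form (see the scratch-set comment in the body):
     OUT = (GEN & LR_OUT) | ((IN & LR_IN) - KILL).  */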
1529
1530 static bool
1531 df_live_transfer_function (int bb_index)
1532 {
1533 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1534 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1535 bitmap in = &bb_info->in;
1536 bitmap out = &bb_info->out;
1537 bitmap gen = &bb_info->gen;
1538 bitmap kill = &bb_info->kill;
1539
1540 /* We need to use a scratch set here so that the value returned from this
1541 function invocation properly reflects whether the sets changed in a
1542 significant way; i.e. not just because the lr set was anded in. */
1543 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1544 /* No register may reach a location where it is not used. Thus
1545 we trim the rr result to the places where it is used. */
1546 bitmap_and_into (in, &bb_lr_info->in);
1547
1548 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1549 }
1550
1551
1552 /* AND the LR info with the must-initialized registers, to produce the LIVE info. */
1553
1554 static void
1555 df_live_finalize (bitmap all_blocks)
1556 {
1557
1558 if (df_live->solutions_dirty)
1559 {
1560 bitmap_iterator bi;
1561 unsigned int bb_index;
1562
1563 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1564 {
1565 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1566 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1567
1568 /* No register may reach a location where it is not used. Thus
1569 we trim the rr result to the places where it is used. */
1570 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1571 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1572 }
1573
1574 df_live->solutions_dirty = false;
1575 }
1576 }
1577
1578
1579 /* Free all storage associated with the problem. */
1580
1581 static void
1582 df_live_free (void)
1583 {
1584 struct df_live_problem_data *problem_data
1585 = (struct df_live_problem_data *) df_live->problem_data;
1586 if (df_live->block_info)
1587 {
1588 df_live->block_info_size = 0;
1589 free (df_live->block_info);
1590 df_live->block_info = NULL;
1591 bitmap_clear (&df_live_scratch);
1592 bitmap_obstack_release (&problem_data->live_bitmaps);
1593 free (problem_data);
1594 df_live->problem_data = NULL;
1595 }
1596 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1597 free (df_live);
1598 }
1599
1600
1601 /* Debugging info at top of bb. */
1602
1603 static void
1604 df_live_top_dump (basic_block bb, FILE *file)
1605 {
1606 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1607 struct df_live_problem_data *problem_data;
1608
1609 if (!bb_info)
1610 return;
1611
1612 fprintf (file, ";; live in \t");
1613 df_print_regset (file, &bb_info->in);
1614 if (df_live->problem_data)
1615 {
1616 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1617 if (problem_data->in)
1618 {
1619 fprintf (file, ";; old in \t");
1620 df_print_regset (file, &problem_data->in[bb->index]);
1621 }
1622 }
1623 fprintf (file, ";; live gen \t");
1624 df_print_regset (file, &bb_info->gen);
1625 fprintf (file, ";; live kill\t");
1626 df_print_regset (file, &bb_info->kill);
1627 }
1628
1629
1630 /* Debugging info at bottom of bb. */
1631
1632 static void
1633 df_live_bottom_dump (basic_block bb, FILE *file)
1634 {
1635 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1636 struct df_live_problem_data *problem_data;
1637
1638 if (!bb_info)
1639 return;
1640
1641 fprintf (file, ";; live out \t");
1642 df_print_regset (file, &bb_info->out);
1643 if (df_live->problem_data)
1644 {
1645 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1646 if (problem_data->out)
1647 {
1648 fprintf (file, ";; old out \t");
1649 df_print_regset (file, &problem_data->out[bb->index]);
1650 }
1651 }
1652 }
1653
1654
1655 /* Build the datastructure to verify that the solution to the dataflow
1656 equations is not dirty. */
1657
1658 static void
1659 df_live_verify_solution_start (void)
1660 {
1661 basic_block bb;
1662 struct df_live_problem_data *problem_data;
1663 if (df_live->solutions_dirty)
1664 return;
1665
1666 /* Set it true so that the solution is recomputed. */
1667 df_live->solutions_dirty = true;
1668
1669 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1670 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1671 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1672
1673 FOR_ALL_BB (bb)
1674 {
1675 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1676 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1677 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1678 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1679 }
1680 }
1681
1682
1683 /* Compare the saved datastructure and the new solution to the dataflow
1684 equations. */
1685
1686 static void
1687 df_live_verify_solution_end (void)
1688 {
1689 struct df_live_problem_data *problem_data;
1690 basic_block bb;
1691
1692 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1693 if (!problem_data->out)
1694 return;
1695
1696 FOR_ALL_BB (bb)
1697 {
1698 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1699 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1700 {
1701 /*df_dump (stderr);*/
1702 gcc_unreachable ();
1703 }
1704 }
1705
1706 /* Cannot delete them immediately because you may want to dump them
1707 if the comparison fails. */
1708 FOR_ALL_BB (bb)
1709 {
1710 bitmap_clear (&problem_data->in[bb->index]);
1711 bitmap_clear (&problem_data->out[bb->index]);
1712 }
1713
1714 free (problem_data->in);
1715 free (problem_data->out);
1716 free (problem_data);
1717 df_live->problem_data = NULL;
1718 }
1719
1720
1721 /* All of the information associated with every instance of the problem. */
1722
1723 static struct df_problem problem_LIVE =
1724 {
1725 DF_LIVE, /* Problem id. */
1726 DF_FORWARD, /* Direction. */
1727 df_live_alloc, /* Allocate the problem specific data. */
1728 df_live_reset, /* Reset global information. */
1729 df_live_free_bb_info, /* Free basic block info. */
1730 df_live_local_compute, /* Local compute function. */
1731 df_live_init, /* Init the solution specific data. */
1732 df_worklist_dataflow, /* Worklist solver. */
1733 NULL, /* Confluence operator 0. */
1734 df_live_confluence_n, /* Confluence operator n. */
1735 df_live_transfer_function, /* Transfer function. */
1736 df_live_finalize, /* Finalize function. */
1737 df_live_free, /* Free all of the problem information. */
1738 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1739 NULL, /* Debugging. */
1740 df_live_top_dump, /* Debugging start block. */
1741 df_live_bottom_dump, /* Debugging end block. */
1742 df_live_verify_solution_start,/* Incremental solution verify start. */
1743 df_live_verify_solution_end, /* Incremental solution verify end. */
1744 &problem_LR, /* Dependent problem. */
1745 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1746 TV_DF_LIVE, /* Timing variable. */
1747 false /* Reset blocks on dropping out of blocks_to_analyze. */
1748 };
1749
1750
1751 /* Create a new DATAFLOW instance and add it to an existing instance
1752 of DF. The returned structure is what is used to get at the
1753 solution. */
1754
1755 void
1756 df_live_add_problem (void)
1757 {
1758 df_add_problem (&problem_LIVE);
1759 /* These will be initialized when df_scan_blocks processes each
1760 block. */
1761 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1762 }
1763
1764
1765 /* Set all of the blocks as dirty. This needs to be done if this
1766 problem is added after all of the insns have been scanned. */
1767
1768 void
1769 df_live_set_all_dirty (void)
1770 {
1771 basic_block bb;
1772 FOR_ALL_BB (bb)
1773 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1774 bb->index);
1775 }
1776
1777
1778 /* Verify that all of the live related info is consistent and
1779 correct. */
1780
1781 void
1782 df_live_verify_transfer_functions (void)
1783 {
1784 basic_block bb;
1785 bitmap_head saved_gen;
1786 bitmap_head saved_kill;
1787 bitmap_head all_blocks;
1788
1789 if (!df)
1790 return;
1791
1792 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1793 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1794 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1795
1796 df_grow_insn_info ();
1797
1798 FOR_ALL_BB (bb)
1799 {
1800 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1801 bitmap_set_bit (&all_blocks, bb->index);
1802
1803 if (bb_info)
1804 {
1805 /* Make a copy of the transfer functions and then compute
1806 new ones to see if the transfer functions have
1807 changed. */
1808 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1809 bb->index))
1810 {
1811 bitmap_copy (&saved_gen, &bb_info->gen);
1812 bitmap_copy (&saved_kill, &bb_info->kill);
1813 bitmap_clear (&bb_info->gen);
1814 bitmap_clear (&bb_info->kill);
1815
1816 df_live_bb_local_compute (bb->index);
1817 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1818 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1819 }
1820 }
1821 else
1822 {
1823 /* If we do not have basic block info, the block must be in
1824 the list of dirty blocks or else someone has added a
1825 block behind our backs. */
1826 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1827 bb->index));
1828 }
1829 /* Make sure no one created a block without following
1830 procedures. */
1831 gcc_assert (df_scan_get_bb_info (bb->index));
1832 }
1833
1834 /* Make sure there are no dirty bits in blocks that have been deleted. */
1835 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1836 &all_blocks));
1837 bitmap_clear (&saved_gen);
1838 bitmap_clear (&saved_kill);
1839 bitmap_clear (&all_blocks);
1840 }
1841 \f
1842 /*----------------------------------------------------------------------------
1843 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1844
1845 Link either the defs to the uses and / or the uses to the defs.
1846
1847 These problems are set up like the other dataflow problems so that
1848 they nicely fit into the framework. They are much simpler and only
1849 involve a single traversal of instructions and an examination of
1850 the reaching defs information (the dependent problem).
1851 ----------------------------------------------------------------------------*/
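/* For illustration, once chains have been built (for example with
   df_chain_add_problem (DF_DU_CHAIN) followed by df_analyze), the uses
   reached by each def of an insn can be walked roughly like this
   (a sketch only):

       df_ref *def_rec;
       struct df_link *link;

       for (def_rec = DF_INSN_UID_DEFS (INSN_UID (insn)); *def_rec; def_rec++)
         for (link = DF_REF_CHAIN (*def_rec); link; link = link->next)
           process (link->ref);            - each link->ref is a reached use

   where "process" stands for whatever the caller wants to do with the
   reached reference.  */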
1852
1853 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1854
1855 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1856
1857 struct df_link *
1858 df_chain_create (df_ref src, df_ref dst)
1859 {
1860 struct df_link *head = DF_REF_CHAIN (src);
1861 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1862
1863 DF_REF_CHAIN (src) = link;
1864 link->next = head;
1865 link->ref = dst;
1866 return link;
1867 }
1868
1869
1870 /* Delete any du or ud chains that start at REF and point to
1871 TARGET. */
1872 static void
1873 df_chain_unlink_1 (df_ref ref, df_ref target)
1874 {
1875 struct df_link *chain = DF_REF_CHAIN (ref);
1876 struct df_link *prev = NULL;
1877
1878 while (chain)
1879 {
1880 if (chain->ref == target)
1881 {
1882 if (prev)
1883 prev->next = chain->next;
1884 else
1885 DF_REF_CHAIN (ref) = chain->next;
1886 pool_free (df_chain->block_pool, chain);
1887 return;
1888 }
1889 prev = chain;
1890 chain = chain->next;
1891 }
1892 }
1893
1894
1895 /* Delete the du or ud chains that leave from or point to REF. */
1896
1897 void
1898 df_chain_unlink (df_ref ref)
1899 {
1900 struct df_link *chain = DF_REF_CHAIN (ref);
1901 while (chain)
1902 {
1903 struct df_link *next = chain->next;
1904 /* Delete the other side if it exists. */
1905 df_chain_unlink_1 (chain->ref, ref);
1906 pool_free (df_chain->block_pool, chain);
1907 chain = next;
1908 }
1909 DF_REF_CHAIN (ref) = NULL;
1910 }
1911
1912
1913 /* Copy the du or ud chain starting at FROM_REF and attach it to
1914 TO_REF. */
1915
1916 void
1917 df_chain_copy (df_ref to_ref,
1918 struct df_link *from_ref)
1919 {
1920 while (from_ref)
1921 {
1922 df_chain_create (to_ref, from_ref->ref);
1923 from_ref = from_ref->next;
1924 }
1925 }
1926
1927
1928 /* Remove this problem from the stack of dataflow problems. */
1929
1930 static void
1931 df_chain_remove_problem (void)
1932 {
1933 bitmap_iterator bi;
1934 unsigned int bb_index;
1935
1936 /* Wholesale destruction of the old chains. */
1937 if (df_chain->block_pool)
1938 free_alloc_pool (df_chain->block_pool);
1939
1940 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1941 {
1942 rtx insn;
1943 df_ref *def_rec;
1944 df_ref *use_rec;
1945 basic_block bb = BASIC_BLOCK (bb_index);
1946
1947 if (df_chain_problem_p (DF_DU_CHAIN))
1948 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1949 DF_REF_CHAIN (*def_rec) = NULL;
1950 if (df_chain_problem_p (DF_UD_CHAIN))
1951 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1952 DF_REF_CHAIN (*use_rec) = NULL;
1953
1954 FOR_BB_INSNS (bb, insn)
1955 {
1956 unsigned int uid = INSN_UID (insn);
1957
1958 if (INSN_P (insn))
1959 {
1960 if (df_chain_problem_p (DF_DU_CHAIN))
1961 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
1962 DF_REF_CHAIN (*def_rec) = NULL;
1963 if (df_chain_problem_p (DF_UD_CHAIN))
1964 {
1965 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
1966 DF_REF_CHAIN (*use_rec) = NULL;
1967 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
1968 DF_REF_CHAIN (*use_rec) = NULL;
1969 }
1970 }
1971 }
1972 }
1973
1974 bitmap_clear (df_chain->out_of_date_transfer_functions);
1975 df_chain->block_pool = NULL;
1976 }
1977
1978
1979 /* Remove the chain problem completely. */
1980
1981 static void
1982 df_chain_fully_remove_problem (void)
1983 {
1984 df_chain_remove_problem ();
1985 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
1986 free (df_chain);
1987 }
1988
1989
1990 /* Create def-use or use-def chains. */
1991
1992 static void
1993 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1994 {
1995 df_chain_remove_problem ();
1996 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
1997 sizeof (struct df_link), 50);
1998 df_chain->optional_p = true;
1999 }
2000
2001
2002 /* Reset all of the chains when the set of basic blocks changes. */
2003
2004 static void
2005 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2006 {
2007 df_chain_remove_problem ();
2008 }
2009
2010
2011 /* Create the chains for a list of USEs. */
2012
2013 static void
2014 df_chain_create_bb_process_use (bitmap local_rd,
2015 df_ref *use_rec,
2016 int top_flag)
2017 {
2018 bitmap_iterator bi;
2019 unsigned int def_index;
2020
2021 while (*use_rec)
2022 {
2023 df_ref use = *use_rec;
2024 unsigned int uregno = DF_REF_REGNO (use);
2025 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2026 || (uregno >= FIRST_PSEUDO_REGISTER))
2027 {
2028 /* Do not want to go through this for an uninitialized var. */
2029 int count = DF_DEFS_COUNT (uregno);
2030 if (count)
2031 {
2032 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2033 {
2034 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2035 unsigned int last_index = first_index + count - 1;
2036
2037 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2038 {
2039 df_ref def;
2040 if (def_index > last_index)
2041 break;
2042
2043 def = DF_DEFS_GET (def_index);
2044 if (df_chain_problem_p (DF_DU_CHAIN))
2045 df_chain_create (def, use);
2046 if (df_chain_problem_p (DF_UD_CHAIN))
2047 df_chain_create (use, def);
2048 }
2049 }
2050 }
2051 }
2052
2053 use_rec++;
2054 }
2055 }
2056
2057
2058 /* Create chains from reaching defs bitmaps for basic block BB. */
2059
2060 static void
2061 df_chain_create_bb (unsigned int bb_index)
2062 {
2063 basic_block bb = BASIC_BLOCK (bb_index);
2064 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2065 rtx insn;
2066 bitmap_head cpy;
2067
2068 bitmap_initialize (&cpy, &bitmap_default_obstack);
2069 bitmap_copy (&cpy, &bb_info->in);
2070 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2071
2072 /* Since we are going forwards, process the artificial uses first,
2073 then the artificial defs. */
2074
2075 #ifdef EH_USES
2076 /* Create the chains for the artificial uses from the EH_USES at the
2077 beginning of the block. */
2078
2079 /* Artificials are only hard regs. */
2080 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2081 df_chain_create_bb_process_use (&cpy,
2082 df_get_artificial_uses (bb->index),
2083 DF_REF_AT_TOP);
2084 #endif
2085
2086 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2087
2088 /* Process the regular instructions next. */
2089 FOR_BB_INSNS (bb, insn)
2090 if (INSN_P (insn))
2091 {
2092 unsigned int uid = INSN_UID (insn);
2093
2094 /* First scan the uses and link them up with the defs that remain
2095 in the cpy bitmap. */
2096 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2097 if (df->changeable_flags & DF_EQ_NOTES)
2098 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2099
2100 /* Since we are going forwards, process the defs second. */
2101 df_rd_simulate_one_insn (bb, insn, &cpy);
2102 }
2103
2104 /* Create the chains for the artificial uses of the hard registers
2105 at the end of the block. */
2106 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2107 df_chain_create_bb_process_use (&cpy,
2108 df_get_artificial_uses (bb->index),
2109 0);
2110
2111 bitmap_clear (&cpy);
2112 }
2113
2114 /* Create def-use and/or use-def chains from the reaching-defs bitmaps
2115 for each basic block in ALL_BLOCKS. */
2116
2117 static void
2118 df_chain_finalize (bitmap all_blocks)
2119 {
2120 unsigned int bb_index;
2121 bitmap_iterator bi;
2122
2123 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2124 {
2125 df_chain_create_bb (bb_index);
2126 }
2127 }
2128
2129
2130 /* Free all storage associated with the problem. */
2131
2132 static void
2133 df_chain_free (void)
2134 {
2135 free_alloc_pool (df_chain->block_pool);
2136 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2137 free (df_chain);
2138 }
2139
2140
2141 /* Debugging info. */
2142
2143 static void
2144 df_chain_top_dump (basic_block bb, FILE *file)
2145 {
2146 if (df_chain_problem_p (DF_DU_CHAIN))
2147 {
2148 rtx insn;
2149 df_ref *def_rec = df_get_artificial_defs (bb->index);
2150 if (*def_rec)
2151 {
2152
2153 fprintf (file, ";; DU chains for artificial defs\n");
2154 while (*def_rec)
2155 {
2156 df_ref def = *def_rec;
2157 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2158 df_chain_dump (DF_REF_CHAIN (def), file);
2159 fprintf (file, "\n");
2160 def_rec++;
2161 }
2162 }
2163
2164 FOR_BB_INSNS (bb, insn)
2165 {
2166 if (INSN_P (insn))
2167 {
2168 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2169 def_rec = DF_INSN_INFO_DEFS (insn_info);
2170 if (*def_rec)
2171 {
2172 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2173 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2174
2175 while (*def_rec)
2176 {
2177 df_ref def = *def_rec;
2178 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2179 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2180 fprintf (file, "read/write ");
2181 df_chain_dump (DF_REF_CHAIN (def), file);
2182 fprintf (file, "\n");
2183 def_rec++;
2184 }
2185 }
2186 }
2187 }
2188 }
2189 }
2190
2191
2192 static void
2193 df_chain_bottom_dump (basic_block bb, FILE *file)
2194 {
2195 if (df_chain_problem_p (DF_UD_CHAIN))
2196 {
2197 rtx insn;
2198 df_ref *use_rec = df_get_artificial_uses (bb->index);
2199
2200 if (*use_rec)
2201 {
2202 fprintf (file, ";; UD chains for artificial uses\n");
2203 while (*use_rec)
2204 {
2205 df_ref use = *use_rec;
2206 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2207 df_chain_dump (DF_REF_CHAIN (use), file);
2208 fprintf (file, "\n");
2209 use_rec++;
2210 }
2211 }
2212
2213 FOR_BB_INSNS (bb, insn)
2214 {
2215 if (INSN_P (insn))
2216 {
2217 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2218 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2219 use_rec = DF_INSN_INFO_USES (insn_info);
2220 if (*use_rec || *eq_use_rec)
2221 {
2222 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2223 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2224
2225 while (*use_rec)
2226 {
2227 df_ref use = *use_rec;
2228 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2229 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2230 fprintf (file, "read/write ");
2231 df_chain_dump (DF_REF_CHAIN (use), file);
2232 fprintf (file, "\n");
2233 use_rec++;
2234 }
2235 while (*eq_use_rec)
2236 {
2237 df_ref use = *eq_use_rec;
2238 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2239 df_chain_dump (DF_REF_CHAIN (use), file);
2240 fprintf (file, "\n");
2241 eq_use_rec++;
2242 }
2243 }
2244 }
2245 }
2246 }
2247 }
2248
2249
2250 static struct df_problem problem_CHAIN =
2251 {
2252 DF_CHAIN, /* Problem id. */
2253 DF_NONE, /* Direction. */
2254 df_chain_alloc, /* Allocate the problem specific data. */
2255 df_chain_reset, /* Reset global information. */
2256 NULL, /* Free basic block info. */
2257 NULL, /* Local compute function. */
2258 NULL, /* Init the solution specific data. */
2259 NULL, /* Iterative solver. */
2260 NULL, /* Confluence operator 0. */
2261 NULL, /* Confluence operator n. */
2262 NULL, /* Transfer function. */
2263 df_chain_finalize, /* Finalize function. */
2264 df_chain_free, /* Free all of the problem information. */
2265 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2266 NULL, /* Debugging. */
2267 df_chain_top_dump, /* Debugging start block. */
2268 df_chain_bottom_dump, /* Debugging end block. */
2269 NULL, /* Incremental solution verify start. */
2270 NULL, /* Incremental solution verify end. */
2271 &problem_RD, /* Dependent problem. */
2272 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2273 TV_DF_CHAIN, /* Timing variable. */
2274 false /* Reset blocks on dropping out of blocks_to_analyze. */
2275 };
2276
2277
2278 /* Create a new DATAFLOW instance and add it to an existing instance
2279 of DF. The returned structure is what is used to get at the
2280 solution. */
2281
2282 void
2283 df_chain_add_problem (unsigned int chain_flags)
2284 {
2285 df_add_problem (&problem_CHAIN);
2286 df_chain->local_flags = chain_flags;
2287 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2288 }
2289
2290 #undef df_chain_problem_p
2291
2292 \f
2293 /*----------------------------------------------------------------------------
2294 WORD LEVEL LIVE REGISTERS
2295
2296 Find the locations in the function where any use of a pseudo can
2297 reach in the backwards direction. In and out bitvectors are built
2298 for each basic block. We only track pseudo registers that have a
2299 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2300 contain two bits corresponding to each of the subwords.
2301
2302 ----------------------------------------------------------------------------*/
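/* For illustration, the encoding used by this problem: for a tracked
   double-word pseudo R (a sketch only),

       bitmap_bit_p (live, 2 * REGNO (R))         tests the low subword,
       bitmap_bit_p (live, 2 * REGNO (R) + 1)     tests the high subword.

   Hard registers, and pseudos whose mode is not exactly
   2 * UNITS_PER_WORD bytes wide, never appear in these bitmaps.  */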
2303
2304 /* Private data used to verify the solution for this problem. */
2305 struct df_word_lr_problem_data
2306 {
2307 /* An obstack for the bitmaps we need for this problem. */
2308 bitmap_obstack word_lr_bitmaps;
2309 };
2310
2311
2312 /* Free basic block info. */
2313
2314 static void
2315 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2316 void *vbb_info)
2317 {
2318 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2319 if (bb_info)
2320 {
2321 bitmap_clear (&bb_info->use);
2322 bitmap_clear (&bb_info->def);
2323 bitmap_clear (&bb_info->in);
2324 bitmap_clear (&bb_info->out);
2325 }
2326 }
2327
2328
2329 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2330 not touched unless the block is new. */
2331
2332 static void
2333 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2334 {
2335 unsigned int bb_index;
2336 bitmap_iterator bi;
2337 basic_block bb;
2338 struct df_word_lr_problem_data *problem_data
2339 = XNEW (struct df_word_lr_problem_data);
2340
2341 df_word_lr->problem_data = problem_data;
2342
2343 df_grow_bb_info (df_word_lr);
2344
2345 /* Create the mapping from regnos to slots. This does not change
2346 unless the problem is destroyed and recreated. In particular, if
2347 we end up deleting the only insn that used a subreg, we do not
2348 want to redo the mapping because this would invalidate everything
2349 else. */
2350
2351 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2352
2353 FOR_EACH_BB (bb)
2354 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2355
2356 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2357 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2358
2359 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2360 {
2361 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2362
2363 /* When bitmaps are already initialized, just clear them. */
2364 if (bb_info->use.obstack)
2365 {
2366 bitmap_clear (&bb_info->def);
2367 bitmap_clear (&bb_info->use);
2368 }
2369 else
2370 {
2371 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2372 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2373 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2374 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2375 }
2376 }
2377
2378 df_word_lr->optional_p = true;
2379 }
2380
2381
2382 /* Reset the global solution for recalculation. */
2383
2384 static void
2385 df_word_lr_reset (bitmap all_blocks)
2386 {
2387 unsigned int bb_index;
2388 bitmap_iterator bi;
2389
2390 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2391 {
2392 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2393 gcc_assert (bb_info);
2394 bitmap_clear (&bb_info->in);
2395 bitmap_clear (&bb_info->out);
2396 }
2397 }
2398
2399 /* Examine REF, and if it is for a reg we're interested in, set or
2400 clear the bits corresponding to its subwords from the bitmap
2401 according to IS_SET. LIVE is the bitmap we should update. We do
2402 not track hard regs or pseudos of any size other than 2 *
2403 UNITS_PER_WORD.
2404 We return true if we changed the bitmap, or if we encountered a register
2405 we're not tracking. */
2406
2407 bool
2408 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2409 {
2410 rtx orig_reg = DF_REF_REG (ref);
2411 rtx reg = orig_reg;
2412 enum machine_mode reg_mode;
2413 unsigned regno;
2414 /* Left at -1 for whole accesses. */
2415 int which_subword = -1;
2416 bool changed = false;
2417
2418 if (GET_CODE (reg) == SUBREG)
2419 reg = SUBREG_REG (orig_reg);
2420 regno = REGNO (reg);
2421 reg_mode = GET_MODE (reg);
2422 if (regno < FIRST_PSEUDO_REGISTER
2423 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2424 return true;
2425
2426 if (GET_CODE (orig_reg) == SUBREG
2427 && df_read_modify_subreg_p (orig_reg))
2428 {
2429 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2430 if (subreg_lowpart_p (orig_reg))
2431 which_subword = 0;
2432 else
2433 which_subword = 1;
2434 }
2435 if (is_set)
2436 {
2437 if (which_subword != 1)
2438 changed |= bitmap_set_bit (live, regno * 2);
2439 if (which_subword != 0)
2440 changed |= bitmap_set_bit (live, regno * 2 + 1);
2441 }
2442 else
2443 {
2444 if (which_subword != 1)
2445 changed |= bitmap_clear_bit (live, regno * 2);
2446 if (which_subword != 0)
2447 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2448 }
2449 return changed;
2450 }
2451
2452 /* Compute local live register info for basic block BB. */
2453
2454 static void
2455 df_word_lr_bb_local_compute (unsigned int bb_index)
2456 {
2457 basic_block bb = BASIC_BLOCK (bb_index);
2458 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2459 rtx insn;
2460 df_ref *def_rec;
2461 df_ref *use_rec;
2462
2463 /* Ensure that artificial refs don't contain references to pseudos. */
2464 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2465 {
2466 df_ref def = *def_rec;
2467 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2468 }
2469
2470 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2471 {
2472 df_ref use = *use_rec;
2473 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2474 }
2475
2476 FOR_BB_INSNS_REVERSE (bb, insn)
2477 {
2478 unsigned int uid = INSN_UID (insn);
2479
2480 if (!NONDEBUG_INSN_P (insn))
2481 continue;
2482 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2483 {
2484 df_ref def = *def_rec;
2485 /* If the def is to only part of the reg, it does
2486 not kill the other defs that reach here. */
2487 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2488 {
2489 df_word_lr_mark_ref (def, true, &bb_info->def);
2490 df_word_lr_mark_ref (def, false, &bb_info->use);
2491 }
2492 }
2493 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2494 {
2495 df_ref use = *use_rec;
2496 df_word_lr_mark_ref (use, true, &bb_info->use);
2497 }
2498 }
2499 }
2500
2501
2502 /* Compute local live register info for each basic block within BLOCKS. */
2503
2504 static void
2505 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2506 {
2507 unsigned int bb_index;
2508 bitmap_iterator bi;
2509
2510 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2511 {
2512 if (bb_index == EXIT_BLOCK)
2513 {
2514 unsigned regno;
2515 bitmap_iterator bi;
2516 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2517 regno, bi)
2518 gcc_unreachable ();
2519 }
2520 else
2521 df_word_lr_bb_local_compute (bb_index);
2522 }
2523
2524 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2525 }
2526
2527
2528 /* Initialize the solution vectors. */
2529
2530 static void
2531 df_word_lr_init (bitmap all_blocks)
2532 {
2533 unsigned int bb_index;
2534 bitmap_iterator bi;
2535
2536 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2537 {
2538 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2539 bitmap_copy (&bb_info->in, &bb_info->use);
2540 bitmap_clear (&bb_info->out);
2541 }
2542 }
2543
2544
2545 /* Confluence function that ignores fake edges. */
2546
2547 static bool
2548 df_word_lr_confluence_n (edge e)
2549 {
2550 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2551 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2552
2553 return bitmap_ior_into (op1, op2);
2554 }
2555
2556
2557 /* Transfer function. */
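/* Stated as an equation (the word-level form of the usual backwards
   liveness relation): in = use | (out & ~def).  */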
2558
2559 static bool
2560 df_word_lr_transfer_function (int bb_index)
2561 {
2562 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2563 bitmap in = &bb_info->in;
2564 bitmap out = &bb_info->out;
2565 bitmap use = &bb_info->use;
2566 bitmap def = &bb_info->def;
2567
2568 return bitmap_ior_and_compl (in, use, out, def);
2569 }
2570
2571
2572 /* Free all storage associated with the problem. */
2573
2574 static void
2575 df_word_lr_free (void)
2576 {
2577 struct df_word_lr_problem_data *problem_data
2578 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2579
2580 if (df_word_lr->block_info)
2581 {
2582 df_word_lr->block_info_size = 0;
2583 free (df_word_lr->block_info);
2584 df_word_lr->block_info = NULL;
2585 }
2586
2587 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2588 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2589 free (problem_data);
2590 free (df_word_lr);
2591 }
2592
2593
2594 /* Debugging info at top of bb. */
2595
2596 static void
2597 df_word_lr_top_dump (basic_block bb, FILE *file)
2598 {
2599 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2600 if (!bb_info)
2601 return;
2602
2603 fprintf (file, ";; blr in \t");
2604 df_print_word_regset (file, &bb_info->in);
2605 fprintf (file, ";; blr use \t");
2606 df_print_word_regset (file, &bb_info->use);
2607 fprintf (file, ";; blr def \t");
2608 df_print_word_regset (file, &bb_info->def);
2609 }
2610
2611
2612 /* Debugging info at bottom of bb. */
2613
2614 static void
2615 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2616 {
2617 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2618 if (!bb_info)
2619 return;
2620
2621 fprintf (file, ";; blr out \t");
2622 df_print_word_regset (file, &bb_info->out);
2623 }
2624
2625
2626 /* All of the information associated with every instance of the problem. */
2627
2628 static struct df_problem problem_WORD_LR =
2629 {
2630 DF_WORD_LR, /* Problem id. */
2631 DF_BACKWARD, /* Direction. */
2632 df_word_lr_alloc, /* Allocate the problem specific data. */
2633 df_word_lr_reset, /* Reset global information. */
2634 df_word_lr_free_bb_info, /* Free basic block info. */
2635 df_word_lr_local_compute, /* Local compute function. */
2636 df_word_lr_init, /* Init the solution specific data. */
2637 df_worklist_dataflow, /* Worklist solver. */
2638 NULL, /* Confluence operator 0. */
2639 df_word_lr_confluence_n, /* Confluence operator n. */
2640 df_word_lr_transfer_function, /* Transfer function. */
2641 NULL, /* Finalize function. */
2642 df_word_lr_free, /* Free all of the problem information. */
2643 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2644 NULL, /* Debugging. */
2645 df_word_lr_top_dump, /* Debugging start block. */
2646 df_word_lr_bottom_dump, /* Debugging end block. */
2647 NULL, /* Incremental solution verify start. */
2648 NULL, /* Incremental solution verify end. */
2649 NULL, /* Dependent problem. */
2650 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2651 TV_DF_WORD_LR, /* Timing variable. */
2652 false /* Reset blocks on dropping out of blocks_to_analyze. */
2653 };
2654
2655
2656 /* Create a new DATAFLOW instance and add it to an existing instance
2657 of DF. The returned structure is what is used to get at the
2658 solution. */
2659
2660 void
2661 df_word_lr_add_problem (void)
2662 {
2663 df_add_problem (&problem_WORD_LR);
2664 /* These will be initialized when df_scan_blocks processes each
2665 block. */
2666 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2667 }
2668
2669
2670 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2671 any bits, which is used by the caller to determine whether a set is
2672 necessary. We also return true if there are other reasons not to delete
2673 an insn. */
2674
2675 bool
2676 df_word_lr_simulate_defs (rtx insn, bitmap live)
2677 {
2678 bool changed = false;
2679 df_ref *def_rec;
2680 unsigned int uid = INSN_UID (insn);
2681
2682 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2683 {
2684 df_ref def = *def_rec;
2685 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2686 changed = true;
2687 else
2688 changed |= df_word_lr_mark_ref (*def_rec, false, live);
2689 }
2690 return changed;
2691 }
2692
2693
2694 /* Simulate the effects of the uses of INSN on LIVE. */
2695
2696 void
2697 df_word_lr_simulate_uses (rtx insn, bitmap live)
2698 {
2699 df_ref *use_rec;
2700 unsigned int uid = INSN_UID (insn);
2701
2702 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2703 df_word_lr_mark_ref (*use_rec, true, live);
2704 }
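/* For illustration, the two simulate routines above are typically used
   together in a backwards walk over a block, roughly as follows (a
   sketch; it assumes the DF_WORD_LR problem has been computed and uses
   the DF_WORD_LR_OUT accessor from df.h):

       bitmap_head live;
       rtx insn;

       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, DF_WORD_LR_OUT (bb));
       FOR_BB_INSNS_REVERSE (bb, insn)
         if (NONDEBUG_INSN_P (insn))
           {
             bool needed = df_word_lr_simulate_defs (insn, &live);
             df_word_lr_simulate_uses (insn, &live);
             ... NEEDED tells the caller whether the insn's result is
                 (partially) live or otherwise cannot be deleted ...
           }
       bitmap_clear (&live);
*/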
2705 \f
2706 /*----------------------------------------------------------------------------
2707 This problem computes REG_DEAD and REG_UNUSED notes.
2708 ----------------------------------------------------------------------------*/
2709
2710 static void
2711 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2712 {
2713 df_note->optional_p = true;
2714 }
2715
2716 #ifdef REG_DEAD_DEBUGGING
2717 static void
2718 df_print_note (const char *prefix, rtx insn, rtx note)
2719 {
2720 if (dump_file)
2721 {
2722 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2723 print_rtl (dump_file, note);
2724 fprintf (dump_file, "\n");
2725 }
2726 }
2727 #endif
2728
2729
2730 /* After reg-stack, the x86 floating point stack regs are difficult to
2731 analyze because of all of the pushes, pops and rotations. Thus, we
2732 just leave the notes alone. */
2733
2734 #ifdef STACK_REGS
2735 static inline bool
2736 df_ignore_stack_reg (int regno)
2737 {
2738 return regstack_completed
2739 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2740 }
2741 #else
2742 static inline bool
2743 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2744 {
2745 return false;
2746 }
2747 #endif
2748
2749
2750 /* Remove and free all of the REG_DEAD and REG_UNUSED notes of
2751 INSN. Also remove any REG_EQUAL/REG_EQUIV notes that refer to
2752 dead pseudos, using LIVE as the bitmap of currently live
2753 registers. */
2754
2755 static void
2756 df_kill_notes (rtx insn, bitmap live)
2757 {
2758 rtx *pprev = &REG_NOTES (insn);
2759 rtx link = *pprev;
2760
2761 while (link)
2762 {
2763 switch (REG_NOTE_KIND (link))
2764 {
2765 case REG_DEAD:
2766 /* After reg-stack, we need to ignore any death notes
2767 for the stack registers. */
2768 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2769 {
2770 pprev = &XEXP (link, 1);
2771 link = *pprev;
2772 }
2773 else
2774 {
2775 rtx next = XEXP (link, 1);
2776 #ifdef REG_DEAD_DEBUGGING
2777 df_print_note ("deleting: ", insn, link);
2778 #endif
2779 free_EXPR_LIST_node (link);
2780 *pprev = link = next;
2781 }
2782 break;
2783
2784 case REG_UNUSED:
2785 /* After reg-stack, we need to ignore any unused notes
2786 for the stack registers. */
2787 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2788 {
2789 pprev = &XEXP (link, 1);
2790 link = *pprev;
2791 }
2792 else
2793 {
2794 rtx next = XEXP (link, 1);
2795 #ifdef REG_DEAD_DEBUGGING
2796 df_print_note ("deleting: ", insn, link);
2797 #endif
2798 free_EXPR_LIST_node (link);
2799 *pprev = link = next;
2800 }
2801 break;
2802
2803 case REG_EQUAL:
2804 case REG_EQUIV:
2805 {
2806 /* Remove the notes that refer to dead registers. As we have at most
2807 one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
2808 so we need to purge the complete EQ_USES vector when removing
2809 the note using df_notes_rescan. */
2810 df_ref *use_rec;
2811 bool deleted = false;
2812
2813 for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
2814 {
2815 df_ref use = *use_rec;
2816 if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
2817 && DF_REF_LOC (use)
2818 && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
2819 && ! bitmap_bit_p (live, DF_REF_REGNO (use))
2820 && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
2821 {
2822 deleted = true;
2823 break;
2824 }
2825 }
2826 if (deleted)
2827 {
2828 rtx next;
2829 #ifdef REG_DEAD_DEBUGGING
2830 df_print_note ("deleting: ", insn, link);
2831 #endif
2832 next = XEXP (link, 1);
2833 free_EXPR_LIST_node (link);
2834 *pprev = link = next;
2835 df_notes_rescan (insn);
2836 }
2837 else
2838 {
2839 pprev = &XEXP (link, 1);
2840 link = *pprev;
2841 }
2842 break;
2843 }
2844 default:
2845 pprev = &XEXP (link, 1);
2846 link = *pprev;
2847 break;
2848 }
2849 }
2850 }
2851
2852
2853 /* Set a NOTE_TYPE note for REG in INSN. */
2854
2855 static inline void
2856 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2857 {
2858 gcc_checking_assert (!DEBUG_INSN_P (insn));
2859 add_reg_note (insn, note_type, reg);
2860 }
2861
2862 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2863 arguments. Return true if the register value described by MWS's
2864 mw_reg is known to be completely unused, and if mw_reg can therefore
2865 be used in a REG_UNUSED note. */
2866
2867 static bool
2868 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2869 bitmap live, bitmap artificial_uses)
2870 {
2871 unsigned int r;
2872
2873 /* If MWS describes a partial reference, create REG_UNUSED notes for
2874 individual hard registers. */
2875 if (mws->flags & DF_REF_PARTIAL)
2876 return false;
2877
2878 /* Likewise if some part of the register is used. */
2879 for (r = mws->start_regno; r <= mws->end_regno; r++)
2880 if (bitmap_bit_p (live, r)
2881 || bitmap_bit_p (artificial_uses, r))
2882 return false;
2883
2884 gcc_assert (REG_P (mws->mw_reg));
2885 return true;
2886 }
2887
2888
2889 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2890 based on the bits in LIVE. Do not generate notes for registers in
2891 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2892 not generated if the reg is both read and written by the
2893 instruction.
2894 */
2895
2896 static void
2897 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2898 bitmap live, bitmap do_not_gen,
2899 bitmap artificial_uses,
2900 struct dead_debug *debug)
2901 {
2902 unsigned int r;
2903
2904 #ifdef REG_DEAD_DEBUGGING
2905 if (dump_file)
2906 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
2907 mws->start_regno, mws->end_regno);
2908 #endif
2909
2910 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
2911 {
2912 unsigned int regno = mws->start_regno;
2913 df_set_note (REG_UNUSED, insn, mws->mw_reg);
2914 dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
2915
2916 #ifdef REG_DEAD_DEBUGGING
2917 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2918 #endif
2919 bitmap_set_bit (do_not_gen, regno);
2920 /* Only do this if the value is totally dead. */
2921 }
2922 else
2923 for (r = mws->start_regno; r <= mws->end_regno; r++)
2924 {
2925 if (!bitmap_bit_p (live, r)
2926 && !bitmap_bit_p (artificial_uses, r))
2927 {
2928 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
2929 dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
2930 #ifdef REG_DEAD_DEBUGGING
2931 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
2932 #endif
2933 }
2934 bitmap_set_bit (do_not_gen, r);
2935 }
2936 }
2937
2938
2939 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
2940 arguments. Return true if the register value described by MWS's
2941 mw_reg is known to be completely dead, and if mw_reg can therefore
2942 be used in a REG_DEAD note. */
2943
2944 static bool
2945 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
2946 bitmap live, bitmap artificial_uses,
2947 bitmap do_not_gen)
2948 {
2949 unsigned int r;
2950
2951 /* If MWS describes a partial reference, create REG_DEAD notes for
2952 individual hard registers. */
2953 if (mws->flags & DF_REF_PARTIAL)
2954 return false;
2955
2956 /* Likewise if some part of the register is not dead. */
2957 for (r = mws->start_regno; r <= mws->end_regno; r++)
2958 if (bitmap_bit_p (live, r)
2959 || bitmap_bit_p (artificial_uses, r)
2960 || bitmap_bit_p (do_not_gen, r))
2961 return false;
2962
2963 gcc_assert (REG_P (mws->mw_reg));
2964 return true;
2965 }
2966
2967 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
2968 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
2969 from being set if the instruction both reads and writes the
2970 register. */
2971
2972 static void
2973 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2974 bitmap live, bitmap do_not_gen,
2975 bitmap artificial_uses, bool *added_notes_p)
2976 {
2977 unsigned int r;
2978 bool is_debug = *added_notes_p;
2979
2980 *added_notes_p = false;
2981
2982 #ifdef REG_DEAD_DEBUGGING
2983 if (dump_file)
2984 {
2985 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
2986 mws->start_regno, mws->end_regno);
2987 df_print_regset (dump_file, do_not_gen);
2988 fprintf (dump_file, " live =");
2989 df_print_regset (dump_file, live);
2990 fprintf (dump_file, " artificial uses =");
2991 df_print_regset (dump_file, artificial_uses);
2992 }
2993 #endif
2994
2995 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
2996 {
2997 if (is_debug)
2998 {
2999 *added_notes_p = true;
3000 return;
3001 }
3002 /* Add a dead note for the entire multi word register. */
3003 df_set_note (REG_DEAD, insn, mws->mw_reg);
3004 #ifdef REG_DEAD_DEBUGGING
3005 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3006 #endif
3007 }
3008 else
3009 {
3010 for (r = mws->start_regno; r <= mws->end_regno; r++)
3011 if (!bitmap_bit_p (live, r)
3012 && !bitmap_bit_p (artificial_uses, r)
3013 && !bitmap_bit_p (do_not_gen, r))
3014 {
3015 if (is_debug)
3016 {
3017 *added_notes_p = true;
3018 return;
3019 }
3020 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
3021 #ifdef REG_DEAD_DEBUGGING
3022 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3023 #endif
3024 }
3025 }
3026 return;
3027 }
3028
3029
3030 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3031 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3032
3033 static void
3034 df_create_unused_note (rtx insn, df_ref def,
3035 bitmap live, bitmap artificial_uses,
3036 struct dead_debug *debug)
3037 {
3038 unsigned int dregno = DF_REF_REGNO (def);
3039
3040 #ifdef REG_DEAD_DEBUGGING
3041 if (dump_file)
3042 {
3043 fprintf (dump_file, " regular looking at def ");
3044 df_ref_debug (def, dump_file);
3045 }
3046 #endif
3047
3048 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3049 || bitmap_bit_p (live, dregno)
3050 || bitmap_bit_p (artificial_uses, dregno)
3051 || df_ignore_stack_reg (dregno)))
3052 {
3053 rtx reg = (DF_REF_LOC (def))
3054 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3055 df_set_note (REG_UNUSED, insn, reg);
3056 dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3057 #ifdef REG_DEAD_DEBUGGING
3058 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3059 #endif
3060 }
3061
3062 return;
3063 }
3064
3065
3066 /* Initialize DEBUG to an empty list, and clear USED, if given. */
3067 void
3068 dead_debug_init (struct dead_debug *debug, bitmap used)
3069 {
3070 debug->head = NULL;
3071 debug->used = used;
3072 debug->to_rescan = NULL;
3073 if (used)
3074 bitmap_clear (used);
3075 }
3076
3077 /* Reset all debug uses in HEAD, and clear DEBUG->to_rescan bits of
3078 each reset insn. DEBUG is not otherwise modified. If HEAD is
3079 DEBUG->head, DEBUG->head will be set to NULL at the end.
3080 Otherwise, entries from DEBUG->head that pertain to reset insns
3081 will be removed, and only then rescanned. */
3082
3083 static void
3084 dead_debug_reset_uses (struct dead_debug *debug, struct dead_debug_use *head)
3085 {
3086 bool got_head = (debug->head == head);
3087 bitmap rescan;
3088 struct dead_debug_use **tailp = &debug->head;
3089 struct dead_debug_use *cur;
3090 bitmap_iterator bi;
3091 unsigned int uid;
3092
3093 if (got_head)
3094 rescan = NULL;
3095 else
3096 rescan = BITMAP_ALLOC (NULL);
3097
3098 while (head)
3099 {
3100 struct dead_debug_use *next = head->next;
3101 rtx insn;
3102
3103 insn = DF_REF_INSN (head->use);
3104 if (!next || DF_REF_INSN (next->use) != insn)
3105 {
3106 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3107 if (got_head)
3108 df_insn_rescan_debug_internal (insn);
3109 else
3110 bitmap_set_bit (rescan, INSN_UID (insn));
3111 if (debug->to_rescan)
3112 bitmap_clear_bit (debug->to_rescan, INSN_UID (insn));
3113 }
3114 XDELETE (head);
3115 head = next;
3116 }
3117
3118 if (got_head)
3119 {
3120 debug->head = NULL;
3121 return;
3122 }
3123
3124 while ((cur = *tailp))
3125 if (bitmap_bit_p (rescan, INSN_UID (DF_REF_INSN (cur->use))))
3126 {
3127 *tailp = cur->next;
3128 XDELETE (cur);
3129 }
3130 else
3131 tailp = &cur->next;
3132
3133 EXECUTE_IF_SET_IN_BITMAP (rescan, 0, uid, bi)
3134 {
3135 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
3136 if (insn_info)
3137 df_insn_rescan_debug_internal (insn_info->insn);
3138 }
3139
3140 BITMAP_FREE (rescan);
3141 }
3142
3143 /* Reset all debug insns with pending uses. Release DEBUG's used
3144 bitmap unless it is USED. USED must be the same bitmap passed to
3145 dead_debug_init. */
3146 void
3147 dead_debug_finish (struct dead_debug *debug, bitmap used)
3148 {
3149 if (debug->used != used)
3150 BITMAP_FREE (debug->used);
3151
3152 dead_debug_reset_uses (debug, debug->head);
3153
3154 if (debug->to_rescan)
3155 {
3156 bitmap_iterator bi;
3157 unsigned int uid;
3158
3159 EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi)
3160 {
3161 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
3162 if (insn_info)
3163 df_insn_rescan (insn_info->insn);
3164 }
3165 BITMAP_FREE (debug->to_rescan);
3166 }
3167 }
3168
3169 /* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug
3170 insn. Create a bitmap for DEBUG as needed. */
3171 void
3172 dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno)
3173 {
3174 struct dead_debug_use *newddu = XNEW (struct dead_debug_use);
3175
3176 newddu->use = use;
3177 newddu->next = debug->head;
3178 debug->head = newddu;
3179
3180 if (!debug->used)
3181 debug->used = BITMAP_ALLOC (NULL);
3182
3183 bitmap_set_bit (debug->used, uregno);
3184 }
3185
3186 /* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
3187 before or after INSN (depending on WHERE), that binds a debug temp
3188 to the widest-mode use of UREGNO, if WHERE is *_WITH_REG, or the
3189 value stored in UREGNO by INSN otherwise, and replace all uses of
3190 UREGNO in DEBUG with uses of the debug temp. INSN must be where
3191 UREGNO dies, if WHERE is *_BEFORE_*, or where it is set otherwise.
3192 Return the number of debug insns emitted. */
3193 int
3194 dead_debug_insert_temp (struct dead_debug *debug, unsigned int uregno,
3195 rtx insn, enum debug_temp_where where)
3196 {
3197 struct dead_debug_use **tailp = &debug->head;
3198 struct dead_debug_use *cur;
3199 struct dead_debug_use *uses = NULL;
3200 struct dead_debug_use **usesp = &uses;
3201 rtx reg = NULL;
3202 rtx breg;
3203 rtx dval;
3204 rtx bind;
3205
3206 if (!debug->used || !bitmap_clear_bit (debug->used, uregno))
3207 return 0;
3208
3209 /* Move all uses of UREGNO from debug->head to USES, setting REG to
3210 the referenced location with the widest mode. */
3211 while ((cur = *tailp))
3212 {
3213 if (DF_REF_REGNO (cur->use) == uregno)
3214 {
3215 *usesp = cur;
3216 usesp = &cur->next;
3217 *tailp = cur->next;
3218 cur->next = NULL;
3219 if (!reg
3220 || (GET_MODE_BITSIZE (GET_MODE (reg))
3221 < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use)))))
3222 reg = *DF_REF_REAL_LOC (cur->use);
3223 }
3224 else
3225 tailp = &(*tailp)->next;
3226 }
3227
3228 /* We may have dangling bits in debug->used for registers that were part
3229 of a multi-register use, one component of which has been reset. */
3230 if (reg == NULL)
3231 {
3232 gcc_checking_assert (!uses);
3233 return 0;
3234 }
3235
3236 gcc_checking_assert (uses);
3237
3238 breg = reg;
3239 /* Recover the expression INSN stores in REG. */
3240 if (where == DEBUG_TEMP_BEFORE_WITH_VALUE)
3241 {
3242 rtx set = single_set (insn);
3243 rtx dest, src;
3244
3245 if (set)
3246 {
3247 dest = SET_DEST (set);
3248 src = SET_SRC (set);
3249 /* Lose if the REG-setting insn is a CALL. */
3250 if (GET_CODE (src) == CALL)
3251 {
3252 while (uses)
3253 {
3254 cur = uses->next;
3255 XDELETE (uses);
3256 uses = cur;
3257 }
3258 return 0;
3259 }
3260 }
3261
3262 /* ??? Should we try to extract it from a PARALLEL? */
3263 if (!set)
3264 breg = NULL;
3265 /* Cool, it's the same REG, we can use SRC. */
3266 else if (dest == reg)
3267 breg = copy_rtx (src);
3268 else if (REG_P (dest))
3269 {
3270 /* Hmm... Something's fishy, we should be setting REG here. */
3271 if (REGNO (dest) != REGNO (reg))
3272 breg = NULL;
3273 /* Ok, it's the same (hardware) REG, but with a different
3274 mode, so SUBREG it. */
3275 else
3276 breg = lowpart_subreg (GET_MODE (reg), copy_rtx (src),
3277 GET_MODE (dest));
3278 }
3279 else if (GET_CODE (dest) == SUBREG)
3280 {
3281 /* We should be setting REG here. Lose. */
3282 if (REGNO (SUBREG_REG (dest)) != REGNO (reg))
3283 breg = NULL;
3284 /* Lose if we're setting something other than the lowpart of
3285 REG. */
3286 else if (!subreg_lowpart_p (dest))
3287 breg = NULL;
3288 /* If we're not overwriting all the hardware registers that
3289 setting REG in its mode would, we won't know what to bind
3290 the debug temp to. */
3291 else if (REGNO (reg) < FIRST_PSEUDO_REGISTER
3292 && (hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]
3293 != hard_regno_nregs[REGNO (reg)][GET_MODE (dest)]))
3294 breg = NULL;
3295 /* Yay, we can use SRC, just adjust its mode. */
3296 else
3297 breg = lowpart_subreg (GET_MODE (reg), copy_rtx (src),
3298 GET_MODE (dest));
3299 }
3300 /* Oh well, we're out of luck. */
3301 else
3302 breg = NULL;
3303
3304 /* We couldn't figure out the value stored in REG, so reset all
3305 of its pending debug uses. */
3306 if (!breg)
3307 {
3308 dead_debug_reset_uses (debug, uses);
3309 return 0;
3310 }
3311 }
3312
3313 /* If there's a single (debug) use of an otherwise unused REG, and
3314 the debug use is not part of a larger expression, then it
3315 probably doesn't make sense to introduce a new debug temp. */
3316 if (where == DEBUG_TEMP_AFTER_WITH_REG && !uses->next)
3317 {
3318 rtx next = DF_REF_INSN (uses->use);
3319
3320 if (DEBUG_INSN_P (next) && reg == INSN_VAR_LOCATION_LOC (next))
3321 {
3322 XDELETE (uses);
3323 return 0;
3324 }
3325 }
3326
3327 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3328 dval = make_debug_expr_from_rtl (reg);
3329
3330 /* Emit a debug bind insn before the insn in which reg dies. */
3331 bind = gen_rtx_VAR_LOCATION (GET_MODE (reg),
3332 DEBUG_EXPR_TREE_DECL (dval), breg,
3333 VAR_INIT_STATUS_INITIALIZED);
3334
3335 if (where == DEBUG_TEMP_AFTER_WITH_REG)
3336 bind = emit_debug_insn_after (bind, insn);
3337 else
3338 bind = emit_debug_insn_before (bind, insn);
3339 df_insn_rescan (bind);
3340
3341 /* Adjust all uses. */
3342 while ((cur = uses))
3343 {
3344 if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg))
3345 *DF_REF_REAL_LOC (cur->use) = dval;
3346 else
3347 *DF_REF_REAL_LOC (cur->use)
3348 = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
3349 /* ??? Should we simplify subreg of subreg? */
3350 if (debug->to_rescan == NULL)
3351 debug->to_rescan = BITMAP_ALLOC (NULL);
3352 bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use)));
3353 uses = cur->next;
3354 XDELETE (cur);
3355 }
3356
3357 return 1;
3358 }
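/* For illustration, the dead_debug_* routines above are used as a unit;
   a sketch of the lifecycle, mirroring df_note_bb_compute below:

       struct dead_debug debug;

       dead_debug_init (&debug, NULL);
       ... while walking the insns of a block backwards:
             a use of a dead register seen in a debug insn is recorded
             with dead_debug_add (&debug, use, DF_REF_REGNO (use));
             and when such a register is found to die (or be set) in a
             nondebug insn, dead_debug_insert_temp is called there to
             bind a debug temp before the debug uses are reset ...
       dead_debug_finish (&debug, NULL);
*/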
3359
3360 /* Recompute the REG_DEAD and REG_UNUSED notes for the insns of the
3361 basic block BB_INDEX. LIVE, DO_NOT_GEN and ARTIFICIAL_USES are
3362 scratch bitmaps used here. */
3363
3364 static void
3365 df_note_bb_compute (unsigned int bb_index,
3366 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3367 {
3368 basic_block bb = BASIC_BLOCK (bb_index);
3369 rtx insn;
3370 df_ref *def_rec;
3371 df_ref *use_rec;
3372 struct dead_debug debug;
3373
3374 dead_debug_init (&debug, NULL);
3375
3376 bitmap_copy (live, df_get_live_out (bb));
3377 bitmap_clear (artificial_uses);
3378
3379 #ifdef REG_DEAD_DEBUGGING
3380 if (dump_file)
3381 {
3382 fprintf (dump_file, "live at bottom ");
3383 df_print_regset (dump_file, live);
3384 }
3385 #endif
3386
3387 /* Process the artificial defs and uses at the bottom of the block
3388 to begin processing. */
3389 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3390 {
3391 df_ref def = *def_rec;
3392 #ifdef REG_DEAD_DEBUGGING
3393 if (dump_file)
3394 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3395 #endif
3396
3397 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3398 bitmap_clear_bit (live, DF_REF_REGNO (def));
3399 }
3400
3401 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3402 {
3403 df_ref use = *use_rec;
3404 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3405 {
3406 unsigned int regno = DF_REF_REGNO (use);
3407 bitmap_set_bit (live, regno);
3408
3409 /* Notes are not generated for any of the artificial registers
3410 at the bottom of the block. */
3411 bitmap_set_bit (artificial_uses, regno);
3412 }
3413 }
3414
3415 #ifdef REG_DEAD_DEBUGGING
3416 if (dump_file)
3417 {
3418 fprintf (dump_file, "live before artificials out ");
3419 df_print_regset (dump_file, live);
3420 }
3421 #endif
3422
3423 FOR_BB_INSNS_REVERSE (bb, insn)
3424 {
3425 unsigned int uid = INSN_UID (insn);
3426 struct df_mw_hardreg **mws_rec;
3427 int debug_insn;
3428
3429 if (!INSN_P (insn))
3430 continue;
3431
3432 debug_insn = DEBUG_INSN_P (insn);
3433
3434 bitmap_clear (do_not_gen);
3435 df_kill_notes (insn, live);
3436
3437 /* Process the defs. */
3438 if (CALL_P (insn))
3439 {
3440 #ifdef REG_DEAD_DEBUGGING
3441 if (dump_file)
3442 {
3443 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3444 df_print_regset (dump_file, live);
3445 }
3446 #endif
3447 /* We only care about real sets for calls. Clobbered registers
3448 cannot be depended on to really be dead. */
3449 mws_rec = DF_INSN_UID_MWS (uid);
3450 while (*mws_rec)
3451 {
3452 struct df_mw_hardreg *mws = *mws_rec;
3453 if ((DF_MWS_REG_DEF_P (mws))
3454 && !df_ignore_stack_reg (mws->start_regno))
3455 df_set_unused_notes_for_mw (insn,
3456 mws, live, do_not_gen,
3457 artificial_uses, &debug);
3458 mws_rec++;
3459 }
3460
3461 /* All of the defs except the return value are some sort of
3462 clobber. This code is for the return value. */
3463 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3464 {
3465 df_ref def = *def_rec;
3466 unsigned int dregno = DF_REF_REGNO (def);
3467 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3468 {
3469 df_create_unused_note (insn,
3470 def, live, artificial_uses, &debug);
3471 bitmap_set_bit (do_not_gen, dregno);
3472 }
3473
3474 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3475 bitmap_clear_bit (live, dregno);
3476 }
3477 }
3478 else
3479 {
3480 /* Regular insn. */
3481 mws_rec = DF_INSN_UID_MWS (uid);
3482 while (*mws_rec)
3483 {
3484 struct df_mw_hardreg *mws = *mws_rec;
3485 if (DF_MWS_REG_DEF_P (mws))
3486 df_set_unused_notes_for_mw (insn,
3487 mws, live, do_not_gen,
3488 artificial_uses, &debug);
3489 mws_rec++;
3490 }
3491
3492 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3493 {
3494 df_ref def = *def_rec;
3495 unsigned int dregno = DF_REF_REGNO (def);
3496 df_create_unused_note (insn,
3497 def, live, artificial_uses, &debug);
3498
3499 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3500 bitmap_set_bit (do_not_gen, dregno);
3501
3502 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3503 bitmap_clear_bit (live, dregno);
3504 }
3505 }
3506
3507 /* Process the uses. */
3508 mws_rec = DF_INSN_UID_MWS (uid);
3509 while (*mws_rec)
3510 {
3511 struct df_mw_hardreg *mws = *mws_rec;
3512 if (DF_MWS_REG_USE_P (mws)
3513 && !df_ignore_stack_reg (mws->start_regno))
3514 {
3515 bool really_add_notes = debug_insn != 0;
3516
3517 df_set_dead_notes_for_mw (insn,
3518 mws, live, do_not_gen,
3519 artificial_uses,
3520 &really_add_notes);
3521
3522 if (really_add_notes)
3523 debug_insn = -1;
3524 }
3525 mws_rec++;
3526 }
3527
3528 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3529 {
3530 df_ref use = *use_rec;
3531 unsigned int uregno = DF_REF_REGNO (use);
3532
3533 #ifdef REG_DEAD_DEBUGGING
3534 if (dump_file && !debug_insn)
3535 {
3536 fprintf (dump_file, " regular looking at use ");
3537 df_ref_debug (use, dump_file);
3538 }
3539 #endif
3540 if (!bitmap_bit_p (live, uregno))
3541 {
3542 if (debug_insn)
3543 {
3544 if (debug_insn > 0)
3545 {
3546 /* We won't add REG_UNUSED or REG_DEAD notes for
3547 these, so we don't have to mess with them in
3548 debug insns either. */
3549 if (!bitmap_bit_p (artificial_uses, uregno)
3550 && !df_ignore_stack_reg (uregno))
3551 dead_debug_add (&debug, use, uregno);
3552 continue;
3553 }
3554 break;
3555 }
3556 else
3557 dead_debug_insert_temp (&debug, uregno, insn,
3558 DEBUG_TEMP_BEFORE_WITH_REG);
3559
3560 if ( (!(DF_REF_FLAGS (use)
3561 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3562 && (!bitmap_bit_p (do_not_gen, uregno))
3563 && (!bitmap_bit_p (artificial_uses, uregno))
3564 && (!df_ignore_stack_reg (uregno)))
3565 {
3566 rtx reg = (DF_REF_LOC (use))
3567 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3568 df_set_note (REG_DEAD, insn, reg);
3569
3570 #ifdef REG_DEAD_DEBUGGING
3571 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3572 #endif
3573 }
3574 /* This register is now live. */
3575 bitmap_set_bit (live, uregno);
3576 }
3577 }
3578
3579 if (debug_insn == -1)
3580 {
3581 /* ??? We could probably do better here, replacing dead
3582 registers with their definitions. */
3583 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3584 df_insn_rescan_debug_internal (insn);
3585 }
3586 }
3587
3588 dead_debug_finish (&debug, NULL);
3589 }
3590
3591
3592 /* Compute the REG_DEAD and REG_UNUSED notes for the blocks in ALL_BLOCKS. */
3593 static void
3594 df_note_compute (bitmap all_blocks)
3595 {
3596 unsigned int bb_index;
3597 bitmap_iterator bi;
3598 bitmap_head live, do_not_gen, artificial_uses;
3599
3600 bitmap_initialize (&live, &df_bitmap_obstack);
3601 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3602 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3603
3604 #ifdef REG_DEAD_DEBUGGING
3605 if (dump_file)
3606 print_rtl_with_bb (dump_file, get_insns());
3607 #endif
3608
3609 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3610 {
3611 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3612 }
3613
3614 bitmap_clear (&live);
3615 bitmap_clear (&do_not_gen);
3616 bitmap_clear (&artificial_uses);
3617 }
3618
3619
3620 /* Free all storage associated with the problem. */
3621
3622 static void
3623 df_note_free (void)
3624 {
3625 free (df_note);
3626 }
3627
3628
3629 /* All of the information associated with every instance of the problem. */
3630
3631 static struct df_problem problem_NOTE =
3632 {
3633 DF_NOTE, /* Problem id. */
3634 DF_NONE, /* Direction. */
3635 df_note_alloc, /* Allocate the problem specific data. */
3636 NULL, /* Reset global information. */
3637 NULL, /* Free basic block info. */
3638 df_note_compute, /* Local compute function. */
3639 NULL, /* Init the solution specific data. */
3640 NULL, /* Iterative solver. */
3641 NULL, /* Confluence operator 0. */
3642 NULL, /* Confluence operator n. */
3643 NULL, /* Transfer function. */
3644 NULL, /* Finalize function. */
3645 df_note_free, /* Free all of the problem information. */
3646 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3647 NULL, /* Debugging. */
3648 NULL, /* Debugging start block. */
3649 NULL, /* Debugging end block. */
3650 NULL, /* Incremental solution verify start. */
3651 NULL, /* Incremental solution verify end. */
3652 &problem_LR, /* Dependent problem. */
3653 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3654 TV_DF_NOTE, /* Timing variable. */
3655 false /* Reset blocks on dropping out of blocks_to_analyze. */
3656 };
3657
3658
3659 /* Create a new DATAFLOW instance and add it to an existing instance
3660 of DF. The returned structure is what is used to get at the
3661 solution. */
3662
3663 void
3664 df_note_add_problem (void)
3665 {
3666 df_add_problem (&problem_NOTE);
3667 }
3668
3669
3670
3671 \f
3672 /*----------------------------------------------------------------------------
3673 Functions for simulating the effects of single insns.
3674
3675 You can either simulate in the forwards direction, starting from
3676 the top of a block or the backwards direction from the end of the
3677 block. If you go backwards, defs are examined first to clear bits,
3678 then uses are examined to set bits. If you go forwards, defs are
3679 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3680 are examined to clear bits. In either case, the result of examining
3681 a def can be undone (respectively by a use or a REG_UNUSED note).
3682
3683 If you start at the top of the block, use one of DF_LIVE_IN or
3684 DF_LR_IN. If you start at the bottom of the block use one of
3685 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3686 THEY WILL BE DESTROYED.
3687 ----------------------------------------------------------------------------*/
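/* For illustration, the warning above means callers should simulate on a
   private copy of the solution, for example (a sketch):

       bitmap_head live;

       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, df_get_live_out (bb));
       ... simulate the insns of BB on &live ...
       bitmap_clear (&live);

   rather than handing DF_LIVE_OUT (bb) or DF_LR_OUT (bb) to the
   simulation routines directly, since they modify the bitmap in
   place.  */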
3688
3689
3690 /* Find the set of DEFs for INSN. */
3691
3692 void
3693 df_simulate_find_defs (rtx insn, bitmap defs)
3694 {
3695 df_ref *def_rec;
3696 unsigned int uid = INSN_UID (insn);
3697
3698 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3699 {
3700 df_ref def = *def_rec;
3701 bitmap_set_bit (defs, DF_REF_REGNO (def));
3702 }
3703 }
3704
3705 /* Find the set of uses for INSN. This includes partial defs. */
3706
3707 static void
3708 df_simulate_find_uses (rtx insn, bitmap uses)
3709 {
3710 df_ref *rec;
3711 unsigned int uid = INSN_UID (insn);
3712
3713 for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
3714 {
3715 df_ref def = *rec;
3716 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3717 bitmap_set_bit (uses, DF_REF_REGNO (def));
3718 }
3719 for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
3720 {
3721 df_ref use = *rec;
3722 bitmap_set_bit (uses, DF_REF_REGNO (use));
3723 }
3724 }
3725
3726 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3727
3728 void
3729 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3730 {
3731 df_ref *def_rec;
3732 unsigned int uid = INSN_UID (insn);
3733
3734 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3735 {
3736 df_ref def = *def_rec;
3737 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3738 bitmap_set_bit (defs, DF_REF_REGNO (def));
3739 }
3740 }
3741
3742
3743 /* Simulate the effects of the defs of INSN on LIVE. */
3744
3745 void
3746 df_simulate_defs (rtx insn, bitmap live)
3747 {
3748 df_ref *def_rec;
3749 unsigned int uid = INSN_UID (insn);
3750
3751 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3752 {
3753 df_ref def = *def_rec;
3754 unsigned int dregno = DF_REF_REGNO (def);
3755
3756 /* If the def is to only part of the reg, it does
3757 not kill the other defs that reach here. */
3758 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3759 bitmap_clear_bit (live, dregno);
3760 }
3761 }
3762
3763
3764 /* Simulate the effects of the uses of INSN on LIVE. */
3765
3766 void
3767 df_simulate_uses (rtx insn, bitmap live)
3768 {
3769 df_ref *use_rec;
3770 unsigned int uid = INSN_UID (insn);
3771
3772 if (DEBUG_INSN_P (insn))
3773 return;
3774
3775 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3776 {
3777 df_ref use = *use_rec;
3778 /* Add use to set of uses in this BB. */
3779 bitmap_set_bit (live, DF_REF_REGNO (use));
3780 }
3781 }
3782
3783
3784 /* Add back the always live regs in BB to LIVE. */
3785
3786 static inline void
3787 df_simulate_fixup_sets (basic_block bb, bitmap live)
3788 {
3789 /* These regs are considered always live so if they end up dying
3790 because of some def, we need to bring them back again. */
3791 if (bb_has_eh_pred (bb))
3792 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3793 else
3794 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3795 }
3796
3797
3798 /*----------------------------------------------------------------------------
3799 The following three functions are used only for BACKWARDS scanning:
3800 i.e. they process the defs before the uses.
3801
3802 df_simulate_initialize_backwards should be called first with a
3803 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3804 df_simulate_one_insn_backwards should be called for each insn in
3805 the block, starting with the last one. Finally,
3806 df_simulate_finalize_backwards can be called to get a new value
3807 of the sets at the top of the block (this is rarely used).
3808 ----------------------------------------------------------------------------*/
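/* A minimal illustrative sketch (not part of the GCC sources) of the
   backwards calling sequence described above, where BB is the block
   being scanned and the other names are placeholders.  The routines
   are the ones defined in this file; FOR_BB_INSNS_REVERSE comes from
   basic-block.h.  LIVE must start as a COPY of the live-out set,
   because the simulation destroys it:

       rtx insn;
       bitmap live = BITMAP_ALLOC (&reg_obstack);

       bitmap_copy (live, df_get_live_out (bb));
       df_simulate_initialize_backwards (bb, live);

       FOR_BB_INSNS_REVERSE (bb, insn)
         df_simulate_one_insn_backwards (bb, insn, live);

       df_simulate_finalize_backwards (bb, live);
       BITMAP_FREE (live);

   The final df_simulate_finalize_backwards call is only needed when
   the set at the top of the block is of interest.  */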
3809
3810 /* Apply the artificial uses and defs at the end of BB in a backwards
3811 direction. */
3812
3813 void
3814 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3815 {
3816 df_ref *def_rec;
3817 df_ref *use_rec;
3818 int bb_index = bb->index;
3819
3820 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3821 {
3822 df_ref def = *def_rec;
3823 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3824 bitmap_clear_bit (live, DF_REF_REGNO (def));
3825 }
3826
3827 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3828 {
3829 df_ref use = *use_rec;
3830 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3831 bitmap_set_bit (live, DF_REF_REGNO (use));
3832 }
3833 }
3834
3835
3836 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3837
3838 void
3839 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3840 {
3841 if (!NONDEBUG_INSN_P (insn))
3842 return;
3843
3844 df_simulate_defs (insn, live);
3845 df_simulate_uses (insn, live);
3846 df_simulate_fixup_sets (bb, live);
3847 }
3848
3849
3850 /* Apply the artificial uses and defs at the top of BB in a backwards
3851 direction. */
3852
3853 void
3854 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3855 {
3856 df_ref *def_rec;
3857 #ifdef EH_USES
3858 df_ref *use_rec;
3859 #endif
3860 int bb_index = bb->index;
3861
3862 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3863 {
3864 df_ref def = *def_rec;
3865 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3866 bitmap_clear_bit (live, DF_REF_REGNO (def));
3867 }
3868
3869 #ifdef EH_USES
3870 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3871 {
3872 df_ref use = *use_rec;
3873 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3874 bitmap_set_bit (live, DF_REF_REGNO (use));
3875 }
3876 #endif
3877 }
3878 /*----------------------------------------------------------------------------
3879 The following two functions are used only for FORWARDS scanning:
3880 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3881 Thus it is important to add the DF_NOTES problem to the stack of
3882 problems computed before using these functions.
3883
3884 df_simulate_initialize_forwards should be called first with a
3885 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3886 df_simulate_one_insn_forwards should be called for each insn in
3887 the block, starting with the first one.
3888 ----------------------------------------------------------------------------*/
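/* A minimal illustrative sketch (not part of the GCC sources) of the
   forwards calling sequence, where BB is the block being scanned and
   the other names are placeholders.  It assumes the caller has
   already added the DF_NOTES problem (for instance by calling
   df_note_add_problem before df_analyze), since the REG_DEAD and
   REG_UNUSED notes are what kill registers here.  LIVE must start as
   a COPY of the live-in set, because the simulation destroys it:

       rtx insn;
       bitmap live = BITMAP_ALLOC (&reg_obstack);

       bitmap_copy (live, df_get_live_in (bb));
       df_simulate_initialize_forwards (bb, live);

       FOR_BB_INSNS (bb, insn)
         df_simulate_one_insn_forwards (bb, insn, live);

       BITMAP_FREE (live);  */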
3889
3890 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3891 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3892 defs live. */
3893
3894 void
3895 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3896 {
3897 df_ref *def_rec;
3898 int bb_index = bb->index;
3899
3900 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3901 {
3902 df_ref def = *def_rec;
3903 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3904 bitmap_set_bit (live, DF_REF_REGNO (def));
3905 }
3906 }
3907
3908 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3909
3910 void
3911 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3912 {
3913 rtx link;
3914 if (! INSN_P (insn))
3915 return;
3916
3917 /* Make sure that DF_NOTE really is an active df problem. */
3918 gcc_assert (df_note);
3919
3920 /* Note that this is the opposite of how the problem is defined, because
3921 in the LR problem defs _kill_ liveness. However, they do so backwards,
3922 while here the scan is performed forwards! So, first assume that the
3923 def is live, and if this is not true REG_UNUSED notes will rectify the
3924 situation. */
3925 df_simulate_find_noclobber_defs (insn, live);
3926
3927 /* Clear all of the registers that go dead. */
3928 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3929 {
3930 switch (REG_NOTE_KIND (link))
3931 {
3932 case REG_DEAD:
3933 case REG_UNUSED:
3934 {
3935 rtx reg = XEXP (link, 0);
3936 int regno = REGNO (reg);
3937 if (HARD_REGISTER_NUM_P (regno))
3938 bitmap_clear_range (live, regno,
3939 hard_regno_nregs[regno][GET_MODE (reg)]);
3940 else
3941 bitmap_clear_bit (live, regno);
3942 }
3943 break;
3944 default:
3945 break;
3946 }
3947 }
3948 df_simulate_fixup_sets (bb, live);
3949 }
3950 \f
3951 /* Used by the next two functions to encode information about the
3952 memory references we found. */
3953 #define MEMREF_NORMAL 1
3954 #define MEMREF_VOLATILE 2
3955
3956 /* A subroutine of can_move_insns_across_p called through for_each_rtx.
3957 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory is found. */
3958
3959 static int
3960 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3961 {
3962 rtx x = *px;
3963
3964 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3965 return MEMREF_VOLATILE;
3966
3967 if (!MEM_P (x))
3968 return 0;
3969 if (MEM_VOLATILE_P (x))
3970 return MEMREF_VOLATILE;
3971 if (MEM_READONLY_P (x))
3972 return 0;
3973
3974 return MEMREF_NORMAL;
3975 }
3976
3977 /* A subroutine of can_move_insns_across_p called through note_stores.
3978 DATA points to an integer in which we set either the bit for
3979 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3980 of either kind. */
3981
3982 static void
3983 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3984 void *data ATTRIBUTE_UNUSED)
3985 {
3986 int *pflags = (int *)data;
3987 if (GET_CODE (x) == SUBREG)
3988 x = XEXP (x, 0);
3989 /* Treat stores to SP as stores to memory; this will prevent problems
3990 when there are references to the stack frame. */
3991 if (x == stack_pointer_rtx)
3992 *pflags |= MEMREF_VOLATILE;
3993 if (!MEM_P (x))
3994 return;
3995 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3996 }
3997
3998 /* Scan BB backwards, using df_simulate functions to keep track of
3999 lifetimes, up to insn POINT. The result is stored in LIVE. */
4000
4001 void
4002 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
4003 {
4004 rtx insn;
4005 bitmap_copy (live, df_get_live_out (bb));
4006 df_simulate_initialize_backwards (bb, live);
4007
4008 /* Scan and update life information until we reach the point we're
4009 interested in. */
4010 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
4011 df_simulate_one_insn_backwards (bb, insn, live);
4012 }
4013
4014 /* Return true if it is safe to move a group of insns, described by
4015 the range FROM to TO, backwards across another group of insns,
4016 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
4017 are no insns between ACROSS_TO and FROM, but they may be in
4018 different basic blocks; MERGE_BB is the block from which the
4019 insns will be moved. The caller must pass in a regset MERGE_LIVE
4020 which specifies the registers live after TO.
4021
4022 This function may be called in one of two cases: either we try to
4023 move identical instructions from all successor blocks into their
4024 predecessor, or we try to move from only one successor block. If
4025 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
4026 the second case. It should contain a set of registers live at the
4027 end of ACROSS_TO which must not be clobbered by moving the insns.
4028 In that case, we're also more careful about moving memory references
4029 and trapping insns.
4030
4031 We return false if it is not safe to move the entire group, but it
4032 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
4033 is set to point at the last moveable insn in such a case. */
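
/* An illustrative (hypothetical) caller of can_move_insns_across; it
   is only a sketch of the calling convention described above, not
   code taken from an actual pass.  FROM/TO and ACROSS_FROM/ACROSS_TO
   delimit the two insn ranges, and MERGE_LIVE must describe the
   registers live after TO, e.g. the live-out set of MERGE_BB:

       regset merge_live = BITMAP_ALLOC (&reg_obstack);
       rtx move_upto;

       bitmap_copy (merge_live, df_get_live_out (merge_bb));
       if (can_move_insns_across (from, to, across_from, across_to,
                                  merge_bb, merge_live, NULL, &move_upto))
         { ... move the whole FROM..TO range ... }
       else if (move_upto != NULL_RTX)
         { ... only the subrange FROM..MOVE_UPTO can be moved safely ... }
       BITMAP_FREE (merge_live);  */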
4034
4035 bool
4036 can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
4037 basic_block merge_bb, regset merge_live,
4038 regset other_branch_live, rtx *pmove_upto)
4039 {
4040 rtx insn, next, max_to;
4041 bitmap merge_set, merge_use, local_merge_live;
4042 bitmap test_set, test_use;
4043 unsigned i, fail = 0;
4044 bitmap_iterator bi;
4045 int memrefs_in_across = 0;
4046 int mem_sets_in_across = 0;
4047 bool trapping_insns_in_across = false;
4048
4049 if (pmove_upto != NULL)
4050 *pmove_upto = NULL_RTX;
4051
4052 /* Find real bounds, ignoring debug insns. */
4053 while (!NONDEBUG_INSN_P (from) && from != to)
4054 from = NEXT_INSN (from);
4055 while (!NONDEBUG_INSN_P (to) && from != to)
4056 to = PREV_INSN (to);
4057
4058 for (insn = across_to; ; insn = next)
4059 {
4060 if (NONDEBUG_INSN_P (insn))
4061 {
4062 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
4063 NULL);
4064 note_stores (PATTERN (insn), find_memory_stores,
4065 &mem_sets_in_across);
4066 /* This is used just to find sets of the stack pointer. */
4067 memrefs_in_across |= mem_sets_in_across;
4068 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
4069 }
4070 next = PREV_INSN (insn);
4071 if (insn == across_from)
4072 break;
4073 }
4074
4075 /* Collect:
4076 MERGE_SET = set of registers set in MERGE_BB
4077 MERGE_USE = set of registers used in MERGE_BB and live at its top
4078 MERGE_LIVE = set of registers live at the point inside the MERGE
4079 range that we've reached during scanning
4080 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
4081 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
4082 and live before ACROSS_FROM. */
4083
4084 merge_set = BITMAP_ALLOC (&reg_obstack);
4085 merge_use = BITMAP_ALLOC (&reg_obstack);
4086 local_merge_live = BITMAP_ALLOC (&reg_obstack);
4087 test_set = BITMAP_ALLOC (&reg_obstack);
4088 test_use = BITMAP_ALLOC (&reg_obstack);
4089
4090 /* Compute the set of registers set and used in the ACROSS range. */
4091 if (other_branch_live != NULL)
4092 bitmap_copy (test_use, other_branch_live);
4093 df_simulate_initialize_backwards (merge_bb, test_use);
4094 for (insn = across_to; ; insn = next)
4095 {
4096 if (NONDEBUG_INSN_P (insn))
4097 {
4098 df_simulate_find_defs (insn, test_set);
4099 df_simulate_defs (insn, test_use);
4100 df_simulate_uses (insn, test_use);
4101 }
4102 next = PREV_INSN (insn);
4103 if (insn == across_from)
4104 break;
4105 }
4106
4107 /* Compute an upper bound for the number of insns moved, by finding
4108 the first insn in MERGE that sets a register in TEST_USE, or uses
4109 a register in TEST_SET. We also check for calls, trapping operations,
4110 and memory references. */
4111 max_to = NULL_RTX;
4112 for (insn = from; ; insn = next)
4113 {
4114 if (CALL_P (insn))
4115 break;
4116 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
4117 break;
4118 if (NONDEBUG_INSN_P (insn))
4119 {
4120 if (may_trap_or_fault_p (PATTERN (insn))
4121 && (trapping_insns_in_across || other_branch_live != NULL))
4122 break;
4123
4124 /* We cannot move memory stores past each other, or move memory
4125 reads past stores, at least not without tracking them and
4126 calling true_dependence on every pair.
4127
4128 If there is no other branch and no memory references or
4129 sets in the ACROSS range, we can move memory references
4130 freely, even volatile ones.
4131
4132 Otherwise, the rules are as follows: volatile memory
4133 references and stores can't be moved at all, and any type
4134 of memory reference can't be moved if there are volatile
4135 accesses or stores in the ACROSS range. That leaves
4136 normal reads, which can be moved, as the trapping case is
4137 dealt with elsewhere. */
4138 if (other_branch_live != NULL || memrefs_in_across != 0)
4139 {
4140 int mem_ref_flags = 0;
4141 int mem_set_flags = 0;
4142 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
4143 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
4144 NULL);
4145 /* Catch sets of the stack pointer. */
4146 mem_ref_flags |= mem_set_flags;
4147
4148 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
4149 break;
4150 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
4151 break;
4152 if (mem_set_flags != 0
4153 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
4154 break;
4155 }
4156 df_simulate_find_uses (insn, merge_use);
4157 /* We're only interested in uses which use a value live at
4158 the top, not one previously set in this block. */
4159 bitmap_and_compl_into (merge_use, merge_set);
4160 df_simulate_find_defs (insn, merge_set);
4161 if (bitmap_intersect_p (merge_set, test_use)
4162 || bitmap_intersect_p (merge_use, test_set))
4163 break;
4164 #ifdef HAVE_cc0
4165 if (!sets_cc0_p (insn))
4166 #endif
4167 max_to = insn;
4168 }
4169 next = NEXT_INSN (insn);
4170 if (insn == to)
4171 break;
4172 }
4173 if (max_to != to)
4174 fail = 1;
4175
4176 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
4177 goto out;
4178
4179 /* Now, lower this upper bound by also taking into account that
4180 a range of insns moved across ACROSS must not leave a register
4181 live at the end that will be clobbered in ACROSS. We need to
4182 find a point where TEST_SET & LIVE == 0.
4183
4184 Insns in the MERGE range that set registers which are also set
4185 in the ACROSS range may still be moved as long as we also move
4186 later insns which use the results of the set, and make the
4187 register dead again. This is verified by the condition stated
4188 above. We only need to test it for registers that are set in
4189 the moved region.
4190
4191 MERGE_LIVE is provided by the caller and holds live registers after
4192 TO. */
4193 bitmap_copy (local_merge_live, merge_live);
4194 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
4195 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
4196
4197 /* We're not interested in registers that aren't set in the moved
4198 region at all. */
4199 bitmap_and_into (local_merge_live, merge_set);
4200 for (;;)
4201 {
4202 if (NONDEBUG_INSN_P (insn))
4203 {
4204 if (!bitmap_intersect_p (test_set, local_merge_live)
4205 #ifdef HAVE_cc0
4206 && !sets_cc0_p (insn)
4207 #endif
4208 )
4209 {
4210 max_to = insn;
4211 break;
4212 }
4213
4214 df_simulate_one_insn_backwards (merge_bb, insn,
4215 local_merge_live);
4216 }
4217 if (insn == from)
4218 {
4219 fail = 1;
4220 goto out;
4221 }
4222 insn = PREV_INSN (insn);
4223 }
4224
4225 if (max_to != to)
4226 fail = 1;
4227
4228 if (pmove_upto)
4229 *pmove_upto = max_to;
4230
4231 /* For small register class machines, don't lengthen lifetimes of
4232 hard registers before reload. */
4233 if (! reload_completed
4234 && targetm.small_register_classes_for_mode_p (VOIDmode))
4235 {
4236 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4237 {
4238 if (i < FIRST_PSEUDO_REGISTER
4239 && ! fixed_regs[i]
4240 && ! global_regs[i])
4241 fail = 1;
4242 }
4243 }
4244
4245 out:
4246 BITMAP_FREE (merge_set);
4247 BITMAP_FREE (merge_use);
4248 BITMAP_FREE (local_merge_live);
4249 BITMAP_FREE (test_set);
4250 BITMAP_FREE (test_use);
4251
4252 return !fail;
4253 }
4254
4255 \f
4256 /*----------------------------------------------------------------------------
4257 MULTIPLE DEFINITIONS
4258
4259 Find the locations in the function reached by multiple definition sites
4260 for a live pseudo. In and out bitvectors are built for each basic
4261 block. They are restricted for efficiency to live registers.
4262
4263 The gen and kill sets for the problem are obvious. Together they
4264 include all defined registers in a basic block; the gen set includes
4265 registers where a partial or conditional or may-clobber definition is
4266 last in the BB, while the kill set includes registers with a complete
4267 definition coming last. However, the computation of the dataflow
4268 itself is interesting.
4269
4270 The idea behind it comes from SSA form's iterated dominance frontier
4271 criterion for inserting PHI functions. Just like in that case, we can use
4272 the dominance frontier to find places where multiple definitions meet;
4273 a register X defined in a basic block BB1 has multiple definitions in
4274 basic blocks in BB1's dominance frontier.
4275
4276 So, the in-set of a basic block BB2 is not just the union of the
4277 out-sets of BB2's predecessors, but includes some more bits that come
4278 from the basic blocks of whose dominance frontier BB2 is part (BB1 in
4279 the previous paragraph). I called this set the init-set of BB2.
4280
4281 (Note: I actually use the kill-set only to build the init-set;
4282 the gen bits are propagated from BB1 to BB2 by the dataflow anyway.)
4283
4284 For example, if you have
4285
4286 BB1 : r10 = 0
4287 r11 = 0
4288 if <...> goto BB2 else goto BB3;
4289
4290 BB2 : r10 = 1
4291 r12 = 1
4292 goto BB3;
4293
4294 BB3 :
4295
4296 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4297 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4298 not need to iterate the dominance frontier, because we do not insert
4299 anything like PHI functions there! Instead, dataflow will take care of
4300 propagating the information to BB3's successors.
4301 ---------------------------------------------------------------------------*/
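/* Restated as equations (this is only a summary of the confluence and
   transfer functions below, writing LIVE_IN/LIVE_OUT for the solution
   of the LR or LIVE problem):

     in(BB)  = init(BB) | (union of out(P) for each non-fake pred P)
     out(BB) = (gen(BB) & LIVE_OUT(BB))
               | ((in(BB) & LIVE_IN(BB)) & ~kill(BB))

   On EH edges the registers invalidated by calls are excluded from
   the union.  */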
4302
4303 /* Private data used to verify the solution for this problem. */
4304 struct df_md_problem_data
4305 {
4306 /* An obstack for the bitmaps we need for this problem. */
4307 bitmap_obstack md_bitmaps;
4308 };
4309
4310 /* Scratch var used by transfer functions. This is used to do md analysis
4311 only for live registers. */
4312 static bitmap_head df_md_scratch;
4313
4314
4315 static void
4316 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4317 void *vbb_info)
4318 {
4319 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4320 if (bb_info)
4321 {
4322 bitmap_clear (&bb_info->kill);
4323 bitmap_clear (&bb_info->gen);
4324 bitmap_clear (&bb_info->init);
4325 bitmap_clear (&bb_info->in);
4326 bitmap_clear (&bb_info->out);
4327 }
4328 }
4329
4330
4331 /* Allocate or reset bitmaps for DF_MD. The solution bits are
4332 not touched unless the block is new. */
4333
4334 static void
4335 df_md_alloc (bitmap all_blocks)
4336 {
4337 unsigned int bb_index;
4338 bitmap_iterator bi;
4339 struct df_md_problem_data *problem_data;
4340
4341 df_grow_bb_info (df_md);
4342 if (df_md->problem_data)
4343 problem_data = (struct df_md_problem_data *) df_md->problem_data;
4344 else
4345 {
4346 problem_data = XNEW (struct df_md_problem_data);
4347 df_md->problem_data = problem_data;
4348 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4349 }
4350 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4351
4352 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4353 {
4354 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4355 /* When bitmaps are already initialized, just clear them. */
4356 if (bb_info->init.obstack)
4357 {
4358 bitmap_clear (&bb_info->init);
4359 bitmap_clear (&bb_info->gen);
4360 bitmap_clear (&bb_info->kill);
4361 bitmap_clear (&bb_info->in);
4362 bitmap_clear (&bb_info->out);
4363 }
4364 else
4365 {
4366 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4367 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4368 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4369 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4370 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4371 }
4372 }
4373
4374 df_md->optional_p = true;
4375 }
4376
4377 /* Add the effect of the top artificial defs of BB to the multiple definitions
4378 bitmap LOCAL_MD. */
4379
4380 void
4381 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4382 {
4383 int bb_index = bb->index;
4384 df_ref *def_rec;
4385 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4386 {
4387 df_ref def = *def_rec;
4388 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4389 {
4390 unsigned int dregno = DF_REF_REGNO (def);
4391 if (DF_REF_FLAGS (def)
4392 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4393 bitmap_set_bit (local_md, dregno);
4394 else
4395 bitmap_clear_bit (local_md, dregno);
4396 }
4397 }
4398 }
4399
4400
4401 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4402 LOCAL_MD. */
4403
4404 void
4405 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4406 bitmap local_md)
4407 {
4408 unsigned uid = INSN_UID (insn);
4409 df_ref *def_rec;
4410
4411 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4412 {
4413 df_ref def = *def_rec;
4414 unsigned int dregno = DF_REF_REGNO (def);
4415 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4416 || (dregno >= FIRST_PSEUDO_REGISTER))
4417 {
4418 if (DF_REF_FLAGS (def)
4419 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4420 bitmap_set_bit (local_md, dregno);
4421 else
4422 bitmap_clear_bit (local_md, dregno);
4423 }
4424 }
4425 }
4426
4427 static void
4428 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4429 df_ref *def_rec,
4430 int top_flag)
4431 {
4432 df_ref def;
4433 bitmap_clear (&seen_in_insn);
4434
4435 while ((def = *def_rec++) != NULL)
4436 {
4437 unsigned int dregno = DF_REF_REGNO (def);
4438 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4439 || (dregno >= FIRST_PSEUDO_REGISTER))
4440 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4441 {
4442 if (!bitmap_bit_p (&seen_in_insn, dregno))
4443 {
4444 if (DF_REF_FLAGS (def)
4445 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4446 {
4447 bitmap_set_bit (&bb_info->gen, dregno);
4448 bitmap_clear_bit (&bb_info->kill, dregno);
4449 }
4450 else
4451 {
4452 /* When we find a clobber and a regular def,
4453 make sure the regular def wins. */
4454 bitmap_set_bit (&seen_in_insn, dregno);
4455 bitmap_set_bit (&bb_info->kill, dregno);
4456 bitmap_clear_bit (&bb_info->gen, dregno);
4457 }
4458 }
4459 }
4460 }
4461 }
4462
4463
4464 /* Compute local multiple def info for basic block BB. */
4465
4466 static void
4467 df_md_bb_local_compute (unsigned int bb_index)
4468 {
4469 basic_block bb = BASIC_BLOCK (bb_index);
4470 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4471 rtx insn;
4472
4473 /* Artificials are only hard regs. */
4474 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4475 df_md_bb_local_compute_process_def (bb_info,
4476 df_get_artificial_defs (bb_index),
4477 DF_REF_AT_TOP);
4478
4479 FOR_BB_INSNS (bb, insn)
4480 {
4481 unsigned int uid = INSN_UID (insn);
4482 if (!INSN_P (insn))
4483 continue;
4484
4485 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4486 }
4487
4488 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4489 df_md_bb_local_compute_process_def (bb_info,
4490 df_get_artificial_defs (bb_index),
4491 0);
4492 }
4493
4494 /* Compute local multiple-def info for each basic block within ALL_BLOCKS. */
4495
4496 static void
4497 df_md_local_compute (bitmap all_blocks)
4498 {
4499 unsigned int bb_index, df_bb_index;
4500 bitmap_iterator bi1, bi2;
4501 basic_block bb;
4502 bitmap_head *frontiers;
4503
4504 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4505
4506 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4507 {
4508 df_md_bb_local_compute (bb_index);
4509 }
4510
4511 bitmap_clear (&seen_in_insn);
4512
4513 frontiers = XNEWVEC (bitmap_head, last_basic_block);
4514 FOR_ALL_BB (bb)
4515 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4516
4517 compute_dominance_frontiers (frontiers);
4518
4519 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4520 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4521 {
4522 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4523 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4524 {
4525 basic_block bb = BASIC_BLOCK (df_bb_index);
4526 if (bitmap_bit_p (all_blocks, df_bb_index))
4527 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4528 df_get_live_in (bb));
4529 }
4530 }
4531
4532 FOR_ALL_BB (bb)
4533 bitmap_clear (&frontiers[bb->index]);
4534 free (frontiers);
4535 }
4536
4537
4538 /* Reset the global solution for recalculation. */
4539
4540 static void
4541 df_md_reset (bitmap all_blocks)
4542 {
4543 unsigned int bb_index;
4544 bitmap_iterator bi;
4545
4546 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4547 {
4548 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4549 gcc_assert (bb_info);
4550 bitmap_clear (&bb_info->in);
4551 bitmap_clear (&bb_info->out);
4552 }
4553 }
4554
4555 static bool
4556 df_md_transfer_function (int bb_index)
4557 {
4558 basic_block bb = BASIC_BLOCK (bb_index);
4559 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4560 bitmap in = &bb_info->in;
4561 bitmap out = &bb_info->out;
4562 bitmap gen = &bb_info->gen;
4563 bitmap kill = &bb_info->kill;
4564
4565 /* We need to use a scratch set here so that the value returned from this
4566 function invocation properly reflects whether the sets changed in a
4567 significant way; i.e. not just because the live set was anded in. */
4568 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4569
4570 /* Multiple definitions of a register are not relevant if it is not
4571 live. Thus we trim the result to the places where it is live. */
4572 bitmap_and_into (in, df_get_live_in (bb));
4573
4574 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4575 }
4576
4577 /* Initialize the solution bit vectors for the problem. */
4578
4579 static void
4580 df_md_init (bitmap all_blocks)
4581 {
4582 unsigned int bb_index;
4583 bitmap_iterator bi;
4584
4585 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4586 {
4587 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4588
4589 bitmap_copy (&bb_info->in, &bb_info->init);
4590 df_md_transfer_function (bb_index);
4591 }
4592 }
4593
4594 static void
4595 df_md_confluence_0 (basic_block bb)
4596 {
4597 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4598 bitmap_copy (&bb_info->in, &bb_info->init);
4599 }
4600
4601 /* The IN set of the target gets the OR of the OUT set of the source. */
4602
4603 static bool
4604 df_md_confluence_n (edge e)
4605 {
4606 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4607 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4608
4609 if (e->flags & EDGE_FAKE)
4610 return false;
4611
4612 if (e->flags & EDGE_EH)
4613 return bitmap_ior_and_compl_into (op1, op2,
4614 regs_invalidated_by_call_regset);
4615 else
4616 return bitmap_ior_into (op1, op2);
4617 }
4618
4619 /* Free all storage associated with the problem. */
4620
4621 static void
4622 df_md_free (void)
4623 {
4624 struct df_md_problem_data *problem_data
4625 = (struct df_md_problem_data *) df_md->problem_data;
4626
4627 bitmap_obstack_release (&problem_data->md_bitmaps);
4628 free (problem_data);
4629 df_md->problem_data = NULL;
4630
4631 df_md->block_info_size = 0;
4632 free (df_md->block_info);
4633 df_md->block_info = NULL;
4634 free (df_md);
4635 }
4636
4637
4638 /* Debugging info at top of bb. */
4639
4640 static void
4641 df_md_top_dump (basic_block bb, FILE *file)
4642 {
4643 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4644 if (!bb_info)
4645 return;
4646
4647 fprintf (file, ";; md in \t");
4648 df_print_regset (file, &bb_info->in);
4649 fprintf (file, ";; md init \t");
4650 df_print_regset (file, &bb_info->init);
4651 fprintf (file, ";; md gen \t");
4652 df_print_regset (file, &bb_info->gen);
4653 fprintf (file, ";; md kill \t");
4654 df_print_regset (file, &bb_info->kill);
4655 }
4656
4657 /* Debugging info at bottom of bb. */
4658
4659 static void
4660 df_md_bottom_dump (basic_block bb, FILE *file)
4661 {
4662 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4663 if (!bb_info)
4664 return;
4665
4666 fprintf (file, ";; md out \t");
4667 df_print_regset (file, &bb_info->out);
4668 }
4669
4670 static struct df_problem problem_MD =
4671 {
4672 DF_MD, /* Problem id. */
4673 DF_FORWARD, /* Direction. */
4674 df_md_alloc, /* Allocate the problem specific data. */
4675 df_md_reset, /* Reset global information. */
4676 df_md_free_bb_info, /* Free basic block info. */
4677 df_md_local_compute, /* Local compute function. */
4678 df_md_init, /* Init the solution specific data. */
4679 df_worklist_dataflow, /* Worklist solver. */
4680 df_md_confluence_0, /* Confluence operator 0. */
4681 df_md_confluence_n, /* Confluence operator n. */
4682 df_md_transfer_function, /* Transfer function. */
4683 NULL, /* Finalize function. */
4684 df_md_free, /* Free all of the problem information. */
4685 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4686 NULL, /* Debugging. */
4687 df_md_top_dump, /* Debugging start block. */
4688 df_md_bottom_dump, /* Debugging end block. */
4689 NULL, /* Incremental solution verify start. */
4690 NULL, /* Incremental solution verify end. */
4691 NULL, /* Dependent problem. */
4692 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4693 TV_DF_MD, /* Timing variable. */
4694 false /* Reset blocks on dropping out of blocks_to_analyze. */
4695 };
4696
4697 /* Create a new MD instance and add it to the existing instance
4698 of DF. */
4699
4700 void
4701 df_md_add_problem (void)
4702 {
4703 df_add_problem (&problem_MD);
4704 }
4705
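/* An illustrative (hypothetical) sketch of how a pass might drive the
   MD problem; it is not taken from an existing pass.  The pass first
   adds the problem and analyzes, then walks a block while keeping a
   local bitmap of registers with possibly-multiple reaching
   definitions:

       df_md_add_problem ();
       df_analyze ();
       ...
       bitmap local_md = BITMAP_ALLOC (&reg_obstack);
       bitmap_copy (local_md, &df_md_get_bb_info (bb->index)->in);
       df_md_simulate_artificial_defs_at_top (bb, local_md);
       FOR_BB_INSNS (bb, insn)
         if (INSN_P (insn))
           df_md_simulate_one_insn (bb, insn, local_md);
       BITMAP_FREE (local_md);  */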
4706
4707