gcc/df-problems.c
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "alloc-pool.h"
36 #include "flags.h"
37 #include "hard-reg-set.h"
38 #include "basic-block.h"
39 #include "sbitmap.h"
40 #include "bitmap.h"
41 #include "target.h"
42 #include "timevar.h"
43 #include "df.h"
44 #include "except.h"
45 #include "dce.h"
46 #include "vecprim.h"
47 #include "dumpfile.h"
48
49 /* Note that turning REG_DEAD_DEBUGGING on will cause
50 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
51 addresses in the dumps. */
52 #define REG_DEAD_DEBUGGING 0
53
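/* Registers with more than this many defs use the "sparse" kill
   representation in the reaching-definitions problem (one bit per regno in
   sparse_kill) rather than one bit per def; see the long comment ahead of
   struct df_rd_problem_data below.  */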
54 #define DF_SPARSE_THRESHOLD 32
55
56 static bitmap_head seen_in_block;
57 static bitmap_head seen_in_insn;
58
59 \f
60 /*----------------------------------------------------------------------------
  61    Public access functions for the dataflow problems.
62 ----------------------------------------------------------------------------*/
  63 /* Get the live-out set for BB no matter which problem happens to be
  64    defined.  This function is used by the register allocators, which
  65    choose different dataflow problems depending on the optimization
  66    level.  */
67
68 bitmap
69 df_get_live_out (basic_block bb)
70 {
71 gcc_assert (df_lr);
72
73 if (df_live)
74 return DF_LIVE_OUT (bb);
75 else
76 return DF_LR_OUT (bb);
77 }
78
  79 /* Get the live-in set for BB no matter which problem happens to be
  80    defined.  This function is used by the register allocators, which
  81    choose different dataflow problems depending on the optimization
  82    level.  */
83
84 bitmap
85 df_get_live_in (basic_block bb)
86 {
87 gcc_assert (df_lr);
88
89 if (df_live)
90 return DF_LIVE_IN (bb);
91 else
92 return DF_LR_IN (bb);
93 }
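/* A minimal usage sketch (the caller and REGNO are hypothetical): a pass
   that wants to know whether a register is live on exit from BB, whichever
   of the DF_LR / DF_LIVE problems is currently active, would do

     bitmap live = df_get_live_out (bb);
     if (bitmap_bit_p (live, regno))
       ...

   The returned bitmap is owned by the dataflow framework, not the
   caller.  */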
94
95 /*----------------------------------------------------------------------------
96 Utility functions.
97 ----------------------------------------------------------------------------*/
98
99 /* Generic versions to get the void* version of the block info. Only
100 used inside the problem instance vectors. */
101
102 /* Dump a def-use or use-def chain for REF to FILE. */
103
104 void
105 df_chain_dump (struct df_link *link, FILE *file)
106 {
107 fprintf (file, "{ ");
108 for (; link; link = link->next)
109 {
110 fprintf (file, "%c%d(bb %d insn %d) ",
111 DF_REF_REG_DEF_P (link->ref)
112 ? 'd'
113 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
114 DF_REF_ID (link->ref),
115 DF_REF_BBNO (link->ref),
116 DF_REF_IS_ARTIFICIAL (link->ref)
117 ? -1 : DF_REF_INSN_UID (link->ref));
118 }
119 fprintf (file, "}");
120 }
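/* For instance, a dumped chain of the (hypothetical) form
   "{ d12(bb 3 insn 45) u7(bb 4 insn 50) }" lists a def with ID 12 in basic
   block 3 at insn uid 45 and a use with ID 7 in block 4 at insn uid 50;
   'e' marks a use that occurs only in a REG_EQUAL/REG_EQUIV note, and an
   insn of -1 marks an artificial ref.  */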
121
122
123 /* Print some basic block info as part of df_dump. */
124
125 void
126 df_print_bb_index (basic_block bb, FILE *file)
127 {
128 edge e;
129 edge_iterator ei;
130
131 fprintf (file, "\n( ");
132 FOR_EACH_EDGE (e, ei, bb->preds)
133 {
134 basic_block pred = e->src;
135 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
136 }
137 fprintf (file, ")->[%d]->( ", bb->index);
138 FOR_EACH_EDGE (e, ei, bb->succs)
139 {
140 basic_block succ = e->dest;
141 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
142 }
143 fprintf (file, ")\n");
144 }
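/* Sample output (with made-up block numbers): "( 2 4(EH) )->[5]->( 7 )"
   says block 5 has predecessors 2 and 4, the edge from 4 being an EH edge,
   and a single successor 7.  */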
145
146 \f
147 /*----------------------------------------------------------------------------
148 REACHING DEFINITIONS
149
150 Find the locations in the function where each definition site for a
151 pseudo reaches. In and out bitvectors are built for each basic
152 block. The id field in the ref is used to index into these sets.
153 See df.h for details.
154 ----------------------------------------------------------------------------*/
155
156 /* This problem plays a large number of games for the sake of
157 efficiency.
158
159 1) The order of the bits in the bitvectors. After the scanning
160 phase, all of the defs are sorted. All of the defs for the reg 0
161 are first, followed by all defs for reg 1 and so on.
162
  163    2) There are two kill sets, one if the number of defs is less than
  164    or equal to DF_SPARSE_THRESHOLD and another if the number of defs is
165 greater.
166
167 <= : Data is built directly in the kill set.
168
169 > : One level of indirection is used to keep from generating long
170 strings of 1 bits in the kill sets. Bitvectors that are indexed
171 by the regnum are used to represent that there is a killing def
172 for the register. The confluence and transfer functions use
173 these along with the bitmap_clear_range call to remove ranges of
174 bits without actually generating a knockout vector.
175
  176    The kill and sparse_kill sets, and likewise the dense_invalidated_by_call
  177    and sparse_invalidated_by_call sets, play this game.  */
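/* A worked example with made-up numbers: suppose pseudo 42 has 8 defs in
   the function, occupying def IDs DF_DEFS_BEGIN (42) == 100 through 107
   after sorting.  A full (non-partial, non-conditional) def of reg 42 in a
   block kills all of them; since 8 <= DF_SPARSE_THRESHOLD this is recorded
   densely by setting bits 100..107 in the block's kill set.  If the
   register instead had 500 defs, only bit 42 would be set in sparse_kill,
   and the confluence/transfer functions would clear the whole range of its
   def IDs from IN with bitmap_clear_range.  */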
178
179 /* Private data used to compute the solution for this problem. These
180 data structures are not accessible outside of this module. */
181 struct df_rd_problem_data
182 {
  183   /* The regnos (for regs with more than DF_SPARSE_THRESHOLD defs) whose defs are invalidated by a call.  */
184 bitmap_head sparse_invalidated_by_call;
  185   /* The IDs of the defs invalidated by a call, for regs with at most DF_SPARSE_THRESHOLD defs.  */
186 bitmap_head dense_invalidated_by_call;
187 /* An obstack for the bitmaps we need for this problem. */
188 bitmap_obstack rd_bitmaps;
189 };
190
191
192 /* Free basic block info. */
193
194 static void
195 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
196 void *vbb_info)
197 {
198 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
199 if (bb_info)
200 {
201 bitmap_clear (&bb_info->kill);
202 bitmap_clear (&bb_info->sparse_kill);
203 bitmap_clear (&bb_info->gen);
204 bitmap_clear (&bb_info->in);
205 bitmap_clear (&bb_info->out);
206 }
207 }
208
209
210 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
211 not touched unless the block is new. */
212
213 static void
214 df_rd_alloc (bitmap all_blocks)
215 {
216 unsigned int bb_index;
217 bitmap_iterator bi;
218 struct df_rd_problem_data *problem_data;
219
220 if (df_rd->problem_data)
221 {
222 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
223 bitmap_clear (&problem_data->sparse_invalidated_by_call);
224 bitmap_clear (&problem_data->dense_invalidated_by_call);
225 }
226 else
227 {
228 problem_data = XNEW (struct df_rd_problem_data);
229 df_rd->problem_data = problem_data;
230
231 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
232 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
233 &problem_data->rd_bitmaps);
234 bitmap_initialize (&problem_data->dense_invalidated_by_call,
235 &problem_data->rd_bitmaps);
236 }
237
238 df_grow_bb_info (df_rd);
239
240 /* Because of the clustering of all use sites for the same pseudo,
241 we have to process all of the blocks before doing the
242 analysis. */
243
244 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
245 {
246 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
247
248 /* When bitmaps are already initialized, just clear them. */
249 if (bb_info->kill.obstack)
250 {
251 bitmap_clear (&bb_info->kill);
252 bitmap_clear (&bb_info->sparse_kill);
253 bitmap_clear (&bb_info->gen);
254 }
255 else
256 {
257 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
258 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
259 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
260 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
261 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
262 }
263 }
264 df_rd->optional_p = true;
265 }
266
267
268 /* Add the effect of the top artificial defs of BB to the reaching definitions
269 bitmap LOCAL_RD. */
270
271 void
272 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
273 {
274 int bb_index = bb->index;
275 df_ref *def_rec;
276 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
277 {
278 df_ref def = *def_rec;
279 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
280 {
281 unsigned int dregno = DF_REF_REGNO (def);
282 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
283 bitmap_clear_range (local_rd,
284 DF_DEFS_BEGIN (dregno),
285 DF_DEFS_COUNT (dregno));
286 bitmap_set_bit (local_rd, DF_REF_ID (def));
287 }
288 }
289 }
290
291 /* Add the effect of the defs of INSN to the reaching definitions bitmap
292 LOCAL_RD. */
293
294 void
295 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
296 bitmap local_rd)
297 {
298 unsigned uid = INSN_UID (insn);
299 df_ref *def_rec;
300
301 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
302 {
303 df_ref def = *def_rec;
304 unsigned int dregno = DF_REF_REGNO (def);
305 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
306 || (dregno >= FIRST_PSEUDO_REGISTER))
307 {
308 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
309 bitmap_clear_range (local_rd,
310 DF_DEFS_BEGIN (dregno),
311 DF_DEFS_COUNT (dregno));
312 if (!(DF_REF_FLAGS (def)
313 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
314 bitmap_set_bit (local_rd, DF_REF_ID (def));
315 }
316 }
317 }
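/* A sketch of how a client typically drives the two simulation routines
   above (only the LOCAL_RD handling is real; whatever the client does per
   insn is hypothetical):

     bitmap_head local_rd;
     bitmap_initialize (&local_rd, &bitmap_default_obstack);
     bitmap_copy (&local_rd, &df_rd_get_bb_info (bb->index)->in);
     df_rd_simulate_artificial_defs_at_top (bb, &local_rd);
     FOR_BB_INSNS (bb, insn)
       if (INSN_P (insn))
         {
           ... look up the defs reaching INSN in LOCAL_RD ...
           df_rd_simulate_one_insn (bb, insn, &local_rd);
         }

   i.e. LOCAL_RD starts as the block's RD IN set and, before each insn,
   holds the set of def IDs that reach that insn.  */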
318
319 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
320 more complicated than just simulating, because we must produce the
321 gen and kill sets and hence deal with the two possible representations
322 of kill sets. */
323
324 static void
325 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
326 df_ref *def_rec,
327 int top_flag)
328 {
329 while (*def_rec)
330 {
331 df_ref def = *def_rec;
332 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
333 {
334 unsigned int regno = DF_REF_REGNO (def);
335 unsigned int begin = DF_DEFS_BEGIN (regno);
336 unsigned int n_defs = DF_DEFS_COUNT (regno);
337
338 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
339 || (regno >= FIRST_PSEUDO_REGISTER))
340 {
  341 /* Only the last def(s) for a regno in the block have any
  342    effect.  */
343 if (!bitmap_bit_p (&seen_in_block, regno))
344 {
345 /* The first def for regno in insn gets to knock out the
346 defs from other instructions. */
347 if ((!bitmap_bit_p (&seen_in_insn, regno))
348 /* If the def is to only part of the reg, it does
349 not kill the other defs that reach here. */
350 && (!(DF_REF_FLAGS (def) &
351 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
352 {
353 if (n_defs > DF_SPARSE_THRESHOLD)
354 {
355 bitmap_set_bit (&bb_info->sparse_kill, regno);
  356 bitmap_clear_range (&bb_info->gen, begin, n_defs);
357 }
358 else
359 {
360 bitmap_set_range (&bb_info->kill, begin, n_defs);
361 bitmap_clear_range (&bb_info->gen, begin, n_defs);
362 }
363 }
364
365 bitmap_set_bit (&seen_in_insn, regno);
366 /* All defs for regno in the instruction may be put into
367 the gen set. */
368 if (!(DF_REF_FLAGS (def)
369 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
370 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
371 }
372 }
373 }
374 def_rec++;
375 }
376 }
377
378 /* Compute local reaching def info for basic block BB. */
379
380 static void
381 df_rd_bb_local_compute (unsigned int bb_index)
382 {
383 basic_block bb = BASIC_BLOCK (bb_index);
384 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
385 rtx insn;
386
387 bitmap_clear (&seen_in_block);
388 bitmap_clear (&seen_in_insn);
389
390 /* Artificials are only hard regs. */
391 if (!(df->changeable_flags & DF_NO_HARD_REGS))
392 df_rd_bb_local_compute_process_def (bb_info,
393 df_get_artificial_defs (bb_index),
394 0);
395
396 FOR_BB_INSNS_REVERSE (bb, insn)
397 {
398 unsigned int uid = INSN_UID (insn);
399
400 if (!INSN_P (insn))
401 continue;
402
403 df_rd_bb_local_compute_process_def (bb_info,
404 DF_INSN_UID_DEFS (uid), 0);
405
406 /* This complex dance with the two bitmaps is required because
407 instructions can assign twice to the same pseudo. This
408 generally happens with calls that will have one def for the
409 result and another def for the clobber. If only one vector
410 is used and the clobber goes first, the result will be
411 lost. */
412 bitmap_ior_into (&seen_in_block, &seen_in_insn);
413 bitmap_clear (&seen_in_insn);
414 }
415
416 /* Process the artificial defs at the top of the block last since we
417 are going backwards through the block and these are logically at
418 the start. */
419 if (!(df->changeable_flags & DF_NO_HARD_REGS))
420 df_rd_bb_local_compute_process_def (bb_info,
421 df_get_artificial_defs (bb_index),
422 DF_REF_AT_TOP);
423 }
424
425
426 /* Compute local reaching def info for each basic block within BLOCKS. */
427
428 static void
429 df_rd_local_compute (bitmap all_blocks)
430 {
431 unsigned int bb_index;
432 bitmap_iterator bi;
433 unsigned int regno;
434 struct df_rd_problem_data *problem_data
435 = (struct df_rd_problem_data *) df_rd->problem_data;
436 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
437 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
438
439 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
440 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
441
442 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
443
444 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
445 {
446 df_rd_bb_local_compute (bb_index);
447 }
448
449 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
450 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
451 {
452 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
453 bitmap_set_bit (sparse_invalidated, regno);
454 else
455 bitmap_set_range (dense_invalidated,
456 DF_DEFS_BEGIN (regno),
457 DF_DEFS_COUNT (regno));
458 }
459
460 bitmap_clear (&seen_in_block);
461 bitmap_clear (&seen_in_insn);
462 }
463
464
465 /* Initialize the solution bit vectors for problem. */
466
467 static void
468 df_rd_init_solution (bitmap all_blocks)
469 {
470 unsigned int bb_index;
471 bitmap_iterator bi;
472
473 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
474 {
475 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
476
477 bitmap_copy (&bb_info->out, &bb_info->gen);
478 bitmap_clear (&bb_info->in);
479 }
480 }
481
  482 /* The IN set of the target gets the OR of the OUT set of the source.  */
483
484 static bool
485 df_rd_confluence_n (edge e)
486 {
487 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
488 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
489 bool changed = false;
490
491 if (e->flags & EDGE_FAKE)
492 return false;
493
494 if (e->flags & EDGE_EH)
495 {
496 struct df_rd_problem_data *problem_data
497 = (struct df_rd_problem_data *) df_rd->problem_data;
498 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
499 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
500 bitmap_iterator bi;
501 unsigned int regno;
502 bitmap_head tmp;
503
504 bitmap_initialize (&tmp, &df_bitmap_obstack);
505 bitmap_copy (&tmp, op2);
506 bitmap_and_compl_into (&tmp, dense_invalidated);
507
508 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
509 {
510 bitmap_clear_range (&tmp,
511 DF_DEFS_BEGIN (regno),
512 DF_DEFS_COUNT (regno));
513 }
514 changed |= bitmap_ior_into (op1, &tmp);
515 bitmap_clear (&tmp);
516 return changed;
517 }
518 else
519 return bitmap_ior_into (op1, op2);
520 }
521
522
523 /* Transfer function. */
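/* In dataflow terms the function below computes

     OUT = GEN | (IN & ~KILL)

   except that for every regno recorded in SPARSE_KILL the whole range of
   that register's def IDs is first cleared from a copy of IN, since those
   kills are deliberately not represented bit-by-bit in KILL.  */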
524
525 static bool
526 df_rd_transfer_function (int bb_index)
527 {
528 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
529 unsigned int regno;
530 bitmap_iterator bi;
531 bitmap in = &bb_info->in;
532 bitmap out = &bb_info->out;
533 bitmap gen = &bb_info->gen;
534 bitmap kill = &bb_info->kill;
535 bitmap sparse_kill = &bb_info->sparse_kill;
536
537 if (bitmap_empty_p (sparse_kill))
538 return bitmap_ior_and_compl (out, gen, in, kill);
539 else
540 {
541 struct df_rd_problem_data *problem_data;
542 bool changed = false;
543 bitmap_head tmp;
544
545 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
546 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
547 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
548 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
549
550 bitmap_copy (&tmp, in);
551 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
552 {
553 bitmap_clear_range (&tmp,
554 DF_DEFS_BEGIN (regno),
555 DF_DEFS_COUNT (regno));
556 }
557 bitmap_and_compl_into (&tmp, kill);
558 bitmap_ior_into (&tmp, gen);
559 changed = !bitmap_equal_p (&tmp, out);
560 if (changed)
561 {
562 bitmap_clear (out);
563 bb_info->out = tmp;
564 }
565 else
566 bitmap_clear (&tmp);
567 return changed;
568 }
569 }
570
571
572 /* Free all storage associated with the problem. */
573
574 static void
575 df_rd_free (void)
576 {
577 struct df_rd_problem_data *problem_data
578 = (struct df_rd_problem_data *) df_rd->problem_data;
579
580 if (problem_data)
581 {
582 bitmap_obstack_release (&problem_data->rd_bitmaps);
583
584 df_rd->block_info_size = 0;
585 free (df_rd->block_info);
586 df_rd->block_info = NULL;
587 free (df_rd->problem_data);
588 }
589 free (df_rd);
590 }
591
592
593 /* Debugging info. */
594
595 static void
596 df_rd_start_dump (FILE *file)
597 {
598 struct df_rd_problem_data *problem_data
599 = (struct df_rd_problem_data *) df_rd->problem_data;
600 unsigned int m = DF_REG_SIZE(df);
601 unsigned int regno;
602
603 if (!df_rd->block_info)
604 return;
605
606 fprintf (file, ";; Reaching defs:\n\n");
607
608 fprintf (file, " sparse invalidated \t");
609 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
610 fprintf (file, " dense invalidated \t");
611 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
612
613 for (regno = 0; regno < m; regno++)
614 if (DF_DEFS_COUNT (regno))
615 fprintf (file, "%d[%d,%d] ", regno,
616 DF_DEFS_BEGIN (regno),
617 DF_DEFS_COUNT (regno));
618 fprintf (file, "\n");
619
620 }
621
622
623 /* Debugging info at top of bb. */
624
625 static void
626 df_rd_top_dump (basic_block bb, FILE *file)
627 {
628 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
629 if (!bb_info)
630 return;
631
632 fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (&bb_info->in));
633 dump_bitmap (file, &bb_info->in);
634 fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (&bb_info->gen));
635 dump_bitmap (file, &bb_info->gen);
636 fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (&bb_info->kill));
637 dump_bitmap (file, &bb_info->kill);
638 }
639
640
  641 /* Debugging info at bottom of bb.  */
642
643 static void
644 df_rd_bottom_dump (basic_block bb, FILE *file)
645 {
646 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
647 if (!bb_info)
648 return;
649
650 fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (&bb_info->out));
651 dump_bitmap (file, &bb_info->out);
652 }
653
654 /* All of the information associated with every instance of the problem. */
655
656 static struct df_problem problem_RD =
657 {
658 DF_RD, /* Problem id. */
659 DF_FORWARD, /* Direction. */
660 df_rd_alloc, /* Allocate the problem specific data. */
661 NULL, /* Reset global information. */
662 df_rd_free_bb_info, /* Free basic block info. */
663 df_rd_local_compute, /* Local compute function. */
664 df_rd_init_solution, /* Init the solution specific data. */
665 df_worklist_dataflow, /* Worklist solver. */
666 NULL, /* Confluence operator 0. */
667 df_rd_confluence_n, /* Confluence operator n. */
668 df_rd_transfer_function, /* Transfer function. */
669 NULL, /* Finalize function. */
670 df_rd_free, /* Free all of the problem information. */
671 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
672 df_rd_start_dump, /* Debugging. */
673 df_rd_top_dump, /* Debugging start block. */
674 df_rd_bottom_dump, /* Debugging end block. */
675 NULL, /* Incremental solution verify start. */
676 NULL, /* Incremental solution verify end. */
677 NULL, /* Dependent problem. */
678 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
679 TV_DF_RD, /* Timing variable. */
680 true /* Reset blocks on dropping out of blocks_to_analyze. */
681 };
682
683
684
685 /* Create a new RD instance and add it to the existing instance
686 of DF. */
687
688 void
689 df_rd_add_problem (void)
690 {
691 df_add_problem (&problem_RD);
692 }
693
694
695 \f
696 /*----------------------------------------------------------------------------
697 LIVE REGISTERS
698
699 Find the locations in the function where any use of a pseudo can
700 reach in the backwards direction. In and out bitvectors are built
701 for each basic block. The regno is used to index into these sets.
702 See df.h for details.
703 ----------------------------------------------------------------------------*/
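/* In dataflow terms this is the classic backward liveness problem:

     IN(bb)  = USE(bb) | (OUT(bb) & ~DEF(bb))
     OUT(bb) = union of IN(s) over all successors s of bb

   with the confluence functions below additionally forcing the registers
   in df->hardware_regs_used to be live everywhere and pruning
   call-clobbered registers across EH edges.  */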
704
705 /* Private data used to verify the solution for this problem. */
706 struct df_lr_problem_data
707 {
708 bitmap_head *in;
709 bitmap_head *out;
710 /* An obstack for the bitmaps we need for this problem. */
711 bitmap_obstack lr_bitmaps;
712 };
713
714 /* Free basic block info. */
715
716 static void
717 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
718 void *vbb_info)
719 {
720 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
721 if (bb_info)
722 {
723 bitmap_clear (&bb_info->use);
724 bitmap_clear (&bb_info->def);
725 bitmap_clear (&bb_info->in);
726 bitmap_clear (&bb_info->out);
727 }
728 }
729
730
731 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
732 not touched unless the block is new. */
733
734 static void
735 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
736 {
737 unsigned int bb_index;
738 bitmap_iterator bi;
739 struct df_lr_problem_data *problem_data;
740
741 df_grow_bb_info (df_lr);
742 if (df_lr->problem_data)
743 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
744 else
745 {
746 problem_data = XNEW (struct df_lr_problem_data);
747 df_lr->problem_data = problem_data;
748
749 problem_data->out = NULL;
750 problem_data->in = NULL;
751 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
752 }
753
754 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
755 {
756 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
757
758 /* When bitmaps are already initialized, just clear them. */
759 if (bb_info->use.obstack)
760 {
761 bitmap_clear (&bb_info->def);
762 bitmap_clear (&bb_info->use);
763 }
764 else
765 {
766 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
767 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
768 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
769 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
770 }
771 }
772
773 df_lr->optional_p = false;
774 }
775
776
777 /* Reset the global solution for recalculation. */
778
779 static void
780 df_lr_reset (bitmap all_blocks)
781 {
782 unsigned int bb_index;
783 bitmap_iterator bi;
784
785 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
786 {
787 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
788 gcc_assert (bb_info);
789 bitmap_clear (&bb_info->in);
790 bitmap_clear (&bb_info->out);
791 }
792 }
793
794
795 /* Compute local live register info for basic block BB. */
796
797 static void
798 df_lr_bb_local_compute (unsigned int bb_index)
799 {
800 basic_block bb = BASIC_BLOCK (bb_index);
801 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
802 rtx insn;
803 df_ref *def_rec;
804 df_ref *use_rec;
805
806 /* Process the registers set in an exception handler. */
807 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
808 {
809 df_ref def = *def_rec;
810 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
811 {
812 unsigned int dregno = DF_REF_REGNO (def);
813 bitmap_set_bit (&bb_info->def, dregno);
814 bitmap_clear_bit (&bb_info->use, dregno);
815 }
816 }
817
818 /* Process the hardware registers that are always live. */
819 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
820 {
821 df_ref use = *use_rec;
822 /* Add use to set of uses in this BB. */
823 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
824 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
825 }
826
827 FOR_BB_INSNS_REVERSE (bb, insn)
828 {
829 unsigned int uid = INSN_UID (insn);
830
831 if (!NONDEBUG_INSN_P (insn))
832 continue;
833
834 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
835 {
836 df_ref def = *def_rec;
837 /* If the def is to only part of the reg, it does
838 not kill the other defs that reach here. */
839 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
840 {
841 unsigned int dregno = DF_REF_REGNO (def);
842 bitmap_set_bit (&bb_info->def, dregno);
843 bitmap_clear_bit (&bb_info->use, dregno);
844 }
845 }
846
847 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
848 {
849 df_ref use = *use_rec;
850 /* Add use to set of uses in this BB. */
851 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
852 }
853 }
854
855 /* Process the registers set in an exception handler or the hard
856 frame pointer if this block is the target of a non local
857 goto. */
858 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
859 {
860 df_ref def = *def_rec;
861 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
862 {
863 unsigned int dregno = DF_REF_REGNO (def);
864 bitmap_set_bit (&bb_info->def, dregno);
865 bitmap_clear_bit (&bb_info->use, dregno);
866 }
867 }
868
869 #ifdef EH_USES
870 /* Process the uses that are live into an exception handler. */
871 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
872 {
873 df_ref use = *use_rec;
874 /* Add use to set of uses in this BB. */
875 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
876 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
877 }
878 #endif
879
880 /* If the df_live problem is not defined, such as at -O0 and -O1, we
881 still need to keep the luids up to date. This is normally done
882 in the df_live problem since this problem has a forwards
883 scan. */
884 if (!df_live)
885 df_recompute_luids (bb);
886 }
887
888
889 /* Compute local live register info for each basic block within BLOCKS. */
890
891 static void
892 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
893 {
894 unsigned int bb_index;
895 bitmap_iterator bi;
896
897 bitmap_clear (&df->hardware_regs_used);
898
899 /* The all-important stack pointer must always be live. */
900 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
901
902 /* Before reload, there are a few registers that must be forced
903 live everywhere -- which might not already be the case for
904 blocks within infinite loops. */
905 if (!reload_completed)
906 {
907 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
908 /* Any reference to any pseudo before reload is a potential
909 reference of the frame pointer. */
910 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
911
912 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
913 /* Pseudos with argument area equivalences may require
914 reloading via the argument pointer. */
915 if (fixed_regs[ARG_POINTER_REGNUM])
916 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
917 #endif
918
919 /* Any constant, or pseudo with constant equivalences, may
920 require reloading from memory using the pic register. */
921 if (pic_offset_table_regnum != INVALID_REGNUM
922 && fixed_regs[pic_offset_table_regnum])
923 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
924 }
925
926 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
927 {
928 if (bb_index == EXIT_BLOCK)
929 {
930 /* The exit block is special for this problem and its bits are
931 computed from thin air. */
932 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
933 bitmap_copy (&bb_info->use, df->exit_block_uses);
934 }
935 else
936 df_lr_bb_local_compute (bb_index);
937 }
938
939 bitmap_clear (df_lr->out_of_date_transfer_functions);
940 }
941
942
943 /* Initialize the solution vectors. */
944
945 static void
946 df_lr_init (bitmap all_blocks)
947 {
948 unsigned int bb_index;
949 bitmap_iterator bi;
950
951 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
952 {
953 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
954 bitmap_copy (&bb_info->in, &bb_info->use);
955 bitmap_clear (&bb_info->out);
956 }
957 }
958
959
960 /* Confluence function that processes infinite loops. This might be a
961 noreturn function that throws. And even if it isn't, getting the
962 unwind info right helps debugging. */
963 static void
964 df_lr_confluence_0 (basic_block bb)
965 {
966 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
967 if (bb != EXIT_BLOCK_PTR)
968 bitmap_copy (op1, &df->hardware_regs_used);
969 }
970
971
972 /* Confluence function that ignores fake edges. */
973
974 static bool
975 df_lr_confluence_n (edge e)
976 {
977 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
978 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
979 bool changed = false;
980
981 /* Call-clobbered registers die across exception and call edges. */
982 /* ??? Abnormal call edges ignored for the moment, as this gets
983 confused by sibling call edges, which crashes reg-stack. */
984 if (e->flags & EDGE_EH)
985 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
986 else
987 changed = bitmap_ior_into (op1, op2);
988
989 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
990 return changed;
991 }
992
993
994 /* Transfer function. */
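/* Computes IN = USE | (OUT & ~DEF) and reports to the worklist solver
   whether IN changed.  */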
995
996 static bool
997 df_lr_transfer_function (int bb_index)
998 {
999 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1000 bitmap in = &bb_info->in;
1001 bitmap out = &bb_info->out;
1002 bitmap use = &bb_info->use;
1003 bitmap def = &bb_info->def;
1004
1005 return bitmap_ior_and_compl (in, use, out, def);
1006 }
1007
1008
1009 /* Run the fast dce as a side effect of building LR. */
1010
1011 static void
1012 df_lr_finalize (bitmap all_blocks)
1013 {
1014 df_lr->solutions_dirty = false;
1015 if (df->changeable_flags & DF_LR_RUN_DCE)
1016 {
1017 run_fast_df_dce ();
1018
1019 /* If dce deletes some instructions, we need to recompute the lr
1020 solution before proceeding further. The problem is that fast
  1021 dce is a pessimistic dataflow algorithm.  In the case where
1022 it deletes a statement S inside of a loop, the uses inside of
1023 S may not be deleted from the dataflow solution because they
1024 were carried around the loop. While it is conservatively
1025 correct to leave these extra bits, the standards of df
1026 require that we maintain the best possible (least fixed
1027 point) solution. The only way to do that is to redo the
1028 iteration from the beginning. See PR35805 for an
1029 example. */
1030 if (df_lr->solutions_dirty)
1031 {
1032 df_clear_flags (DF_LR_RUN_DCE);
1033 df_lr_alloc (all_blocks);
1034 df_lr_local_compute (all_blocks);
1035 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1036 df_lr_finalize (all_blocks);
1037 df_set_flags (DF_LR_RUN_DCE);
1038 }
1039 }
1040 }
1041
1042
1043 /* Free all storage associated with the problem. */
1044
1045 static void
1046 df_lr_free (void)
1047 {
1048 struct df_lr_problem_data *problem_data
1049 = (struct df_lr_problem_data *) df_lr->problem_data;
1050 if (df_lr->block_info)
1051 {
1052
1053 df_lr->block_info_size = 0;
1054 free (df_lr->block_info);
1055 df_lr->block_info = NULL;
1056 bitmap_obstack_release (&problem_data->lr_bitmaps);
1057 free (df_lr->problem_data);
1058 df_lr->problem_data = NULL;
1059 }
1060
1061 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1062 free (df_lr);
1063 }
1064
1065
1066 /* Debugging info at top of bb. */
1067
1068 static void
1069 df_lr_top_dump (basic_block bb, FILE *file)
1070 {
1071 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1072 struct df_lr_problem_data *problem_data;
1073 if (!bb_info)
1074 return;
1075
1076 fprintf (file, ";; lr in \t");
1077 df_print_regset (file, &bb_info->in);
1078 if (df_lr->problem_data)
1079 {
1080 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1081 if (problem_data->in)
1082 {
1083 fprintf (file, ";; old in \t");
1084 df_print_regset (file, &problem_data->in[bb->index]);
1085 }
1086 }
1087 fprintf (file, ";; lr use \t");
1088 df_print_regset (file, &bb_info->use);
1089 fprintf (file, ";; lr def \t");
1090 df_print_regset (file, &bb_info->def);
1091 }
1092
1093
1094 /* Debugging info at bottom of bb. */
1095
1096 static void
1097 df_lr_bottom_dump (basic_block bb, FILE *file)
1098 {
1099 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1100 struct df_lr_problem_data *problem_data;
1101 if (!bb_info)
1102 return;
1103
1104 fprintf (file, ";; lr out \t");
1105 df_print_regset (file, &bb_info->out);
1106 if (df_lr->problem_data)
1107 {
1108 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1109 if (problem_data->out)
1110 {
1111 fprintf (file, ";; old out \t");
1112 df_print_regset (file, &problem_data->out[bb->index]);
1113 }
1114 }
1115 }
1116
1117
1118 /* Build the datastructure to verify that the solution to the dataflow
1119 equations is not dirty. */
1120
1121 static void
1122 df_lr_verify_solution_start (void)
1123 {
1124 basic_block bb;
1125 struct df_lr_problem_data *problem_data;
1126 if (df_lr->solutions_dirty)
1127 return;
1128
1129 /* Set it true so that the solution is recomputed. */
1130 df_lr->solutions_dirty = true;
1131
1132 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1133 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1134 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1135
1136 FOR_ALL_BB (bb)
1137 {
1138 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1139 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1140 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1141 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1142 }
1143 }
1144
1145
1146 /* Compare the saved datastructure and the new solution to the dataflow
1147 equations. */
1148
1149 static void
1150 df_lr_verify_solution_end (void)
1151 {
1152 struct df_lr_problem_data *problem_data;
1153 basic_block bb;
1154
1155 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1156
1157 if (!problem_data->out)
1158 return;
1159
1160 if (df_lr->solutions_dirty)
1161 /* Do not check if the solution is still dirty. See the comment
1162 in df_lr_finalize for details. */
1163 df_lr->solutions_dirty = false;
1164 else
1165 FOR_ALL_BB (bb)
1166 {
1167 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1168 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1169 {
1170 /*df_dump (stderr);*/
1171 gcc_unreachable ();
1172 }
1173 }
1174
1175 /* Cannot delete them immediately because you may want to dump them
1176 if the comparison fails. */
1177 FOR_ALL_BB (bb)
1178 {
1179 bitmap_clear (&problem_data->in[bb->index]);
1180 bitmap_clear (&problem_data->out[bb->index]);
1181 }
1182
1183 free (problem_data->in);
1184 free (problem_data->out);
1185 problem_data->in = NULL;
1186 problem_data->out = NULL;
1187 }
1188
1189
1190 /* All of the information associated with every instance of the problem. */
1191
1192 static struct df_problem problem_LR =
1193 {
1194 DF_LR, /* Problem id. */
1195 DF_BACKWARD, /* Direction. */
1196 df_lr_alloc, /* Allocate the problem specific data. */
1197 df_lr_reset, /* Reset global information. */
1198 df_lr_free_bb_info, /* Free basic block info. */
1199 df_lr_local_compute, /* Local compute function. */
1200 df_lr_init, /* Init the solution specific data. */
1201 df_worklist_dataflow, /* Worklist solver. */
1202 df_lr_confluence_0, /* Confluence operator 0. */
1203 df_lr_confluence_n, /* Confluence operator n. */
1204 df_lr_transfer_function, /* Transfer function. */
1205 df_lr_finalize, /* Finalize function. */
1206 df_lr_free, /* Free all of the problem information. */
1207 NULL, /* Remove this problem from the stack of dataflow problems. */
1208 NULL, /* Debugging. */
1209 df_lr_top_dump, /* Debugging start block. */
1210 df_lr_bottom_dump, /* Debugging end block. */
1211 df_lr_verify_solution_start,/* Incremental solution verify start. */
1212 df_lr_verify_solution_end, /* Incremental solution verify end. */
1213 NULL, /* Dependent problem. */
1214 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1215 TV_DF_LR, /* Timing variable. */
1216 false /* Reset blocks on dropping out of blocks_to_analyze. */
1217 };
1218
1219
1220 /* Create a new DATAFLOW instance and add it to an existing instance
1221 of DF. The returned structure is what is used to get at the
1222 solution. */
1223
1224 void
1225 df_lr_add_problem (void)
1226 {
1227 df_add_problem (&problem_LR);
1228 /* These will be initialized when df_scan_blocks processes each
1229 block. */
1230 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1231 }
1232
1233
1234 /* Verify that all of the lr related info is consistent and
1235 correct. */
1236
1237 void
1238 df_lr_verify_transfer_functions (void)
1239 {
1240 basic_block bb;
1241 bitmap_head saved_def;
1242 bitmap_head saved_use;
1243 bitmap_head all_blocks;
1244
1245 if (!df)
1246 return;
1247
1248 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1249 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1250 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1251
1252 FOR_ALL_BB (bb)
1253 {
1254 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1255 bitmap_set_bit (&all_blocks, bb->index);
1256
1257 if (bb_info)
1258 {
1259 /* Make a copy of the transfer functions and then compute
1260 new ones to see if the transfer functions have
1261 changed. */
1262 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1263 bb->index))
1264 {
1265 bitmap_copy (&saved_def, &bb_info->def);
1266 bitmap_copy (&saved_use, &bb_info->use);
1267 bitmap_clear (&bb_info->def);
1268 bitmap_clear (&bb_info->use);
1269
1270 df_lr_bb_local_compute (bb->index);
1271 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1272 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1273 }
1274 }
1275 else
1276 {
1277 /* If we do not have basic block info, the block must be in
  1278 the list of dirty blocks or else someone has added a
1279 block behind our backs. */
1280 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1281 bb->index));
1282 }
1283 /* Make sure no one created a block without following
1284 procedures. */
1285 gcc_assert (df_scan_get_bb_info (bb->index));
1286 }
1287
1288 /* Make sure there are no dirty bits in blocks that have been deleted. */
1289 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1290 &all_blocks));
1291
1292 bitmap_clear (&saved_def);
1293 bitmap_clear (&saved_use);
1294 bitmap_clear (&all_blocks);
1295 }
1296
1297
1298 \f
1299 /*----------------------------------------------------------------------------
1300 LIVE AND MUST-INITIALIZED REGISTERS.
1301
1302 This problem first computes the IN and OUT bitvectors for the
1303 must-initialized registers problems, which is a forward problem.
1304 It gives the set of registers for which we MUST have an available
1305 definition on any path from the entry block to the entry/exit of
1306 a basic block. Sets generate a definition, while clobbers kill
1307 a definition.
1308
1309 In and out bitvectors are built for each basic block and are indexed by
1310 regnum (see df.h for details). In and out bitvectors in struct
  1311 df_live_bb_info actually refer to the must-initialized problem.
1312
1313 Then, the in and out sets for the LIVE problem itself are computed.
1314 These are the logical AND of the IN and OUT sets from the LR problem
1315 and the must-initialized problem.
1316 ----------------------------------------------------------------------------*/
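/* Stated as equations (roughly; see the transfer and finalize functions
   below), the must-initialized problem computed first is

     OUT(bb) = (GEN(bb) & LR_OUT(bb)) | ((IN(bb) & LR_IN(bb)) & ~KILL(bb))
     IN(bb)  = union of OUT(p) over all predecessors p of bb

   and df_live_finalize then ANDs the stored IN/OUT with the LR solution,
   so DF_LIVE_IN/DF_LIVE_OUT hand the combined result to clients.  */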
1317
1318 /* Private data used to verify the solution for this problem. */
1319 struct df_live_problem_data
1320 {
1321 bitmap_head *in;
1322 bitmap_head *out;
1323 /* An obstack for the bitmaps we need for this problem. */
1324 bitmap_obstack live_bitmaps;
1325 };
1326
1327 /* Scratch var used by transfer functions. This is used to implement
1328 an optimization to reduce the amount of space used to compute the
1329 combined lr and live analysis. */
1330 static bitmap_head df_live_scratch;
1331
1332
1333 /* Free basic block info. */
1334
1335 static void
1336 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1337 void *vbb_info)
1338 {
1339 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1340 if (bb_info)
1341 {
1342 bitmap_clear (&bb_info->gen);
1343 bitmap_clear (&bb_info->kill);
1344 bitmap_clear (&bb_info->in);
1345 bitmap_clear (&bb_info->out);
1346 }
1347 }
1348
1349
1350 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1351 not touched unless the block is new. */
1352
1353 static void
1354 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1355 {
1356 unsigned int bb_index;
1357 bitmap_iterator bi;
1358 struct df_live_problem_data *problem_data;
1359
1360 if (df_live->problem_data)
1361 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1362 else
1363 {
1364 problem_data = XNEW (struct df_live_problem_data);
1365 df_live->problem_data = problem_data;
1366
1367 problem_data->out = NULL;
1368 problem_data->in = NULL;
1369 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1370 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1371 }
1372
1373 df_grow_bb_info (df_live);
1374
1375 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1376 {
1377 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1378
1379 /* When bitmaps are already initialized, just clear them. */
1380 if (bb_info->kill.obstack)
1381 {
1382 bitmap_clear (&bb_info->kill);
1383 bitmap_clear (&bb_info->gen);
1384 }
1385 else
1386 {
1387 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1388 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1389 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1390 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1391 }
1392 }
1393 df_live->optional_p = (optimize <= 1);
1394 }
1395
1396
1397 /* Reset the global solution for recalculation. */
1398
1399 static void
1400 df_live_reset (bitmap all_blocks)
1401 {
1402 unsigned int bb_index;
1403 bitmap_iterator bi;
1404
1405 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1406 {
1407 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1408 gcc_assert (bb_info);
1409 bitmap_clear (&bb_info->in);
1410 bitmap_clear (&bb_info->out);
1411 }
1412 }
1413
1414
1415 /* Compute local uninitialized register info for basic block BB. */
1416
1417 static void
1418 df_live_bb_local_compute (unsigned int bb_index)
1419 {
1420 basic_block bb = BASIC_BLOCK (bb_index);
1421 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1422 rtx insn;
1423 df_ref *def_rec;
1424 int luid = 0;
1425
1426 FOR_BB_INSNS (bb, insn)
1427 {
1428 unsigned int uid = INSN_UID (insn);
1429 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1430
1431 /* Inserting labels does not always trigger the incremental
1432 rescanning. */
1433 if (!insn_info)
1434 {
1435 gcc_assert (!INSN_P (insn));
1436 insn_info = df_insn_create_insn_record (insn);
1437 }
1438
1439 DF_INSN_INFO_LUID (insn_info) = luid;
1440 if (!INSN_P (insn))
1441 continue;
1442
1443 luid++;
1444 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1445 {
1446 df_ref def = *def_rec;
1447 unsigned int regno = DF_REF_REGNO (def);
1448
1449 if (DF_REF_FLAGS_IS_SET (def,
1450 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
  1451 /* All partial or conditional defs
  1452    seen are included in the gen set.  */
1453 bitmap_set_bit (&bb_info->gen, regno);
1454 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1455 /* Only must clobbers for the entire reg destroy the
1456 value. */
1457 bitmap_set_bit (&bb_info->kill, regno);
1458 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1459 bitmap_set_bit (&bb_info->gen, regno);
1460 }
1461 }
1462
1463 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1464 {
1465 df_ref def = *def_rec;
1466 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1467 }
1468 }
1469
1470
1471 /* Compute local uninitialized register info. */
1472
1473 static void
1474 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1475 {
1476 unsigned int bb_index;
1477 bitmap_iterator bi;
1478
1479 df_grow_insn_info ();
1480
1481 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1482 0, bb_index, bi)
1483 {
1484 df_live_bb_local_compute (bb_index);
1485 }
1486
1487 bitmap_clear (df_live->out_of_date_transfer_functions);
1488 }
1489
1490
1491 /* Initialize the solution vectors. */
1492
1493 static void
1494 df_live_init (bitmap all_blocks)
1495 {
1496 unsigned int bb_index;
1497 bitmap_iterator bi;
1498
1499 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1500 {
1501 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1502 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1503
1504 /* No register may reach a location where it is not used. Thus
1505 we trim the rr result to the places where it is used. */
1506 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1507 bitmap_clear (&bb_info->in);
1508 }
1509 }
1510
1511 /* Forward confluence function that ignores fake edges. */
1512
1513 static bool
1514 df_live_confluence_n (edge e)
1515 {
1516 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1517 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1518
1519 if (e->flags & EDGE_FAKE)
1520 return false;
1521
1522 return bitmap_ior_into (op1, op2);
1523 }
1524
1525
1526 /* Transfer function for the forwards must-initialized problem. */
1527
1528 static bool
1529 df_live_transfer_function (int bb_index)
1530 {
1531 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1532 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1533 bitmap in = &bb_info->in;
1534 bitmap out = &bb_info->out;
1535 bitmap gen = &bb_info->gen;
1536 bitmap kill = &bb_info->kill;
1537
1538 /* We need to use a scratch set here so that the value returned from this
1539 function invocation properly reflects whether the sets changed in a
1540 significant way; i.e. not just because the lr set was anded in. */
1541 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1542 /* No register may reach a location where it is not used. Thus
1543 we trim the rr result to the places where it is used. */
1544 bitmap_and_into (in, &bb_lr_info->in);
1545
1546 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1547 }
1548
1549
  1550 /* AND the LR info with the must-initialized registers to produce the LIVE info.  */
1551
1552 static void
1553 df_live_finalize (bitmap all_blocks)
1554 {
1555
1556 if (df_live->solutions_dirty)
1557 {
1558 bitmap_iterator bi;
1559 unsigned int bb_index;
1560
1561 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1562 {
1563 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1564 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1565
1566 /* No register may reach a location where it is not used. Thus
1567 we trim the rr result to the places where it is used. */
1568 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1569 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1570 }
1571
1572 df_live->solutions_dirty = false;
1573 }
1574 }
1575
1576
1577 /* Free all storage associated with the problem. */
1578
1579 static void
1580 df_live_free (void)
1581 {
1582 struct df_live_problem_data *problem_data
1583 = (struct df_live_problem_data *) df_live->problem_data;
1584 if (df_live->block_info)
1585 {
1586 df_live->block_info_size = 0;
1587 free (df_live->block_info);
1588 df_live->block_info = NULL;
1589 bitmap_clear (&df_live_scratch);
1590 bitmap_obstack_release (&problem_data->live_bitmaps);
1591 free (problem_data);
1592 df_live->problem_data = NULL;
1593 }
1594 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1595 free (df_live);
1596 }
1597
1598
1599 /* Debugging info at top of bb. */
1600
1601 static void
1602 df_live_top_dump (basic_block bb, FILE *file)
1603 {
1604 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1605 struct df_live_problem_data *problem_data;
1606
1607 if (!bb_info)
1608 return;
1609
1610 fprintf (file, ";; live in \t");
1611 df_print_regset (file, &bb_info->in);
1612 if (df_live->problem_data)
1613 {
1614 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1615 if (problem_data->in)
1616 {
1617 fprintf (file, ";; old in \t");
1618 df_print_regset (file, &problem_data->in[bb->index]);
1619 }
1620 }
1621 fprintf (file, ";; live gen \t");
1622 df_print_regset (file, &bb_info->gen);
1623 fprintf (file, ";; live kill\t");
1624 df_print_regset (file, &bb_info->kill);
1625 }
1626
1627
1628 /* Debugging info at bottom of bb. */
1629
1630 static void
1631 df_live_bottom_dump (basic_block bb, FILE *file)
1632 {
1633 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1634 struct df_live_problem_data *problem_data;
1635
1636 if (!bb_info)
1637 return;
1638
1639 fprintf (file, ";; live out \t");
1640 df_print_regset (file, &bb_info->out);
1641 if (df_live->problem_data)
1642 {
1643 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1644 if (problem_data->out)
1645 {
1646 fprintf (file, ";; old out \t");
1647 df_print_regset (file, &problem_data->out[bb->index]);
1648 }
1649 }
1650 }
1651
1652
1653 /* Build the datastructure to verify that the solution to the dataflow
1654 equations is not dirty. */
1655
1656 static void
1657 df_live_verify_solution_start (void)
1658 {
1659 basic_block bb;
1660 struct df_live_problem_data *problem_data;
1661 if (df_live->solutions_dirty)
1662 return;
1663
1664 /* Set it true so that the solution is recomputed. */
1665 df_live->solutions_dirty = true;
1666
1667 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1668 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1669 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1670
1671 FOR_ALL_BB (bb)
1672 {
1673 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1674 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1675 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1676 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1677 }
1678 }
1679
1680
1681 /* Compare the saved datastructure and the new solution to the dataflow
1682 equations. */
1683
1684 static void
1685 df_live_verify_solution_end (void)
1686 {
1687 struct df_live_problem_data *problem_data;
1688 basic_block bb;
1689
1690 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1691 if (!problem_data->out)
1692 return;
1693
1694 FOR_ALL_BB (bb)
1695 {
1696 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1697 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1698 {
1699 /*df_dump (stderr);*/
1700 gcc_unreachable ();
1701 }
1702 }
1703
1704 /* Cannot delete them immediately because you may want to dump them
1705 if the comparison fails. */
1706 FOR_ALL_BB (bb)
1707 {
1708 bitmap_clear (&problem_data->in[bb->index]);
1709 bitmap_clear (&problem_data->out[bb->index]);
1710 }
1711
1712 free (problem_data->in);
1713 free (problem_data->out);
1714 free (problem_data);
1715 df_live->problem_data = NULL;
1716 }
1717
1718
1719 /* All of the information associated with every instance of the problem. */
1720
1721 static struct df_problem problem_LIVE =
1722 {
1723 DF_LIVE, /* Problem id. */
1724 DF_FORWARD, /* Direction. */
1725 df_live_alloc, /* Allocate the problem specific data. */
1726 df_live_reset, /* Reset global information. */
1727 df_live_free_bb_info, /* Free basic block info. */
1728 df_live_local_compute, /* Local compute function. */
1729 df_live_init, /* Init the solution specific data. */
1730 df_worklist_dataflow, /* Worklist solver. */
1731 NULL, /* Confluence operator 0. */
1732 df_live_confluence_n, /* Confluence operator n. */
1733 df_live_transfer_function, /* Transfer function. */
1734 df_live_finalize, /* Finalize function. */
1735 df_live_free, /* Free all of the problem information. */
1736 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1737 NULL, /* Debugging. */
1738 df_live_top_dump, /* Debugging start block. */
1739 df_live_bottom_dump, /* Debugging end block. */
1740 df_live_verify_solution_start,/* Incremental solution verify start. */
1741 df_live_verify_solution_end, /* Incremental solution verify end. */
1742 &problem_LR, /* Dependent problem. */
1743 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1744 TV_DF_LIVE, /* Timing variable. */
1745 false /* Reset blocks on dropping out of blocks_to_analyze. */
1746 };
1747
1748
1749 /* Create a new DATAFLOW instance and add it to an existing instance
1750 of DF. The returned structure is what is used to get at the
1751 solution. */
1752
1753 void
1754 df_live_add_problem (void)
1755 {
1756 df_add_problem (&problem_LIVE);
1757 /* These will be initialized when df_scan_blocks processes each
1758 block. */
1759 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1760 }
1761
1762
1763 /* Set all of the blocks as dirty. This needs to be done if this
1764 problem is added after all of the insns have been scanned. */
1765
1766 void
1767 df_live_set_all_dirty (void)
1768 {
1769 basic_block bb;
1770 FOR_ALL_BB (bb)
1771 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1772 bb->index);
1773 }
1774
1775
  1776 /* Verify that all of the live related info is consistent and
1777 correct. */
1778
1779 void
1780 df_live_verify_transfer_functions (void)
1781 {
1782 basic_block bb;
1783 bitmap_head saved_gen;
1784 bitmap_head saved_kill;
1785 bitmap_head all_blocks;
1786
1787 if (!df)
1788 return;
1789
1790 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1791 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1792 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1793
1794 df_grow_insn_info ();
1795
1796 FOR_ALL_BB (bb)
1797 {
1798 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1799 bitmap_set_bit (&all_blocks, bb->index);
1800
1801 if (bb_info)
1802 {
1803 /* Make a copy of the transfer functions and then compute
1804 new ones to see if the transfer functions have
1805 changed. */
1806 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1807 bb->index))
1808 {
1809 bitmap_copy (&saved_gen, &bb_info->gen);
1810 bitmap_copy (&saved_kill, &bb_info->kill);
1811 bitmap_clear (&bb_info->gen);
1812 bitmap_clear (&bb_info->kill);
1813
1814 df_live_bb_local_compute (bb->index);
1815 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1816 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1817 }
1818 }
1819 else
1820 {
1821 /* If we do not have basic block info, the block must be in
  1822 the list of dirty blocks or else someone has added a
1823 block behind our backs. */
1824 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1825 bb->index));
1826 }
1827 /* Make sure no one created a block without following
1828 procedures. */
1829 gcc_assert (df_scan_get_bb_info (bb->index));
1830 }
1831
1832 /* Make sure there are no dirty bits in blocks that have been deleted. */
1833 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1834 &all_blocks));
1835 bitmap_clear (&saved_gen);
1836 bitmap_clear (&saved_kill);
1837 bitmap_clear (&all_blocks);
1838 }
1839 \f
1840 /*----------------------------------------------------------------------------
1841 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1842
1843 Link either the defs to the uses and / or the uses to the defs.
1844
1845 These problems are set up like the other dataflow problems so that
1846 they nicely fit into the framework. They are much simpler and only
1847 involve a single traversal of instructions and an examination of
1848 the reaching defs information (the dependent problem).
1849 ----------------------------------------------------------------------------*/
1850
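/* Illustrative sketch (added for exposition): once the chain problem has
   been computed, the du or ud chain hanging off a df_ref is a singly
   linked list of df_link nodes and is walked like this (REF and OTHER are
   placeholder names):

       struct df_link *link;
       for (link = DF_REF_CHAIN (ref); link; link = link->next)
         {
           df_ref other = link->ref;
           ... for a def, OTHER is a reached use (DU chain);
               for a use, OTHER is a reaching def (UD chain) ...
         }
*/
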
1851 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1852
1853 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1854
1855 struct df_link *
1856 df_chain_create (df_ref src, df_ref dst)
1857 {
1858 struct df_link *head = DF_REF_CHAIN (src);
1859 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1860
1861 DF_REF_CHAIN (src) = link;
1862 link->next = head;
1863 link->ref = dst;
1864 return link;
1865 }
1866
1867
1868 /* Delete any du or ud chains that start at REF and point to
1869 TARGET. */
1870 static void
1871 df_chain_unlink_1 (df_ref ref, df_ref target)
1872 {
1873 struct df_link *chain = DF_REF_CHAIN (ref);
1874 struct df_link *prev = NULL;
1875
1876 while (chain)
1877 {
1878 if (chain->ref == target)
1879 {
1880 if (prev)
1881 prev->next = chain->next;
1882 else
1883 DF_REF_CHAIN (ref) = chain->next;
1884 pool_free (df_chain->block_pool, chain);
1885 return;
1886 }
1887 prev = chain;
1888 chain = chain->next;
1889 }
1890 }
1891
1892
1893 /* Delete all du or ud chains that leave or point to REF. */
1894
1895 void
1896 df_chain_unlink (df_ref ref)
1897 {
1898 struct df_link *chain = DF_REF_CHAIN (ref);
1899 while (chain)
1900 {
1901 struct df_link *next = chain->next;
1902 /* Delete the other side if it exists. */
1903 df_chain_unlink_1 (chain->ref, ref);
1904 pool_free (df_chain->block_pool, chain);
1905 chain = next;
1906 }
1907 DF_REF_CHAIN (ref) = NULL;
1908 }
1909
1910
1911 /* Copy the du or ud chain starting at FROM_REF and attach it to
1912 TO_REF. */
1913
1914 void
1915 df_chain_copy (df_ref to_ref,
1916 struct df_link *from_ref)
1917 {
1918 while (from_ref)
1919 {
1920 df_chain_create (to_ref, from_ref->ref);
1921 from_ref = from_ref->next;
1922 }
1923 }
1924
1925
1926 /* Remove this problem from the stack of dataflow problems. */
1927
1928 static void
1929 df_chain_remove_problem (void)
1930 {
1931 bitmap_iterator bi;
1932 unsigned int bb_index;
1933
1934 /* Wholesale destruction of the old chains. */
1935 if (df_chain->block_pool)
1936 free_alloc_pool (df_chain->block_pool);
1937
1938 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1939 {
1940 rtx insn;
1941 df_ref *def_rec;
1942 df_ref *use_rec;
1943 basic_block bb = BASIC_BLOCK (bb_index);
1944
1945 if (df_chain_problem_p (DF_DU_CHAIN))
1946 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1947 DF_REF_CHAIN (*def_rec) = NULL;
1948 if (df_chain_problem_p (DF_UD_CHAIN))
1949 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1950 DF_REF_CHAIN (*use_rec) = NULL;
1951
1952 FOR_BB_INSNS (bb, insn)
1953 {
1954 unsigned int uid = INSN_UID (insn);
1955
1956 if (INSN_P (insn))
1957 {
1958 if (df_chain_problem_p (DF_DU_CHAIN))
1959 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
1960 DF_REF_CHAIN (*def_rec) = NULL;
1961 if (df_chain_problem_p (DF_UD_CHAIN))
1962 {
1963 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
1964 DF_REF_CHAIN (*use_rec) = NULL;
1965 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
1966 DF_REF_CHAIN (*use_rec) = NULL;
1967 }
1968 }
1969 }
1970 }
1971
1972 bitmap_clear (df_chain->out_of_date_transfer_functions);
1973 df_chain->block_pool = NULL;
1974 }
1975
1976
1977 /* Remove the chain problem completely. */
1978
1979 static void
1980 df_chain_fully_remove_problem (void)
1981 {
1982 df_chain_remove_problem ();
1983 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
1984 free (df_chain);
1985 }
1986
1987
1988 /* Create def-use or use-def chains. */
1989
1990 static void
1991 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1992 {
1993 df_chain_remove_problem ();
1994 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
1995 sizeof (struct df_link), 50);
1996 df_chain->optional_p = true;
1997 }
1998
1999
2000 /* Reset all of the chains when the set of basic blocks changes. */
2001
2002 static void
2003 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2004 {
2005 df_chain_remove_problem ();
2006 }
2007
2008
2009 /* Create the chains for a list of USEs. */
2010
2011 static void
2012 df_chain_create_bb_process_use (bitmap local_rd,
2013 df_ref *use_rec,
2014 int top_flag)
2015 {
2016 bitmap_iterator bi;
2017 unsigned int def_index;
2018
2019 while (*use_rec)
2020 {
2021 df_ref use = *use_rec;
2022 unsigned int uregno = DF_REF_REGNO (use);
2023 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2024 || (uregno >= FIRST_PSEUDO_REGISTER))
2025 {
2026 /* Do not want to go through this for an uninitialized var. */
2027 int count = DF_DEFS_COUNT (uregno);
2028 if (count)
2029 {
2030 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2031 {
2032 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2033 unsigned int last_index = first_index + count - 1;
2034
2035 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2036 {
2037 df_ref def;
2038 if (def_index > last_index)
2039 break;
2040
2041 def = DF_DEFS_GET (def_index);
2042 if (df_chain_problem_p (DF_DU_CHAIN))
2043 df_chain_create (def, use);
2044 if (df_chain_problem_p (DF_UD_CHAIN))
2045 df_chain_create (use, def);
2046 }
2047 }
2048 }
2049 }
2050
2051 use_rec++;
2052 }
2053 }
2054
2055
2056 /* Create chains from reaching defs bitmaps for basic block BB. */
2057
2058 static void
2059 df_chain_create_bb (unsigned int bb_index)
2060 {
2061 basic_block bb = BASIC_BLOCK (bb_index);
2062 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2063 rtx insn;
2064 bitmap_head cpy;
2065
2066 bitmap_initialize (&cpy, &bitmap_default_obstack);
2067 bitmap_copy (&cpy, &bb_info->in);
2068 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2069
2070 /* Since we are going forwards, process the artificial uses first
2071 then the artificial defs second. */
2072
2073 #ifdef EH_USES
2074 /* Create the chains for the artificial uses from the EH_USES at the
2075 beginning of the block. */
2076
2077 /* Artificials are only hard regs. */
2078 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2079 df_chain_create_bb_process_use (&cpy,
2080 df_get_artificial_uses (bb->index),
2081 DF_REF_AT_TOP);
2082 #endif
2083
2084 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2085
2086 /* Process the regular instructions next. */
2087 FOR_BB_INSNS (bb, insn)
2088 if (INSN_P (insn))
2089 {
2090 unsigned int uid = INSN_UID (insn);
2091
2092 /* First scan the uses and link them up with the defs that remain
2093 in the cpy vector. */
2094 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2095 if (df->changeable_flags & DF_EQ_NOTES)
2096 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2097
2098 /* Since we are going forwards, process the defs second. */
2099 df_rd_simulate_one_insn (bb, insn, &cpy);
2100 }
2101
2102 /* Create the chains for the artificial uses of the hard registers
2103 at the end of the block. */
2104 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2105 df_chain_create_bb_process_use (&cpy,
2106 df_get_artificial_uses (bb->index),
2107 0);
2108
2109 bitmap_clear (&cpy);
2110 }
2111
2112 /* Create def-use or use-def chains from the reaching defs bitmaps for
2113 the basic blocks in ALL_BLOCKS. */
2114
2115 static void
2116 df_chain_finalize (bitmap all_blocks)
2117 {
2118 unsigned int bb_index;
2119 bitmap_iterator bi;
2120
2121 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2122 {
2123 df_chain_create_bb (bb_index);
2124 }
2125 }
2126
2127
2128 /* Free all storage associated with the problem. */
2129
2130 static void
2131 df_chain_free (void)
2132 {
2133 free_alloc_pool (df_chain->block_pool);
2134 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2135 free (df_chain);
2136 }
2137
2138
2139 /* Debugging info. */
2140
2141 static void
2142 df_chain_top_dump (basic_block bb, FILE *file)
2143 {
2144 if (df_chain_problem_p (DF_DU_CHAIN))
2145 {
2146 rtx insn;
2147 df_ref *def_rec = df_get_artificial_defs (bb->index);
2148 if (*def_rec)
2149 {
2150
2151 fprintf (file, ";; DU chains for artificial defs\n");
2152 while (*def_rec)
2153 {
2154 df_ref def = *def_rec;
2155 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2156 df_chain_dump (DF_REF_CHAIN (def), file);
2157 fprintf (file, "\n");
2158 def_rec++;
2159 }
2160 }
2161
2162 FOR_BB_INSNS (bb, insn)
2163 {
2164 if (INSN_P (insn))
2165 {
2166 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2167 def_rec = DF_INSN_INFO_DEFS (insn_info);
2168 if (*def_rec)
2169 {
2170 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2171 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2172
2173 while (*def_rec)
2174 {
2175 df_ref def = *def_rec;
2176 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2177 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2178 fprintf (file, "read/write ");
2179 df_chain_dump (DF_REF_CHAIN (def), file);
2180 fprintf (file, "\n");
2181 def_rec++;
2182 }
2183 }
2184 }
2185 }
2186 }
2187 }
2188
2189
2190 static void
2191 df_chain_bottom_dump (basic_block bb, FILE *file)
2192 {
2193 if (df_chain_problem_p (DF_UD_CHAIN))
2194 {
2195 rtx insn;
2196 df_ref *use_rec = df_get_artificial_uses (bb->index);
2197
2198 if (*use_rec)
2199 {
2200 fprintf (file, ";; UD chains for artificial uses\n");
2201 while (*use_rec)
2202 {
2203 df_ref use = *use_rec;
2204 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2205 df_chain_dump (DF_REF_CHAIN (use), file);
2206 fprintf (file, "\n");
2207 use_rec++;
2208 }
2209 }
2210
2211 FOR_BB_INSNS (bb, insn)
2212 {
2213 if (INSN_P (insn))
2214 {
2215 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2216 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2217 use_rec = DF_INSN_INFO_USES (insn_info);
2218 if (*use_rec || *eq_use_rec)
2219 {
2220 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2221 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2222
2223 while (*use_rec)
2224 {
2225 df_ref use = *use_rec;
2226 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2227 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2228 fprintf (file, "read/write ");
2229 df_chain_dump (DF_REF_CHAIN (use), file);
2230 fprintf (file, "\n");
2231 use_rec++;
2232 }
2233 while (*eq_use_rec)
2234 {
2235 df_ref use = *eq_use_rec;
2236 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2237 df_chain_dump (DF_REF_CHAIN (use), file);
2238 fprintf (file, "\n");
2239 eq_use_rec++;
2240 }
2241 }
2242 }
2243 }
2244 }
2245 }
2246
2247
2248 static struct df_problem problem_CHAIN =
2249 {
2250 DF_CHAIN, /* Problem id. */
2251 DF_NONE, /* Direction. */
2252 df_chain_alloc, /* Allocate the problem specific data. */
2253 df_chain_reset, /* Reset global information. */
2254 NULL, /* Free basic block info. */
2255 NULL, /* Local compute function. */
2256 NULL, /* Init the solution specific data. */
2257 NULL, /* Iterative solver. */
2258 NULL, /* Confluence operator 0. */
2259 NULL, /* Confluence operator n. */
2260 NULL, /* Transfer function. */
2261 df_chain_finalize, /* Finalize function. */
2262 df_chain_free, /* Free all of the problem information. */
2263 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2264 NULL, /* Debugging. */
2265 df_chain_top_dump, /* Debugging start block. */
2266 df_chain_bottom_dump, /* Debugging end block. */
2267 NULL, /* Incremental solution verify start. */
2268 NULL, /* Incremental solution verify end. */
2269 &problem_RD, /* Dependent problem. */
2270 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2271 TV_DF_CHAIN, /* Timing variable. */
2272 false /* Reset blocks on dropping out of blocks_to_analyze. */
2273 };
2274
2275
2276 /* Create a new DATAFLOW instance and add it to an existing instance
2277 of DF. The global df_chain instance is then used to get at the
2278 solution. */
2279
2280 void
2281 df_chain_add_problem (unsigned int chain_flags)
2282 {
2283 df_add_problem (&problem_CHAIN);
2284 df_chain->local_flags = chain_flags;
2285 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2286 }
2287
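/* Usage sketch (editorial illustration): a pass that wants both kinds of
   chains would typically do the following; df_remove_problem is assumed to
   be the usual way of dropping this optional problem once it is no longer
   needed.

       df_chain_add_problem (DF_DU_CHAIN + DF_UD_CHAIN);
       df_analyze ();
       ... DF_REF_CHAIN is now valid for every def and use ...
       df_remove_problem (df_chain);
*/
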
2288 #undef df_chain_problem_p
2289
2290 \f
2291 /*----------------------------------------------------------------------------
2292 WORD LEVEL LIVE REGISTERS
2293
2294 Find the locations in the function where any use of a pseudo can
2295 reach in the backwards direction. In and out bitvectors are built
2296 for each basic block. We only track pseudo registers that have a
2297 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2298 contain two bits corresponding to each of the subwords.
2299
2300 ----------------------------------------------------------------------------*/
2301
2302 /* Private data used to verify the solution for this problem. */
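/* Editorial example of the encoding described above: for a tracked
   double-word pseudo with register number R, bit 2*R of a bitvector tracks
   the first (lowpart) word and bit 2*R + 1 tracks the second word.  So a
   check of which halves are live at the end of BB might look like this
   (REGNO and the local names are made up):

       bitmap out = &df_word_lr_get_bb_info (bb->index)->out;
       bool low_live  = bitmap_bit_p (out, 2 * regno);
       bool high_live = bitmap_bit_p (out, 2 * regno + 1);
*/
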
2303 struct df_word_lr_problem_data
2304 {
2305 /* An obstack for the bitmaps we need for this problem. */
2306 bitmap_obstack word_lr_bitmaps;
2307 };
2308
2309
2310 /* Free basic block info. */
2311
2312 static void
2313 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2314 void *vbb_info)
2315 {
2316 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2317 if (bb_info)
2318 {
2319 bitmap_clear (&bb_info->use);
2320 bitmap_clear (&bb_info->def);
2321 bitmap_clear (&bb_info->in);
2322 bitmap_clear (&bb_info->out);
2323 }
2324 }
2325
2326
2327 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2328 not touched unless the block is new. */
2329
2330 static void
2331 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2332 {
2333 unsigned int bb_index;
2334 bitmap_iterator bi;
2335 basic_block bb;
2336 struct df_word_lr_problem_data *problem_data
2337 = XNEW (struct df_word_lr_problem_data);
2338
2339 df_word_lr->problem_data = problem_data;
2340
2341 df_grow_bb_info (df_word_lr);
2342
2343 /* Create the mapping from regnos to slots. This does not change
2344 unless the problem is destroyed and recreated. In particular, if
2345 we end up deleting the only insn that used a subreg, we do not
2346 want to redo the mapping because this would invalidate everything
2347 else. */
2348
2349 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2350
2351 FOR_EACH_BB (bb)
2352 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2353
2354 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2355 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2356
2357 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2358 {
2359 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2360
2361 /* When bitmaps are already initialized, just clear them. */
2362 if (bb_info->use.obstack)
2363 {
2364 bitmap_clear (&bb_info->def);
2365 bitmap_clear (&bb_info->use);
2366 }
2367 else
2368 {
2369 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2370 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2371 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2372 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2373 }
2374 }
2375
2376 df_word_lr->optional_p = true;
2377 }
2378
2379
2380 /* Reset the global solution for recalculation. */
2381
2382 static void
2383 df_word_lr_reset (bitmap all_blocks)
2384 {
2385 unsigned int bb_index;
2386 bitmap_iterator bi;
2387
2388 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2389 {
2390 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2391 gcc_assert (bb_info);
2392 bitmap_clear (&bb_info->in);
2393 bitmap_clear (&bb_info->out);
2394 }
2395 }
2396
2397 /* Examine REF, and if it is for a reg we're interested in, set or
2398 clear the bits corresponding to its subwords from the bitmap
2399 according to IS_SET. LIVE is the bitmap we should update. We do
2400 not track hard regs or pseudos of any size other than 2 *
2401 UNITS_PER_WORD.
2402 We return true if we changed the bitmap, or if we encountered a register
2403 we're not tracking. */
2404
2405 bool
2406 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2407 {
2408 rtx orig_reg = DF_REF_REG (ref);
2409 rtx reg = orig_reg;
2410 enum machine_mode reg_mode;
2411 unsigned regno;
2412 /* Left at -1 for whole accesses. */
2413 int which_subword = -1;
2414 bool changed = false;
2415
2416 if (GET_CODE (reg) == SUBREG)
2417 reg = SUBREG_REG (orig_reg);
2418 regno = REGNO (reg);
2419 reg_mode = GET_MODE (reg);
2420 if (regno < FIRST_PSEUDO_REGISTER
2421 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2422 return true;
2423
2424 if (GET_CODE (orig_reg) == SUBREG
2425 && df_read_modify_subreg_p (orig_reg))
2426 {
2427 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2428 if (subreg_lowpart_p (orig_reg))
2429 which_subword = 0;
2430 else
2431 which_subword = 1;
2432 }
2433 if (is_set)
2434 {
2435 if (which_subword != 1)
2436 changed |= bitmap_set_bit (live, regno * 2);
2437 if (which_subword != 0)
2438 changed |= bitmap_set_bit (live, regno * 2 + 1);
2439 }
2440 else
2441 {
2442 if (which_subword != 1)
2443 changed |= bitmap_clear_bit (live, regno * 2);
2444 if (which_subword != 0)
2445 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2446 }
2447 return changed;
2448 }
2449
2450 /* Compute local live register info for basic block BB. */
2451
2452 static void
2453 df_word_lr_bb_local_compute (unsigned int bb_index)
2454 {
2455 basic_block bb = BASIC_BLOCK (bb_index);
2456 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2457 rtx insn;
2458 df_ref *def_rec;
2459 df_ref *use_rec;
2460
2461 /* Ensure that artificial refs don't contain references to pseudos. */
2462 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2463 {
2464 df_ref def = *def_rec;
2465 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2466 }
2467
2468 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2469 {
2470 df_ref use = *use_rec;
2471 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2472 }
2473
2474 FOR_BB_INSNS_REVERSE (bb, insn)
2475 {
2476 unsigned int uid = INSN_UID (insn);
2477
2478 if (!NONDEBUG_INSN_P (insn))
2479 continue;
2480 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2481 {
2482 df_ref def = *def_rec;
2483 /* If the def is to only part of the reg, it does
2484 not kill the other defs that reach here. */
2485 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2486 {
2487 df_word_lr_mark_ref (def, true, &bb_info->def);
2488 df_word_lr_mark_ref (def, false, &bb_info->use);
2489 }
2490 }
2491 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2492 {
2493 df_ref use = *use_rec;
2494 df_word_lr_mark_ref (use, true, &bb_info->use);
2495 }
2496 }
2497 }
2498
2499
2500 /* Compute local live register info for each basic block within BLOCKS. */
2501
2502 static void
2503 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2504 {
2505 unsigned int bb_index;
2506 bitmap_iterator bi;
2507
2508 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2509 {
2510 if (bb_index == EXIT_BLOCK)
2511 {
2512 unsigned regno;
2513 bitmap_iterator bi;
2514 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2515 regno, bi)
2516 gcc_unreachable ();
2517 }
2518 else
2519 df_word_lr_bb_local_compute (bb_index);
2520 }
2521
2522 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2523 }
2524
2525
2526 /* Initialize the solution vectors. */
2527
2528 static void
2529 df_word_lr_init (bitmap all_blocks)
2530 {
2531 unsigned int bb_index;
2532 bitmap_iterator bi;
2533
2534 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2535 {
2536 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2537 bitmap_copy (&bb_info->in, &bb_info->use);
2538 bitmap_clear (&bb_info->out);
2539 }
2540 }
2541
2542
2543 /* Confluence operator: OR the destination's IN set into the source's OUT set. */
2544
2545 static bool
2546 df_word_lr_confluence_n (edge e)
2547 {
2548 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2549 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2550
2551 return bitmap_ior_into (op1, op2);
2552 }
2553
2554
2555 /* Transfer function. */
2556
2557 static bool
2558 df_word_lr_transfer_function (int bb_index)
2559 {
2560 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2561 bitmap in = &bb_info->in;
2562 bitmap out = &bb_info->out;
2563 bitmap use = &bb_info->use;
2564 bitmap def = &bb_info->def;
2565
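  /* This is the standard backward liveness equation
     IN = USE | (OUT & ~DEF); bitmap_ior_and_compl returns true
     iff IN changed, which tells the solver to keep iterating.  */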
2566 return bitmap_ior_and_compl (in, use, out, def);
2567 }
2568
2569
2570 /* Free all storage associated with the problem. */
2571
2572 static void
2573 df_word_lr_free (void)
2574 {
2575 struct df_word_lr_problem_data *problem_data
2576 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2577
2578 if (df_word_lr->block_info)
2579 {
2580 df_word_lr->block_info_size = 0;
2581 free (df_word_lr->block_info);
2582 df_word_lr->block_info = NULL;
2583 }
2584
2585 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2586 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2587 free (problem_data);
2588 free (df_word_lr);
2589 }
2590
2591
2592 /* Debugging info at top of bb. */
2593
2594 static void
2595 df_word_lr_top_dump (basic_block bb, FILE *file)
2596 {
2597 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2598 if (!bb_info)
2599 return;
2600
2601 fprintf (file, ";; blr in \t");
2602 df_print_word_regset (file, &bb_info->in);
2603 fprintf (file, ";; blr use \t");
2604 df_print_word_regset (file, &bb_info->use);
2605 fprintf (file, ";; blr def \t");
2606 df_print_word_regset (file, &bb_info->def);
2607 }
2608
2609
2610 /* Debugging info at bottom of bb. */
2611
2612 static void
2613 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2614 {
2615 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2616 if (!bb_info)
2617 return;
2618
2619 fprintf (file, ";; blr out \t");
2620 df_print_word_regset (file, &bb_info->out);
2621 }
2622
2623
2624 /* All of the information associated with every instance of the problem. */
2625
2626 static struct df_problem problem_WORD_LR =
2627 {
2628 DF_WORD_LR, /* Problem id. */
2629 DF_BACKWARD, /* Direction. */
2630 df_word_lr_alloc, /* Allocate the problem specific data. */
2631 df_word_lr_reset, /* Reset global information. */
2632 df_word_lr_free_bb_info, /* Free basic block info. */
2633 df_word_lr_local_compute, /* Local compute function. */
2634 df_word_lr_init, /* Init the solution specific data. */
2635 df_worklist_dataflow, /* Worklist solver. */
2636 NULL, /* Confluence operator 0. */
2637 df_word_lr_confluence_n, /* Confluence operator n. */
2638 df_word_lr_transfer_function, /* Transfer function. */
2639 NULL, /* Finalize function. */
2640 df_word_lr_free, /* Free all of the problem information. */
2641 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2642 NULL, /* Debugging. */
2643 df_word_lr_top_dump, /* Debugging start block. */
2644 df_word_lr_bottom_dump, /* Debugging end block. */
2645 NULL, /* Incremental solution verify start. */
2646 NULL, /* Incremental solution verify end. */
2647 NULL, /* Dependent problem. */
2648 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2649 TV_DF_WORD_LR, /* Timing variable. */
2650 false /* Reset blocks on dropping out of blocks_to_analyze. */
2651 };
2652
2653
2654 /* Create a new DATAFLOW instance and add it to an existing instance
2655 of DF. The global df_word_lr instance is then used to get at the
2656 solution. */
2657
2658 void
2659 df_word_lr_add_problem (void)
2660 {
2661 df_add_problem (&problem_WORD_LR);
2662 /* These will be initialized when df_scan_blocks processes each
2663 block. */
2664 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2665 }
2666
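/* Usage sketch (editorial illustration): a word-level dead-code style
   client would request the problem and then walk each block backwards with
   the simulation helpers below, starting from a private copy of the
   block's out-set.  LIVE, BB and INSN are placeholder names.

       df_word_lr_add_problem ();
       df_analyze ();

       bitmap_head live;
       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, &df_word_lr_get_bb_info (bb->index)->out);
       FOR_BB_INSNS_REVERSE (bb, insn)
         if (NONDEBUG_INSN_P (insn))
           {
             bool needed = df_word_lr_simulate_defs (insn, &live);
             ... if NEEDED is false, every word set by INSN is dead here ...
             df_word_lr_simulate_uses (insn, &live);
           }
       bitmap_clear (&live);
*/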
2667
2668 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2669 any bits, which is used by the caller to determine whether a set is
2670 necessary. We also return true if there are other reasons not to delete
2671 an insn. */
2672
2673 bool
2674 df_word_lr_simulate_defs (rtx insn, bitmap live)
2675 {
2676 bool changed = false;
2677 df_ref *def_rec;
2678 unsigned int uid = INSN_UID (insn);
2679
2680 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2681 {
2682 df_ref def = *def_rec;
2683 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2684 changed = true;
2685 else
2686 changed |= df_word_lr_mark_ref (*def_rec, false, live);
2687 }
2688 return changed;
2689 }
2690
2691
2692 /* Simulate the effects of the uses of INSN on LIVE. */
2693
2694 void
2695 df_word_lr_simulate_uses (rtx insn, bitmap live)
2696 {
2697 df_ref *use_rec;
2698 unsigned int uid = INSN_UID (insn);
2699
2700 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2701 df_word_lr_mark_ref (*use_rec, true, live);
2702 }
2703 \f
2704 /*----------------------------------------------------------------------------
2705 This problem computes REG_DEAD and REG_UNUSED notes.
2706 ----------------------------------------------------------------------------*/
2707
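/* Usage sketch (editorial illustration): passes that need up-to-date
   REG_DEAD and REG_UNUSED notes add this problem and rerun the analysis;
   the notes themselves are the solution and end up attached to the insns.

       df_note_add_problem ();
       df_analyze ();
       ... find_reg_note (insn, REG_DEAD, reg) etc. can now be trusted ...
*/
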
2708 static void
2709 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2710 {
2711 df_note->optional_p = true;
2712 }
2713
2714 /* This is only used if REG_DEAD_DEBUGGING is in effect. */
2715 static void
2716 df_print_note (const char *prefix, rtx insn, rtx note)
2717 {
2718 if (dump_file)
2719 {
2720 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2721 print_rtl (dump_file, note);
2722 fprintf (dump_file, "\n");
2723 }
2724 }
2725
2726
2727 /* After reg-stack, the x86 floating point stack regs are difficult to
2728 analyze because of all of the pushes, pops and rotations. Thus, we
2729 just leave the notes alone. */
2730
2731 #ifdef STACK_REGS
2732 static inline bool
2733 df_ignore_stack_reg (int regno)
2734 {
2735 return regstack_completed
2736 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2737 }
2738 #else
2739 static inline bool
2740 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2741 {
2742 return false;
2743 }
2744 #endif
2745
2746
2747 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN (they are
2748 simply freed, not saved). Also remove REG_EQUAL/REG_EQUIV notes
2749 that refer to dead pseudos, using LIVE as the bitmap of currently
2750 live registers. */
2751
2752 static void
2753 df_kill_notes (rtx insn, bitmap live)
2754 {
2755 rtx *pprev = &REG_NOTES (insn);
2756 rtx link = *pprev;
2757
2758 while (link)
2759 {
2760 switch (REG_NOTE_KIND (link))
2761 {
2762 case REG_DEAD:
2763 /* After reg-stack, we need to ignore any death notes
2764 for the stack registers. */
2765 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2766 {
2767 pprev = &XEXP (link, 1);
2768 link = *pprev;
2769 }
2770 else
2771 {
2772 rtx next = XEXP (link, 1);
2773 if (REG_DEAD_DEBUGGING)
2774 df_print_note ("deleting: ", insn, link);
2775 free_EXPR_LIST_node (link);
2776 *pprev = link = next;
2777 }
2778 break;
2779
2780 case REG_UNUSED:
2781 /* After reg-stack, we need to ignore any unused notes
2782 for the stack registers. */
2783 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2784 {
2785 pprev = &XEXP (link, 1);
2786 link = *pprev;
2787 }
2788 else
2789 {
2790 rtx next = XEXP (link, 1);
2791 if (REG_DEAD_DEBUGGING)
2792 df_print_note ("deleting: ", insn, link);
2793 free_EXPR_LIST_node (link);
2794 *pprev = link = next;
2795 }
2796 break;
2797
2798 case REG_EQUAL:
2799 case REG_EQUIV:
2800 {
2801 /* Remove the notes that refer to dead registers. As we have at most
2802 one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
2803 so we need to purge the complete EQ_USES vector when removing
2804 the note using df_notes_rescan. */
2805 df_ref *use_rec;
2806 bool deleted = false;
2807
2808 for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
2809 {
2810 df_ref use = *use_rec;
2811 if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
2812 && DF_REF_LOC (use)
2813 && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
2814 && ! bitmap_bit_p (live, DF_REF_REGNO (use))
2815 && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
2816 {
2817 deleted = true;
2818 break;
2819 }
2820 }
2821 if (deleted)
2822 {
2823 rtx next;
2824 if (REG_DEAD_DEBUGGING)
2825 df_print_note ("deleting: ", insn, link);
2826 next = XEXP (link, 1);
2827 free_EXPR_LIST_node (link);
2828 *pprev = link = next;
2829 df_notes_rescan (insn);
2830 }
2831 else
2832 {
2833 pprev = &XEXP (link, 1);
2834 link = *pprev;
2835 }
2836 break;
2837 }
2838 default:
2839 pprev = &XEXP (link, 1);
2840 link = *pprev;
2841 break;
2842 }
2843 }
2844 }
2845
2846
2847 /* Set a NOTE_TYPE note for REG in INSN. */
2848
2849 static inline void
2850 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2851 {
2852 gcc_checking_assert (!DEBUG_INSN_P (insn));
2853 add_reg_note (insn, note_type, reg);
2854 }
2855
2856 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2857 arguments. Return true if the register value described by MWS's
2858 mw_reg is known to be completely unused, and if mw_reg can therefore
2859 be used in a REG_UNUSED note. */
2860
2861 static bool
2862 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2863 bitmap live, bitmap artificial_uses)
2864 {
2865 unsigned int r;
2866
2867 /* If MWS describes a partial reference, create REG_UNUSED notes for
2868 individual hard registers. */
2869 if (mws->flags & DF_REF_PARTIAL)
2870 return false;
2871
2872 /* Likewise if some part of the register is used. */
2873 for (r = mws->start_regno; r <= mws->end_regno; r++)
2874 if (bitmap_bit_p (live, r)
2875 || bitmap_bit_p (artificial_uses, r))
2876 return false;
2877
2878 gcc_assert (REG_P (mws->mw_reg));
2879 return true;
2880 }
2881
2882
2883 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2884 based on the bits in LIVE. Do not generate notes for registers in
2885 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2886 not generated if the reg is both read and written by the
2887 instruction.
2888 */
2889
2890 static void
2891 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2892 bitmap live, bitmap do_not_gen,
2893 bitmap artificial_uses,
2894 struct dead_debug *debug)
2895 {
2896 unsigned int r;
2897
2898 if (REG_DEAD_DEBUGGING && dump_file)
2899 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
2900 mws->start_regno, mws->end_regno);
2901
2902 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
2903 {
2904 unsigned int regno = mws->start_regno;
2905 df_set_note (REG_UNUSED, insn, mws->mw_reg);
2906 dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
2907
2908 if (REG_DEAD_DEBUGGING)
2909 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2910
2911 bitmap_set_bit (do_not_gen, regno);
2912 /* Only do this if the value is totally dead. */
2913 }
2914 else
2915 for (r = mws->start_regno; r <= mws->end_regno; r++)
2916 {
2917 if (!bitmap_bit_p (live, r)
2918 && !bitmap_bit_p (artificial_uses, r))
2919 {
2920 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
2921 dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
2922 if (REG_DEAD_DEBUGGING)
2923 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
2924 }
2925 bitmap_set_bit (do_not_gen, r);
2926 }
2927 }
2928
2929
2930 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
2931 arguments. Return true if the register value described by MWS's
2932 mw_reg is known to be completely dead, and if mw_reg can therefore
2933 be used in a REG_DEAD note. */
2934
2935 static bool
2936 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
2937 bitmap live, bitmap artificial_uses,
2938 bitmap do_not_gen)
2939 {
2940 unsigned int r;
2941
2942 /* If MWS describes a partial reference, create REG_DEAD notes for
2943 individual hard registers. */
2944 if (mws->flags & DF_REF_PARTIAL)
2945 return false;
2946
2947 /* Likewise if some part of the register is not dead. */
2948 for (r = mws->start_regno; r <= mws->end_regno; r++)
2949 if (bitmap_bit_p (live, r)
2950 || bitmap_bit_p (artificial_uses, r)
2951 || bitmap_bit_p (do_not_gen, r))
2952 return false;
2953
2954 gcc_assert (REG_P (mws->mw_reg));
2955 return true;
2956 }
2957
2958 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
2959 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
2960 from being set if the instruction both reads and writes the
2961 register. */
2962
2963 static void
2964 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2965 bitmap live, bitmap do_not_gen,
2966 bitmap artificial_uses, bool *added_notes_p)
2967 {
2968 unsigned int r;
2969 bool is_debug = *added_notes_p;
2970
2971 *added_notes_p = false;
2972
2973 if (REG_DEAD_DEBUGGING && dump_file)
2974 {
2975 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
2976 mws->start_regno, mws->end_regno);
2977 df_print_regset (dump_file, do_not_gen);
2978 fprintf (dump_file, " live =");
2979 df_print_regset (dump_file, live);
2980 fprintf (dump_file, " artificial uses =");
2981 df_print_regset (dump_file, artificial_uses);
2982 }
2983
2984 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
2985 {
2986 if (is_debug)
2987 {
2988 *added_notes_p = true;
2989 return;
2990 }
2991 /* Add a dead note for the entire multi word register. */
2992 df_set_note (REG_DEAD, insn, mws->mw_reg);
2993 if (REG_DEAD_DEBUGGING)
2994 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2995 }
2996 else
2997 {
2998 for (r = mws->start_regno; r <= mws->end_regno; r++)
2999 if (!bitmap_bit_p (live, r)
3000 && !bitmap_bit_p (artificial_uses, r)
3001 && !bitmap_bit_p (do_not_gen, r))
3002 {
3003 if (is_debug)
3004 {
3005 *added_notes_p = true;
3006 return;
3007 }
3008 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
3009 if (REG_DEAD_DEBUGGING)
3010 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3011 }
3012 }
3013 return;
3014 }
3015
3016
3017 /* Create a REG_UNUSED note if necessary for DEF in INSN, based on the
3018 bits in LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3019
3020 static void
3021 df_create_unused_note (rtx insn, df_ref def,
3022 bitmap live, bitmap artificial_uses,
3023 struct dead_debug *debug)
3024 {
3025 unsigned int dregno = DF_REF_REGNO (def);
3026
3027 if (REG_DEAD_DEBUGGING && dump_file)
3028 {
3029 fprintf (dump_file, " regular looking at def ");
3030 df_ref_debug (def, dump_file);
3031 }
3032
3033 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3034 || bitmap_bit_p (live, dregno)
3035 || bitmap_bit_p (artificial_uses, dregno)
3036 || df_ignore_stack_reg (dregno)))
3037 {
3038 rtx reg = (DF_REF_LOC (def))
3039 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3040 df_set_note (REG_UNUSED, insn, reg);
3041 dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3042 if (REG_DEAD_DEBUGGING)
3043 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3044 }
3045
3046 return;
3047 }
3048
3049
3050 /* Initialize DEBUG to an empty list, and clear USED, if given. */
3051 void
3052 dead_debug_init (struct dead_debug *debug, bitmap used)
3053 {
3054 debug->head = NULL;
3055 debug->used = used;
3056 debug->to_rescan = NULL;
3057 if (used)
3058 bitmap_clear (used);
3059 }
3060
3061 /* Reset all debug uses in HEAD, and clear DEBUG->to_rescan bits of
3062 each reset insn. DEBUG is not otherwise modified. If HEAD is
3063 DEBUG->head, DEBUG->head will be set to NULL at the end.
3064 Otherwise, entries from DEBUG->head that pertain to reset insns
3065 will be removed, and only then rescanned. */
3066
3067 static void
3068 dead_debug_reset_uses (struct dead_debug *debug, struct dead_debug_use *head)
3069 {
3070 bool got_head = (debug->head == head);
3071 bitmap rescan;
3072 struct dead_debug_use **tailp = &debug->head;
3073 struct dead_debug_use *cur;
3074 bitmap_iterator bi;
3075 unsigned int uid;
3076
3077 if (got_head)
3078 rescan = NULL;
3079 else
3080 rescan = BITMAP_ALLOC (NULL);
3081
3082 while (head)
3083 {
3084 struct dead_debug_use *next = head->next;
3085 rtx insn;
3086
3087 insn = DF_REF_INSN (head->use);
3088 if (!next || DF_REF_INSN (next->use) != insn)
3089 {
3090 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3091 if (got_head)
3092 df_insn_rescan_debug_internal (insn);
3093 else
3094 bitmap_set_bit (rescan, INSN_UID (insn));
3095 if (debug->to_rescan)
3096 bitmap_clear_bit (debug->to_rescan, INSN_UID (insn));
3097 }
3098 XDELETE (head);
3099 head = next;
3100 }
3101
3102 if (got_head)
3103 {
3104 debug->head = NULL;
3105 return;
3106 }
3107
3108 while ((cur = *tailp))
3109 if (bitmap_bit_p (rescan, INSN_UID (DF_REF_INSN (cur->use))))
3110 {
3111 *tailp = cur->next;
3112 XDELETE (cur);
3113 }
3114 else
3115 tailp = &cur->next;
3116
3117 EXECUTE_IF_SET_IN_BITMAP (rescan, 0, uid, bi)
3118 {
3119 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
3120 if (insn_info)
3121 df_insn_rescan_debug_internal (insn_info->insn);
3122 }
3123
3124 BITMAP_FREE (rescan);
3125 }
3126
3127 /* Reset all debug insns with pending uses. Release DEBUG's used
3128 bitmap unless it is USED, which must be the same bitmap that was
3129 passed to dead_debug_init. */
3130 void
3131 dead_debug_finish (struct dead_debug *debug, bitmap used)
3132 {
3133 if (debug->used != used)
3134 BITMAP_FREE (debug->used);
3135
3136 dead_debug_reset_uses (debug, debug->head);
3137
3138 if (debug->to_rescan)
3139 {
3140 bitmap_iterator bi;
3141 unsigned int uid;
3142
3143 EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi)
3144 {
3145 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
3146 if (insn_info)
3147 df_insn_rescan (insn_info->insn);
3148 }
3149 BITMAP_FREE (debug->to_rescan);
3150 }
3151 }
3152
3153 /* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug
3154 insn. Create a bitmap for DEBUG as needed. */
3155 void
3156 dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno)
3157 {
3158 struct dead_debug_use *newddu = XNEW (struct dead_debug_use);
3159
3160 newddu->use = use;
3161 newddu->next = debug->head;
3162 debug->head = newddu;
3163
3164 if (!debug->used)
3165 debug->used = BITMAP_ALLOC (NULL);
3166
3167 /* ??? If we dealt with split multi-registers below, we should set
3168 all registers for the used mode in case of hardware
3169 registers. */
3170 bitmap_set_bit (debug->used, uregno);
3171 }
3172
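/* Lifecycle sketch (editorial illustration) of the dead_debug machinery as
   it is used by df_note_bb_compute below; DEBUG, USE, UREGNO and INSN are
   placeholder names.

       struct dead_debug debug;
       dead_debug_init (&debug, NULL);
       ... while scanning a block backwards ...
       ... for a use of a dead UREGNO inside a debug insn:
             dead_debug_add (&debug, use, uregno);
       ... where UREGNO really dies (or is set), materialize a debug temp:
             dead_debug_insert_temp (&debug, uregno, insn,
                                     DEBUG_TEMP_BEFORE_WITH_REG);
       ... when the block is done:
             dead_debug_finish (&debug, NULL);
*/
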
3173 /* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
3174 before or after INSN (depending on WHERE), that binds a debug temp
3175 to the widest-mode use of UREGNO, if WHERE is *_WITH_REG, or the
3176 value stored in UREGNO by INSN otherwise, and replace all uses of
3177 UREGNO in DEBUG with uses of the debug temp. INSN must be where
3178 UREGNO dies, if WHERE is *_BEFORE_*, or where it is set otherwise.
3179 Return the number of debug insns emitted. */
3180 int
3181 dead_debug_insert_temp (struct dead_debug *debug, unsigned int uregno,
3182 rtx insn, enum debug_temp_where where)
3183 {
3184 struct dead_debug_use **tailp = &debug->head;
3185 struct dead_debug_use *cur;
3186 struct dead_debug_use *uses = NULL;
3187 struct dead_debug_use **usesp = &uses;
3188 rtx reg = NULL;
3189 rtx breg;
3190 rtx dval;
3191 rtx bind;
3192
3193 if (!debug->used || !bitmap_clear_bit (debug->used, uregno))
3194 return 0;
3195
3196 /* Move all uses of uregno from debug->head to uses, setting mode to
3197 the widest referenced mode. */
3198 while ((cur = *tailp))
3199 {
3200 if (DF_REF_REGNO (cur->use) == uregno)
3201 {
3202 *usesp = cur;
3203 usesp = &cur->next;
3204 *tailp = cur->next;
3205 cur->next = NULL;
3206 if (!reg
3207 || (GET_MODE_BITSIZE (GET_MODE (reg))
3208 < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use)))))
3209 reg = *DF_REF_REAL_LOC (cur->use);
3210 }
3211 else
3212 tailp = &(*tailp)->next;
3213 }
3214
3215 /* We may have dangling bits in debug->used for registers that were part
3216 of a multi-register use, one component of which has been reset. */
3217 if (reg == NULL)
3218 {
3219 gcc_checking_assert (!uses);
3220 return 0;
3221 }
3222
3223 gcc_checking_assert (uses);
3224
3225 breg = reg;
3226 /* Recover the expression INSN stores in REG. */
3227 if (where == DEBUG_TEMP_BEFORE_WITH_VALUE)
3228 {
3229 rtx set = single_set (insn);
3230 rtx dest, src;
3231
3232 if (set)
3233 {
3234 dest = SET_DEST (set);
3235 src = SET_SRC (set);
3236 /* Lose if the REG-setting insn is a CALL. */
3237 if (GET_CODE (src) == CALL)
3238 {
3239 while (uses)
3240 {
3241 cur = uses->next;
3242 XDELETE (uses);
3243 uses = cur;
3244 }
3245 return 0;
3246 }
3247 }
3248
3249 /* ??? Should we try to extract it from a PARALLEL? */
3250 if (!set)
3251 breg = NULL;
3252 /* Cool, it's the same REG, we can use SRC. */
3253 else if (dest == reg)
3254 breg = copy_rtx (src);
3255 else if (REG_P (dest))
3256 {
3257 /* Hmm... Something's fishy, we should be setting REG here. */
3258 if (REGNO (dest) != REGNO (reg))
3259 breg = NULL;
3260 /* If we're not overwriting all the hardware registers that
3261 setting REG in its mode would, we won't know what to bind
3262 the debug temp to. ??? We could bind the debug_expr to a
3263 CONCAT or PARALLEL with the split multi-registers, and
3264 replace them as we found the corresponding sets. */
3265 else if (REGNO (reg) < FIRST_PSEUDO_REGISTER
3266 && (hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]
3267 != hard_regno_nregs[REGNO (reg)][GET_MODE (dest)]))
3268 breg = NULL;
3269 /* Ok, it's the same (hardware) REG, but with a different
3270 mode, so SUBREG it. */
3271 else
3272 breg = lowpart_subreg (GET_MODE (reg), copy_rtx (src),
3273 GET_MODE (dest));
3274 }
3275 else if (GET_CODE (dest) == SUBREG)
3276 {
3277 /* We should be setting REG here. Lose. */
3278 if (REGNO (SUBREG_REG (dest)) != REGNO (reg))
3279 breg = NULL;
3280 /* Lose if we're setting something other than the lowpart of
3281 REG. */
3282 else if (!subreg_lowpart_p (dest))
3283 breg = NULL;
3284 /* If we're not overwriting all the hardware registers that
3285 setting REG in its mode would, we won't know what to bind
3286 the debug temp to. */
3287 else if (REGNO (reg) < FIRST_PSEUDO_REGISTER
3288 && (hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]
3289 != hard_regno_nregs[REGNO (reg)][GET_MODE (dest)]))
3290 breg = NULL;
3291 /* Yay, we can use SRC, just adjust its mode. */
3292 else
3293 breg = lowpart_subreg (GET_MODE (reg), copy_rtx (src),
3294 GET_MODE (dest));
3295 }
3296 /* Oh well, we're out of luck. */
3297 else
3298 breg = NULL;
3299
3300 /* We couldn't figure out the value stored in REG, so reset all
3301 of its pending debug uses. */
3302 if (!breg)
3303 {
3304 dead_debug_reset_uses (debug, uses);
3305 return 0;
3306 }
3307 }
3308
3309 /* If there's a single (debug) use of an otherwise unused REG, and
3310 the debug use is not part of a larger expression, then it
3311 probably doesn't make sense to introduce a new debug temp. */
3312 if (where == DEBUG_TEMP_AFTER_WITH_REG && !uses->next)
3313 {
3314 rtx next = DF_REF_INSN (uses->use);
3315
3316 if (DEBUG_INSN_P (next) && reg == INSN_VAR_LOCATION_LOC (next))
3317 {
3318 XDELETE (uses);
3319 return 0;
3320 }
3321 }
3322
3323 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3324 dval = make_debug_expr_from_rtl (reg);
3325
3326 /* Emit a debug bind insn before or after INSN, depending on WHERE. */
3327 bind = gen_rtx_VAR_LOCATION (GET_MODE (reg),
3328 DEBUG_EXPR_TREE_DECL (dval), breg,
3329 VAR_INIT_STATUS_INITIALIZED);
3330
3331 if (where == DEBUG_TEMP_AFTER_WITH_REG)
3332 bind = emit_debug_insn_after (bind, insn);
3333 else
3334 bind = emit_debug_insn_before (bind, insn);
3335 df_insn_rescan (bind);
3336
3337 /* Adjust all uses. */
3338 while ((cur = uses))
3339 {
3340 if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg))
3341 *DF_REF_REAL_LOC (cur->use) = dval;
3342 else
3343 *DF_REF_REAL_LOC (cur->use)
3344 = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
3345 /* ??? Should we simplify subreg of subreg? */
3346 if (debug->to_rescan == NULL)
3347 debug->to_rescan = BITMAP_ALLOC (NULL);
3348 bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use)));
3349 uses = cur->next;
3350 XDELETE (cur);
3351 }
3352
3353 return 1;
3354 }
3355
3356 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3357 info: lifetime, bb, and number of defs and uses for basic block
3358 BB. The three bitvectors are scratch regs used here. */
3359
3360 static void
3361 df_note_bb_compute (unsigned int bb_index,
3362 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3363 {
3364 basic_block bb = BASIC_BLOCK (bb_index);
3365 rtx insn;
3366 df_ref *def_rec;
3367 df_ref *use_rec;
3368 struct dead_debug debug;
3369
3370 dead_debug_init (&debug, NULL);
3371
3372 bitmap_copy (live, df_get_live_out (bb));
3373 bitmap_clear (artificial_uses);
3374
3375 if (REG_DEAD_DEBUGGING && dump_file)
3376 {
3377 fprintf (dump_file, "live at bottom ");
3378 df_print_regset (dump_file, live);
3379 }
3380
3381 /* Process the artificial defs and uses at the bottom of the block
3382 to begin processing. */
3383 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3384 {
3385 df_ref def = *def_rec;
3386
3387 if (REG_DEAD_DEBUGGING && dump_file)
3388 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3389
3390 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3391 bitmap_clear_bit (live, DF_REF_REGNO (def));
3392 }
3393
3394 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3395 {
3396 df_ref use = *use_rec;
3397 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3398 {
3399 unsigned int regno = DF_REF_REGNO (use);
3400 bitmap_set_bit (live, regno);
3401
3402 /* Notes are not generated for any of the artificial registers
3403 at the bottom of the block. */
3404 bitmap_set_bit (artificial_uses, regno);
3405 }
3406 }
3407
3408 if (REG_DEAD_DEBUGGING && dump_file)
3409 {
3410 fprintf (dump_file, "live before artificials out ");
3411 df_print_regset (dump_file, live);
3412 }
3413
3414 FOR_BB_INSNS_REVERSE (bb, insn)
3415 {
3416 unsigned int uid = INSN_UID (insn);
3417 struct df_mw_hardreg **mws_rec;
3418 int debug_insn;
3419
3420 if (!INSN_P (insn))
3421 continue;
3422
3423 debug_insn = DEBUG_INSN_P (insn);
3424
3425 bitmap_clear (do_not_gen);
3426 df_kill_notes (insn, live);
3427
3428 /* Process the defs. */
3429 if (CALL_P (insn))
3430 {
3431 if (REG_DEAD_DEBUGGING && dump_file)
3432 {
3433 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3434 df_print_regset (dump_file, live);
3435 }
3436
3437 /* We only care about real sets for calls. Clobbers cannot
3438 be depended on to really die. */
3439 mws_rec = DF_INSN_UID_MWS (uid);
3440 while (*mws_rec)
3441 {
3442 struct df_mw_hardreg *mws = *mws_rec;
3443 if ((DF_MWS_REG_DEF_P (mws))
3444 && !df_ignore_stack_reg (mws->start_regno))
3445 df_set_unused_notes_for_mw (insn,
3446 mws, live, do_not_gen,
3447 artificial_uses, &debug);
3448 mws_rec++;
3449 }
3450
3451 /* All of the defs except the return value are some sort of
3452 clobber. This code is for the return. */
3453 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3454 {
3455 df_ref def = *def_rec;
3456 unsigned int dregno = DF_REF_REGNO (def);
3457 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3458 {
3459 df_create_unused_note (insn,
3460 def, live, artificial_uses, &debug);
3461 bitmap_set_bit (do_not_gen, dregno);
3462 }
3463
3464 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3465 bitmap_clear_bit (live, dregno);
3466 }
3467 }
3468 else
3469 {
3470 /* Regular insn. */
3471 mws_rec = DF_INSN_UID_MWS (uid);
3472 while (*mws_rec)
3473 {
3474 struct df_mw_hardreg *mws = *mws_rec;
3475 if (DF_MWS_REG_DEF_P (mws))
3476 df_set_unused_notes_for_mw (insn,
3477 mws, live, do_not_gen,
3478 artificial_uses, &debug);
3479 mws_rec++;
3480 }
3481
3482 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3483 {
3484 df_ref def = *def_rec;
3485 unsigned int dregno = DF_REF_REGNO (def);
3486 df_create_unused_note (insn,
3487 def, live, artificial_uses, &debug);
3488
3489 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3490 bitmap_set_bit (do_not_gen, dregno);
3491
3492 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3493 bitmap_clear_bit (live, dregno);
3494 }
3495 }
3496
3497 /* Process the uses. */
3498 mws_rec = DF_INSN_UID_MWS (uid);
3499 while (*mws_rec)
3500 {
3501 struct df_mw_hardreg *mws = *mws_rec;
3502 if (DF_MWS_REG_USE_P (mws)
3503 && !df_ignore_stack_reg (mws->start_regno))
3504 {
3505 bool really_add_notes = debug_insn != 0;
3506
3507 df_set_dead_notes_for_mw (insn,
3508 mws, live, do_not_gen,
3509 artificial_uses,
3510 &really_add_notes);
3511
3512 if (really_add_notes)
3513 debug_insn = -1;
3514 }
3515 mws_rec++;
3516 }
3517
3518 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3519 {
3520 df_ref use = *use_rec;
3521 unsigned int uregno = DF_REF_REGNO (use);
3522
3523 if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
3524 {
3525 fprintf (dump_file, " regular looking at use ");
3526 df_ref_debug (use, dump_file);
3527 }
3528
3529 if (!bitmap_bit_p (live, uregno))
3530 {
3531 if (debug_insn)
3532 {
3533 if (debug_insn > 0)
3534 {
3535 /* We won't add REG_UNUSED or REG_DEAD notes for
3536 these, so we don't have to mess with them in
3537 debug insns either. */
3538 if (!bitmap_bit_p (artificial_uses, uregno)
3539 && !df_ignore_stack_reg (uregno))
3540 dead_debug_add (&debug, use, uregno);
3541 continue;
3542 }
3543 break;
3544 }
3545 else
3546 dead_debug_insert_temp (&debug, uregno, insn,
3547 DEBUG_TEMP_BEFORE_WITH_REG);
3548
3549 if ( (!(DF_REF_FLAGS (use)
3550 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3551 && (!bitmap_bit_p (do_not_gen, uregno))
3552 && (!bitmap_bit_p (artificial_uses, uregno))
3553 && (!df_ignore_stack_reg (uregno)))
3554 {
3555 rtx reg = (DF_REF_LOC (use))
3556 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3557 df_set_note (REG_DEAD, insn, reg);
3558
3559 if (REG_DEAD_DEBUGGING)
3560 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3561 }
3562 /* This register is now live. */
3563 bitmap_set_bit (live, uregno);
3564 }
3565 }
3566
3567 if (debug_insn == -1)
3568 {
3569 /* ??? We could probably do better here, replacing dead
3570 registers with their definitions. */
3571 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3572 df_insn_rescan_debug_internal (insn);
3573 }
3574 }
3575
3576 dead_debug_finish (&debug, NULL);
3577 }
3578
3579
3580 /* Compute register info: lifetime, bb, and number of defs and uses. */
3581 static void
3582 df_note_compute (bitmap all_blocks)
3583 {
3584 unsigned int bb_index;
3585 bitmap_iterator bi;
3586 bitmap_head live, do_not_gen, artificial_uses;
3587
3588 bitmap_initialize (&live, &df_bitmap_obstack);
3589 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3590 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3591
3592 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3593 {
3594 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3595 }
3596
3597 bitmap_clear (&live);
3598 bitmap_clear (&do_not_gen);
3599 bitmap_clear (&artificial_uses);
3600 }
3601
3602
3603 /* Free all storage associated with the problem. */
3604
3605 static void
3606 df_note_free (void)
3607 {
3608 free (df_note);
3609 }
3610
3611
3612 /* All of the information associated with every instance of the problem. */
3613
3614 static struct df_problem problem_NOTE =
3615 {
3616 DF_NOTE, /* Problem id. */
3617 DF_NONE, /* Direction. */
3618 df_note_alloc, /* Allocate the problem specific data. */
3619 NULL, /* Reset global information. */
3620 NULL, /* Free basic block info. */
3621 df_note_compute, /* Local compute function. */
3622 NULL, /* Init the solution specific data. */
3623 NULL, /* Iterative solver. */
3624 NULL, /* Confluence operator 0. */
3625 NULL, /* Confluence operator n. */
3626 NULL, /* Transfer function. */
3627 NULL, /* Finalize function. */
3628 df_note_free, /* Free all of the problem information. */
3629 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3630 NULL, /* Debugging. */
3631 NULL, /* Debugging start block. */
3632 NULL, /* Debugging end block. */
3633 NULL, /* Incremental solution verify start. */
3634 NULL, /* Incremental solution verify end. */
3635 &problem_LR, /* Dependent problem. */
3636 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3637 TV_DF_NOTE, /* Timing variable. */
3638 false /* Reset blocks on dropping out of blocks_to_analyze. */
3639 };
3640
3641
3642 /* Create a new DATAFLOW instance and add it to an existing instance
3643 of DF. The solution is expressed as the REG_DEAD and REG_UNUSED
3644 notes attached to the insns. */
3645
3646 void
3647 df_note_add_problem (void)
3648 {
3649 df_add_problem (&problem_NOTE);
3650 }
3651
3652
3653
3654 \f
3655 /*----------------------------------------------------------------------------
3656 Functions for simulating the effects of single insns.
3657
3658 You can either simulate in the forwards direction, starting from
3659 the top of a block, or in the backwards direction from the end of the
3660 block. If you go backwards, defs are examined first to clear bits,
3661 then uses are examined to set bits. If you go forwards, defs are
3662 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3663 are examined to clear bits. In either case, the result of examining
3664 a def can be undone (respectively by a use or a REG_UNUSED note).
3665
3666 If you start at the top of the block, use one of DF_LIVE_IN or
3667 DF_LR_IN. If you start at the bottom of the block use one of
3668 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3669 THEY WILL BE DESTROYED.
3670 ----------------------------------------------------------------------------*/
3671
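/* Editorial example of the "pass a copy" requirement above: the simulation
   routines destructively update the set they are given, so start from a
   scratch bitmap (LIVE and BB are placeholder names):

       bitmap_head live;
       bitmap_initialize (&live, &df_bitmap_obstack);
       bitmap_copy (&live, df_get_live_out (bb));
       ... simulate, then throw the copy away ...
       bitmap_clear (&live);
*/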
3672
3673 /* Find the set of DEFs for INSN. */
3674
3675 void
3676 df_simulate_find_defs (rtx insn, bitmap defs)
3677 {
3678 df_ref *def_rec;
3679 unsigned int uid = INSN_UID (insn);
3680
3681 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3682 {
3683 df_ref def = *def_rec;
3684 bitmap_set_bit (defs, DF_REF_REGNO (def));
3685 }
3686 }
3687
3688 /* Find the set of uses for INSN. This includes partial defs. */
3689
3690 static void
3691 df_simulate_find_uses (rtx insn, bitmap uses)
3692 {
3693 df_ref *rec;
3694 unsigned int uid = INSN_UID (insn);
3695
3696 for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
3697 {
3698 df_ref def = *rec;
3699 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3700 bitmap_set_bit (uses, DF_REF_REGNO (def));
3701 }
3702 for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
3703 {
3704 df_ref use = *rec;
3705 bitmap_set_bit (uses, DF_REF_REGNO (use));
3706 }
3707 }
3708
3709 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3710
3711 void
3712 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3713 {
3714 df_ref *def_rec;
3715 unsigned int uid = INSN_UID (insn);
3716
3717 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3718 {
3719 df_ref def = *def_rec;
3720 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3721 bitmap_set_bit (defs, DF_REF_REGNO (def));
3722 }
3723 }
3724
3725
3726 /* Simulate the effects of the defs of INSN on LIVE. */
3727
3728 void
3729 df_simulate_defs (rtx insn, bitmap live)
3730 {
3731 df_ref *def_rec;
3732 unsigned int uid = INSN_UID (insn);
3733
3734 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3735 {
3736 df_ref def = *def_rec;
3737 unsigned int dregno = DF_REF_REGNO (def);
3738
3739 /* If the def is to only part of the reg, it does
3740 not kill the other defs that reach here. */
3741 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3742 bitmap_clear_bit (live, dregno);
3743 }
3744 }
3745
3746
3747 /* Simulate the effects of the uses of INSN on LIVE. */
3748
3749 void
3750 df_simulate_uses (rtx insn, bitmap live)
3751 {
3752 df_ref *use_rec;
3753 unsigned int uid = INSN_UID (insn);
3754
3755 if (DEBUG_INSN_P (insn))
3756 return;
3757
3758 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3759 {
3760 df_ref use = *use_rec;
3761 /* Add use to set of uses in this BB. */
3762 bitmap_set_bit (live, DF_REF_REGNO (use));
3763 }
3764 }
3765
3766
3767 /* Add back the always live regs in BB to LIVE. */
3768
3769 static inline void
3770 df_simulate_fixup_sets (basic_block bb, bitmap live)
3771 {
3772 /* These regs are considered always live so if they end up dying
3773 because of some def, we need to bring them back again. */
3774 if (bb_has_eh_pred (bb))
3775 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3776 else
3777 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3778 }
3779
3780
3781 /*----------------------------------------------------------------------------
3782 The following three functions are used only for BACKWARDS scanning:
3783 i.e. they process the defs before the uses.
3784
3785 df_simulate_initialize_backwards should be called first with a
3786 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3787 df_simulate_one_insn_backwards should be called for each insn in
3788 the block, starting with the last one. Finally,
3789 df_simulate_finalize_backwards can be called to get a new value
3790 of the sets at the top of the block (this is rarely used).
3791 ----------------------------------------------------------------------------*/
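
/* A minimal usage sketch of the backwards protocol described above
   (the function name is hypothetical; nothing in this file calls it):
   recompute the set of registers live at the top of BB from a copy of
   its live-out set.  LIVE must be a caller-owned scratch bitmap, since
   the copied set is destroyed.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_backwards_sketch (basic_block bb, bitmap live)
{
  rtx insn;

  bitmap_copy (live, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live);
  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);
  df_simulate_finalize_backwards (bb, live);
}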
3792
3793 /* Apply the artificial uses and defs at the end of BB in a backwards
3794 direction. */
3795
3796 void
3797 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3798 {
3799 df_ref *def_rec;
3800 df_ref *use_rec;
3801 int bb_index = bb->index;
3802
3803 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3804 {
3805 df_ref def = *def_rec;
3806 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3807 bitmap_clear_bit (live, DF_REF_REGNO (def));
3808 }
3809
3810 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3811 {
3812 df_ref use = *use_rec;
3813 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3814 bitmap_set_bit (live, DF_REF_REGNO (use));
3815 }
3816 }
3817
3818
3819 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3820
3821 void
3822 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3823 {
3824 if (!NONDEBUG_INSN_P (insn))
3825 return;
3826
3827 df_simulate_defs (insn, live);
3828 df_simulate_uses (insn, live);
3829 df_simulate_fixup_sets (bb, live);
3830 }
3831
3832
3833 /* Apply the artificial uses and defs at the top of BB in a backwards
3834 direction. */
3835
3836 void
3837 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3838 {
3839 df_ref *def_rec;
3840 #ifdef EH_USES
3841 df_ref *use_rec;
3842 #endif
3843 int bb_index = bb->index;
3844
3845 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3846 {
3847 df_ref def = *def_rec;
3848 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3849 bitmap_clear_bit (live, DF_REF_REGNO (def));
3850 }
3851
3852 #ifdef EH_USES
3853 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3854 {
3855 df_ref use = *use_rec;
3856 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3857 bitmap_set_bit (live, DF_REF_REGNO (use));
3858 }
3859 #endif
3860 }
3861 /*----------------------------------------------------------------------------
3862 The following two functions are used only for FORWARDS scanning:
3863 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3864 Thus it is important to add the DF_NOTE problem to the stack of
3865 problems computed before using these functions.
3866
3867 df_simulate_initialize_forwards should be called first with a
3868 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3869 df_simulate_one_insn_forwards should be called for each insn in
3870 the block, starting with the first one.
3871 ----------------------------------------------------------------------------*/
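
/* A minimal usage sketch of the forwards protocol described above
   (the function name is hypothetical; nothing in this file calls it):
   propagate liveness forwards from a copy of BB's live-in set.  The
   DF_NOTE problem must be up to date, because REG_DEAD and REG_UNUSED
   notes are what clear bits in the forward direction.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_forwards_sketch (basic_block bb, bitmap live)
{
  rtx insn;

  bitmap_copy (live, df_get_live_in (bb));
  df_simulate_initialize_forwards (bb, live);
  FOR_BB_INSNS (bb, insn)
    df_simulate_one_insn_forwards (bb, insn, live);
}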
3872
3873 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3874 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3875 defs live. */
3876
3877 void
3878 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3879 {
3880 df_ref *def_rec;
3881 int bb_index = bb->index;
3882
3883 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3884 {
3885 df_ref def = *def_rec;
3886 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3887 bitmap_set_bit (live, DF_REF_REGNO (def));
3888 }
3889 }
3890
3891 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3892
3893 void
3894 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3895 {
3896 rtx link;
3897 if (! INSN_P (insn))
3898 return;
3899
3900 /* Make sure that DF_NOTE really is an active df problem. */
3901 gcc_assert (df_note);
3902
3903 /* Note that this is the opposite of how the problem is defined, because
3904 in the LR problem defs _kill_ liveness. However, they do so backwards,
3905 while here the scan is performed forwards! So, first assume that the
3906 def is live, and if this is not true REG_UNUSED notes will rectify the
3907 situation. */
3908 df_simulate_find_noclobber_defs (insn, live);
3909
3910 /* Clear all of the registers that go dead. */
3911 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3912 {
3913 switch (REG_NOTE_KIND (link))
3914 {
3915 case REG_DEAD:
3916 case REG_UNUSED:
3917 {
3918 rtx reg = XEXP (link, 0);
3919 int regno = REGNO (reg);
3920 if (HARD_REGISTER_NUM_P (regno))
3921 bitmap_clear_range (live, regno,
3922 hard_regno_nregs[regno][GET_MODE (reg)]);
3923 else
3924 bitmap_clear_bit (live, regno);
3925 }
3926 break;
3927 default:
3928 break;
3929 }
3930 }
3931 df_simulate_fixup_sets (bb, live);
3932 }
3933 \f
3934 /* Used by the next two functions to encode information about the
3935 memory references we found. */
3936 #define MEMREF_NORMAL 1
3937 #define MEMREF_VOLATILE 2
3938
3939 /* A subroutine of can_move_insns_across called through for_each_rtx.
3940 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory reference is found. */
3941
3942 static int
3943 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3944 {
3945 rtx x = *px;
3946
3947 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3948 return MEMREF_VOLATILE;
3949
3950 if (!MEM_P (x))
3951 return 0;
3952 if (MEM_VOLATILE_P (x))
3953 return MEMREF_VOLATILE;
3954 if (MEM_READONLY_P (x))
3955 return 0;
3956
3957 return MEMREF_NORMAL;
3958 }
3959
3960 /* A subroutine of can_move_insns_across called through note_stores.
3961 DATA points to an integer in which we set either the bit for
3962 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3963 of either kind. */
3964
3965 static void
3966 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3967 void *data ATTRIBUTE_UNUSED)
3968 {
3969 int *pflags = (int *)data;
3970 if (GET_CODE (x) == SUBREG)
3971 x = XEXP (x, 0);
3972 /* Treat stores to SP as stores to memory; this prevents problems
3973 when there are references to the stack frame. */
3974 if (x == stack_pointer_rtx)
3975 *pflags |= MEMREF_VOLATILE;
3976 if (!MEM_P (x))
3977 return;
3978 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3979 }
3980
3981 /* Scan BB backwards, using df_simulate functions to keep track of
3982 lifetimes, up to insn POINT. The result is stored in LIVE. */
3983
3984 void
3985 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
3986 {
3987 rtx insn;
3988 bitmap_copy (live, df_get_live_out (bb));
3989 df_simulate_initialize_backwards (bb, live);
3990
3991 /* Scan and update life information until we reach the point we're
3992 interested in. */
3993 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
3994 df_simulate_one_insn_backwards (bb, insn, live);
3995 }
3996
3997 /* Return true if it is safe to move a group of insns, described by
3998 the range FROM to TO, backwards across another group of insns,
3999 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
4000 are no insns between ACROSS_TO and FROM, but they may be in
4001 different basic blocks; MERGE_BB is the block from which the
4002 insns will be moved. The caller must pass in a regset MERGE_LIVE
4003 which specifies the registers live after TO.
4004
4005 This function may be called in one of two cases: either we try to
4006 move identical instructions from all successor blocks into their
4007 predecessor, or we try to move from only one successor block. If
4008 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
4009 the second case. It should contain a set of registers live at the
4010 end of ACROSS_TO which must not be clobbered by moving the insns.
4011 In that case, we're also more careful about moving memory references
4012 and trapping insns.
4013
4014 We return false if it is not safe to move the entire group, but it
4015 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
4016 is set to point at the last movable insn in such a case. */
4017
4018 bool
4019 can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
4020 basic_block merge_bb, regset merge_live,
4021 regset other_branch_live, rtx *pmove_upto)
4022 {
4023 rtx insn, next, max_to;
4024 bitmap merge_set, merge_use, local_merge_live;
4025 bitmap test_set, test_use;
4026 unsigned i, fail = 0;
4027 bitmap_iterator bi;
4028 int memrefs_in_across = 0;
4029 int mem_sets_in_across = 0;
4030 bool trapping_insns_in_across = false;
4031
4032 if (pmove_upto != NULL)
4033 *pmove_upto = NULL_RTX;
4034
4035 /* Find real bounds, ignoring debug insns. */
4036 while (!NONDEBUG_INSN_P (from) && from != to)
4037 from = NEXT_INSN (from);
4038 while (!NONDEBUG_INSN_P (to) && from != to)
4039 to = PREV_INSN (to);
4040
4041 for (insn = across_to; ; insn = next)
4042 {
4043 if (CALL_P (insn))
4044 {
4045 if (RTL_CONST_OR_PURE_CALL_P (insn))
4046 /* Pure functions can read from memory. Const functions can
4047 read from arguments that the ABI has forced onto the stack.
4048 Neither sort of read can be volatile. */
4049 memrefs_in_across |= MEMREF_NORMAL;
4050 else
4051 {
4052 memrefs_in_across |= MEMREF_VOLATILE;
4053 mem_sets_in_across |= MEMREF_VOLATILE;
4054 }
4055 }
4056 if (NONDEBUG_INSN_P (insn))
4057 {
4058 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
4059 NULL);
4060 note_stores (PATTERN (insn), find_memory_stores,
4061 &mem_sets_in_across);
4062 /* This is used just to find sets of the stack pointer. */
4063 memrefs_in_across |= mem_sets_in_across;
4064 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
4065 }
4066 next = PREV_INSN (insn);
4067 if (insn == across_from)
4068 break;
4069 }
4070
4071 /* Collect:
4072 MERGE_SET = set of registers set in MERGE_BB
4073 MERGE_USE = set of registers used in MERGE_BB and live at its top
4074 MERGE_LIVE = set of registers live at the point inside the MERGE
4075 range that we've reached during scanning
4076 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
4077 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
4078 and live before ACROSS_FROM. */
4079
4080 merge_set = BITMAP_ALLOC (&reg_obstack);
4081 merge_use = BITMAP_ALLOC (&reg_obstack);
4082 local_merge_live = BITMAP_ALLOC (&reg_obstack);
4083 test_set = BITMAP_ALLOC (&reg_obstack);
4084 test_use = BITMAP_ALLOC (&reg_obstack);
4085
4086 /* Compute the set of registers set and used in the ACROSS range. */
4087 if (other_branch_live != NULL)
4088 bitmap_copy (test_use, other_branch_live);
4089 df_simulate_initialize_backwards (merge_bb, test_use);
4090 for (insn = across_to; ; insn = next)
4091 {
4092 if (NONDEBUG_INSN_P (insn))
4093 {
4094 df_simulate_find_defs (insn, test_set);
4095 df_simulate_defs (insn, test_use);
4096 df_simulate_uses (insn, test_use);
4097 }
4098 next = PREV_INSN (insn);
4099 if (insn == across_from)
4100 break;
4101 }
4102
4103 /* Compute an upper bound for the number of insns moved, by finding
4104 the first insn in MERGE that sets a register in TEST_USE, or uses
4105 a register in TEST_SET. We also check for calls, trapping operations,
4106 and memory references. */
4107 max_to = NULL_RTX;
4108 for (insn = from; ; insn = next)
4109 {
4110 if (CALL_P (insn))
4111 break;
4112 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
4113 break;
4114 if (NONDEBUG_INSN_P (insn))
4115 {
4116 if (may_trap_or_fault_p (PATTERN (insn))
4117 && (trapping_insns_in_across || other_branch_live != NULL))
4118 break;
4119
4120 /* We cannot move memory stores past each other, or move memory
4121 reads past stores, at least not without tracking them and
4122 calling true_dependence on every pair.
4123
4124 If there is no other branch and no memory references or
4125 sets in the ACROSS range, we can move memory references
4126 freely, even volatile ones.
4127
4128 Otherwise, the rules are as follows: volatile memory
4129 references and stores can't be moved at all, and any type
4130 of memory reference can't be moved if there are volatile
4131 accesses or stores in the ACROSS range. That leaves
4132 normal reads, which can be moved, as the trapping case is
4133 dealt with elsewhere. */
4134 if (other_branch_live != NULL || memrefs_in_across != 0)
4135 {
4136 int mem_ref_flags = 0;
4137 int mem_set_flags = 0;
4138 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
4139 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
4140 NULL);
4141 /* Catch sets of the stack pointer. */
4142 mem_ref_flags |= mem_set_flags;
4143
4144 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
4145 break;
4146 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
4147 break;
4148 if (mem_set_flags != 0
4149 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
4150 break;
4151 }
4152 df_simulate_find_uses (insn, merge_use);
4153 /* We're only interested in uses which use a value live at
4154 the top, not one previously set in this block. */
4155 bitmap_and_compl_into (merge_use, merge_set);
4156 df_simulate_find_defs (insn, merge_set);
4157 if (bitmap_intersect_p (merge_set, test_use)
4158 || bitmap_intersect_p (merge_use, test_set))
4159 break;
4160 #ifdef HAVE_cc0
4161 if (!sets_cc0_p (insn))
4162 #endif
4163 max_to = insn;
4164 }
4165 next = NEXT_INSN (insn);
4166 if (insn == to)
4167 break;
4168 }
4169 if (max_to != to)
4170 fail = 1;
4171
4172 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
4173 goto out;
4174
4175 /* Now, lower this upper bound by also taking into account that
4176 a range of insns moved across ACROSS must not leave a register
4177 live at the end that will be clobbered in ACROSS. We need to
4178 find a point where TEST_SET & LIVE == 0.
4179
4180 Insns in the MERGE range that set registers which are also set
4181 in the ACROSS range may still be moved as long as we also move
4182 later insns which use the results of the set, and make the
4183 register dead again. This is verified by the condition stated
4184 above. We only need to test it for registers that are set in
4185 the moved region.
4186
4187 MERGE_LIVE is provided by the caller and holds live registers after
4188 TO. */
4189 bitmap_copy (local_merge_live, merge_live);
4190 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
4191 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
4192
4193 /* We're not interested in registers that aren't set in the moved
4194 region at all. */
4195 bitmap_and_into (local_merge_live, merge_set);
4196 for (;;)
4197 {
4198 if (NONDEBUG_INSN_P (insn))
4199 {
4200 if (!bitmap_intersect_p (test_set, local_merge_live)
4201 #ifdef HAVE_cc0
4202 && !sets_cc0_p (insn)
4203 #endif
4204 )
4205 {
4206 max_to = insn;
4207 break;
4208 }
4209
4210 df_simulate_one_insn_backwards (merge_bb, insn,
4211 local_merge_live);
4212 }
4213 if (insn == from)
4214 {
4215 fail = 1;
4216 goto out;
4217 }
4218 insn = PREV_INSN (insn);
4219 }
4220
4221 if (max_to != to)
4222 fail = 1;
4223
4224 if (pmove_upto)
4225 *pmove_upto = max_to;
4226
4227 /* For small register class machines, don't lengthen lifetimes of
4228 hard registers before reload. */
4229 if (! reload_completed
4230 && targetm.small_register_classes_for_mode_p (VOIDmode))
4231 {
4232 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4233 {
4234 if (i < FIRST_PSEUDO_REGISTER
4235 && ! fixed_regs[i]
4236 && ! global_regs[i])
4237 fail = 1;
4238 }
4239 }
4240
4241 out:
4242 BITMAP_FREE (merge_set);
4243 BITMAP_FREE (merge_use);
4244 BITMAP_FREE (local_merge_live);
4245 BITMAP_FREE (test_set);
4246 BITMAP_FREE (test_use);
4247
4248 return !fail;
4249 }
4250
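/* A hedged sketch of how a caller might drive can_move_insns_across
   (the wrapper is hypothetical; real callers such as the if-conversion
   pass live in other files): either the whole group FROM..TO can be
   moved, or MOVE_UPTO identifies a movable prefix of it.  */

static bool ATTRIBUTE_UNUSED
can_move_group_sketch (rtx from, rtx to, rtx across_from, rtx across_to,
                       basic_block merge_bb, regset merge_live)
{
  rtx move_upto;

  if (can_move_insns_across (from, to, across_from, across_to,
                             merge_bb, merge_live, NULL, &move_upto))
    return true;

  /* The whole group cannot be moved; MOVE_UPTO, if not NULL_RTX, is the
     last insn of a subgroup FROM..MOVE_UPTO that could still move.  */
  return move_upto != NULL_RTX;
}
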
4251 \f
4252 /*----------------------------------------------------------------------------
4253 MULTIPLE DEFINITIONS
4254
4255 Find the locations in the function reached by multiple definition sites
4256 for a live pseudo. In and out bitvectors are built for each basic
4257 block. They are restricted for efficiency to live registers.
4258
4259 The gen and kill sets for the problem are obvious. Together they
4260 include all defined registers in a basic block; the gen set includes
4261 registers where a partial or conditional or may-clobber definition is
4262 last in the BB, while the kill set includes registers with a complete
4263 definition coming last. However, the computation of the dataflow
4264 itself is interesting.
4265
4266 The idea behind it comes from SSA form's iterated dominance frontier
4267 criterion for inserting PHI functions. Just like in that case, we can use
4268 the dominance frontier to find places where multiple definitions meet;
4269 a register X defined in a basic block BB1 has multiple definitions in
4270 basic blocks in BB1's dominance frontier.
4271
4272 So, the in-set of a basic block BB2 is not just the union of the
4273 out-sets of BB2's predecessors, but includes some more bits that come
4274 from the basic blocks whose dominance frontier BB2 belongs to (BB1 in
4275 the previous paragraph). We call this set the init-set of BB2.
4276
4277 (Note: the kill-set is used only to build the init-set;
4278 gen bits are propagated from BB1 to BB2 by the dataflow iteration anyway.)
4279
4280 For example, if you have
4281
4282 BB1 : r10 = 0
4283 r11 = 0
4284 if <...> goto BB2 else goto BB3;
4285
4286 BB2 : r10 = 1
4287 r12 = 1
4288 goto BB3;
4289
4290 BB3 :
4291
4292 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4293 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4294 not need to iterate the dominance frontier, because we do not insert
4295 anything like PHI functions there! Instead, dataflow will take care of
4296 propagating the information to BB3's successors.
4297 ---------------------------------------------------------------------------*/
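
/* In equation form, roughly (this restates what df_md_local_compute,
   df_md_confluence_0/n and df_md_transfer_function below compute):

     KILL(BB)  = regs whose last def in BB is complete and unconditional
     GEN(BB)   = regs whose last def in BB is partial, conditional or
                 may-clobber
     INIT(BB2) = union, over all BB1 with BB2 in DF(BB1), of
                   KILL(BB1) & LIVE_IN(BB2)
     IN(BB)    = (INIT(BB) | union of OUT(pred)) & LIVE_IN(BB)
     OUT(BB)   = (GEN(BB) & LIVE_OUT(BB)) | (IN(BB) & ~KILL(BB))

   On EH edges the predecessor's OUT set is first stripped of
   regs_invalidated_by_call_regset.  */
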
4298
4299 /* Private data used to verify the solution for this problem. */
4300 struct df_md_problem_data
4301 {
4302 /* An obstack for the bitmaps we need for this problem. */
4303 bitmap_obstack md_bitmaps;
4304 };
4305
4306 /* Scratch var used by transfer functions. This is used to do md analysis
4307 only for live registers. */
4308 static bitmap_head df_md_scratch;
4309
4310
4311 static void
4312 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4313 void *vbb_info)
4314 {
4315 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4316 if (bb_info)
4317 {
4318 bitmap_clear (&bb_info->kill);
4319 bitmap_clear (&bb_info->gen);
4320 bitmap_clear (&bb_info->init);
4321 bitmap_clear (&bb_info->in);
4322 bitmap_clear (&bb_info->out);
4323 }
4324 }
4325
4326
4327 /* Allocate or reset bitmaps for DF_MD. The solution bits are
4328 not touched unless the block is new. */
4329
4330 static void
4331 df_md_alloc (bitmap all_blocks)
4332 {
4333 unsigned int bb_index;
4334 bitmap_iterator bi;
4335 struct df_md_problem_data *problem_data;
4336
4337 df_grow_bb_info (df_md);
4338 if (df_md->problem_data)
4339 problem_data = (struct df_md_problem_data *) df_md->problem_data;
4340 else
4341 {
4342 problem_data = XNEW (struct df_md_problem_data);
4343 df_md->problem_data = problem_data;
4344 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4345 }
4346 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4347
4348 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4349 {
4350 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4351 /* When bitmaps are already initialized, just clear them. */
4352 if (bb_info->init.obstack)
4353 {
4354 bitmap_clear (&bb_info->init);
4355 bitmap_clear (&bb_info->gen);
4356 bitmap_clear (&bb_info->kill);
4357 bitmap_clear (&bb_info->in);
4358 bitmap_clear (&bb_info->out);
4359 }
4360 else
4361 {
4362 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4363 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4364 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4365 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4366 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4367 }
4368 }
4369
4370 df_md->optional_p = true;
4371 }
4372
4373 /* Add the effect of the top artificial defs of BB to the multiple definitions
4374 bitmap LOCAL_MD. */
4375
4376 void
4377 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4378 {
4379 int bb_index = bb->index;
4380 df_ref *def_rec;
4381 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4382 {
4383 df_ref def = *def_rec;
4384 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4385 {
4386 unsigned int dregno = DF_REF_REGNO (def);
4387 if (DF_REF_FLAGS (def)
4388 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4389 bitmap_set_bit (local_md, dregno);
4390 else
4391 bitmap_clear_bit (local_md, dregno);
4392 }
4393 }
4394 }
4395
4396
4397 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4398 LOCAL_MD. */
4399
4400 void
4401 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4402 bitmap local_md)
4403 {
4404 unsigned uid = INSN_UID (insn);
4405 df_ref *def_rec;
4406
4407 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4408 {
4409 df_ref def = *def_rec;
4410 unsigned int dregno = DF_REF_REGNO (def);
4411 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4412 || (dregno >= FIRST_PSEUDO_REGISTER))
4413 {
4414 if (DF_REF_FLAGS (def)
4415 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4416 bitmap_set_bit (local_md, dregno);
4417 else
4418 bitmap_clear_bit (local_md, dregno);
4419 }
4420 }
4421 }
4422
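/* A hedged usage sketch (the function name is hypothetical; nothing in
   this file calls it): given a solved DF_MD problem, walk BB forwards
   keeping in LOCAL_MD the regno-indexed set of registers whose reaching
   definition may not be unique at the current point.  */

static void ATTRIBUTE_UNUSED
df_md_walk_block_sketch (basic_block bb, bitmap local_md)
{
  rtx insn;

  /* Assumes df_md_add_problem () and df_analyze () have already run.  */
  bitmap_copy (local_md, &df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, local_md);
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_md_simulate_one_insn (bb, insn, local_md);
}
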
4423 static void
4424 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4425 df_ref *def_rec,
4426 int top_flag)
4427 {
4428 df_ref def;
4429 bitmap_clear (&seen_in_insn);
4430
4431 while ((def = *def_rec++) != NULL)
4432 {
4433 unsigned int dregno = DF_REF_REGNO (def);
4434 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4435 || (dregno >= FIRST_PSEUDO_REGISTER))
4436 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4437 {
4438 if (!bitmap_bit_p (&seen_in_insn, dregno))
4439 {
4440 if (DF_REF_FLAGS (def)
4441 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4442 {
4443 bitmap_set_bit (&bb_info->gen, dregno);
4444 bitmap_clear_bit (&bb_info->kill, dregno);
4445 }
4446 else
4447 {
4448 /* When we find a clobber and a regular def,
4449 make sure the regular def wins. */
4450 bitmap_set_bit (&seen_in_insn, dregno);
4451 bitmap_set_bit (&bb_info->kill, dregno);
4452 bitmap_clear_bit (&bb_info->gen, dregno);
4453 }
4454 }
4455 }
4456 }
4457 }
4458
4459
4460 /* Compute local multiple def info for basic block BB. */
4461
4462 static void
4463 df_md_bb_local_compute (unsigned int bb_index)
4464 {
4465 basic_block bb = BASIC_BLOCK (bb_index);
4466 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4467 rtx insn;
4468
4469 /* Artificial defs are only hard regs. */
4470 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4471 df_md_bb_local_compute_process_def (bb_info,
4472 df_get_artificial_defs (bb_index),
4473 DF_REF_AT_TOP);
4474
4475 FOR_BB_INSNS (bb, insn)
4476 {
4477 unsigned int uid = INSN_UID (insn);
4478 if (!INSN_P (insn))
4479 continue;
4480
4481 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4482 }
4483
4484 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4485 df_md_bb_local_compute_process_def (bb_info,
4486 df_get_artificial_defs (bb_index),
4487 0);
4488 }
4489
4490 /* Compute local multiple-def info for each basic block within ALL_BLOCKS. */
4491
4492 static void
4493 df_md_local_compute (bitmap all_blocks)
4494 {
4495 unsigned int bb_index, df_bb_index;
4496 bitmap_iterator bi1, bi2;
4497 basic_block bb;
4498 bitmap_head *frontiers;
4499
4500 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4501
4502 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4503 {
4504 df_md_bb_local_compute (bb_index);
4505 }
4506
4507 bitmap_clear (&seen_in_insn);
4508
4509 frontiers = XNEWVEC (bitmap_head, last_basic_block);
4510 FOR_ALL_BB (bb)
4511 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4512
4513 compute_dominance_frontiers (frontiers);
4514
4515 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4516 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4517 {
4518 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4519 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4520 {
4521 basic_block bb = BASIC_BLOCK (df_bb_index);
4522 if (bitmap_bit_p (all_blocks, df_bb_index))
4523 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4524 df_get_live_in (bb));
4525 }
4526 }
4527
4528 FOR_ALL_BB (bb)
4529 bitmap_clear (&frontiers[bb->index]);
4530 free (frontiers);
4531 }
4532
4533
4534 /* Reset the global solution for recalculation. */
4535
4536 static void
4537 df_md_reset (bitmap all_blocks)
4538 {
4539 unsigned int bb_index;
4540 bitmap_iterator bi;
4541
4542 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4543 {
4544 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4545 gcc_assert (bb_info);
4546 bitmap_clear (&bb_info->in);
4547 bitmap_clear (&bb_info->out);
4548 }
4549 }
4550
4551 static bool
4552 df_md_transfer_function (int bb_index)
4553 {
4554 basic_block bb = BASIC_BLOCK (bb_index);
4555 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4556 bitmap in = &bb_info->in;
4557 bitmap out = &bb_info->out;
4558 bitmap gen = &bb_info->gen;
4559 bitmap kill = &bb_info->kill;
4560
4561 /* We need to use a scratch set here so that the value returned from this
4562 function invocation properly reflects whether the sets changed in a
4563 significant way; i.e. not just because the live set was anded in. */
4564 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4565
4566 /* Multiple definitions of a register are not relevant if it is not
4567 live. Thus we trim the result to the places where it is live. */
4568 bitmap_and_into (in, df_get_live_in (bb));
4569
4570 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4571 }
4572
4573 /* Initialize the solution bit vectors for the problem. */
4574
4575 static void
4576 df_md_init (bitmap all_blocks)
4577 {
4578 unsigned int bb_index;
4579 bitmap_iterator bi;
4580
4581 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4582 {
4583 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4584
4585 bitmap_copy (&bb_info->in, &bb_info->init);
4586 df_md_transfer_function (bb_index);
4587 }
4588 }
4589
4590 static void
4591 df_md_confluence_0 (basic_block bb)
4592 {
4593 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4594 bitmap_copy (&bb_info->in, &bb_info->init);
4595 }
4596
4597 /* The in-set of the target is ORed with the out-set of the source. */
4598
4599 static bool
4600 df_md_confluence_n (edge e)
4601 {
4602 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4603 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4604
4605 if (e->flags & EDGE_FAKE)
4606 return false;
4607
4608 if (e->flags & EDGE_EH)
4609 return bitmap_ior_and_compl_into (op1, op2,
4610 regs_invalidated_by_call_regset);
4611 else
4612 return bitmap_ior_into (op1, op2);
4613 }
4614
4615 /* Free all storage associated with the problem. */
4616
4617 static void
4618 df_md_free (void)
4619 {
4620 struct df_md_problem_data *problem_data
4621 = (struct df_md_problem_data *) df_md->problem_data;
4622
4623 bitmap_obstack_release (&problem_data->md_bitmaps);
4624 free (problem_data);
4625 df_md->problem_data = NULL;
4626
4627 df_md->block_info_size = 0;
4628 free (df_md->block_info);
4629 df_md->block_info = NULL;
4630 free (df_md);
4631 }
4632
4633
4634 /* Debugging info at top of bb. */
4635
4636 static void
4637 df_md_top_dump (basic_block bb, FILE *file)
4638 {
4639 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4640 if (!bb_info)
4641 return;
4642
4643 fprintf (file, ";; md in \t");
4644 df_print_regset (file, &bb_info->in);
4645 fprintf (file, ";; md init \t");
4646 df_print_regset (file, &bb_info->init);
4647 fprintf (file, ";; md gen \t");
4648 df_print_regset (file, &bb_info->gen);
4649 fprintf (file, ";; md kill \t");
4650 df_print_regset (file, &bb_info->kill);
4651 }
4652
4653 /* Debugging info at bottom of bb. */
4654
4655 static void
4656 df_md_bottom_dump (basic_block bb, FILE *file)
4657 {
4658 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4659 if (!bb_info)
4660 return;
4661
4662 fprintf (file, ";; md out \t");
4663 df_print_regset (file, &bb_info->out);
4664 }
4665
4666 static struct df_problem problem_MD =
4667 {
4668 DF_MD, /* Problem id. */
4669 DF_FORWARD, /* Direction. */
4670 df_md_alloc, /* Allocate the problem specific data. */
4671 df_md_reset, /* Reset global information. */
4672 df_md_free_bb_info, /* Free basic block info. */
4673 df_md_local_compute, /* Local compute function. */
4674 df_md_init, /* Init the solution specific data. */
4675 df_worklist_dataflow, /* Worklist solver. */
4676 df_md_confluence_0, /* Confluence operator 0. */
4677 df_md_confluence_n, /* Confluence operator n. */
4678 df_md_transfer_function, /* Transfer function. */
4679 NULL, /* Finalize function. */
4680 df_md_free, /* Free all of the problem information. */
4681 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4682 NULL, /* Debugging. */
4683 df_md_top_dump, /* Debugging start block. */
4684 df_md_bottom_dump, /* Debugging end block. */
4685 NULL, /* Incremental solution verify start. */
4686 NULL, /* Incremental solution verify end. */
4687 NULL, /* Dependent problem. */
4688 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4689 TV_DF_MD, /* Timing variable. */
4690 false /* Reset blocks on dropping out of blocks_to_analyze. */
4691 };
4692
4693 /* Create a new MD instance and add it to the existing instance
4694 of DF. */
4695
4696 void
4697 df_md_add_problem (void)
4698 {
4699 df_add_problem (&problem_MD);
4700 }
4701
4702
4703