Remove DF_REF_INSN scaffolding
[gcc.git] / gcc / df-problems.c
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999-2014 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "alloc-pool.h"
35 #include "flags.h"
36 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "sbitmap.h"
39 #include "bitmap.h"
40 #include "target.h"
41 #include "timevar.h"
42 #include "df.h"
43 #include "except.h"
44 #include "dce.h"
45 #include "valtrack.h"
46 #include "dumpfile.h"
47
48 /* Note that turning REG_DEAD_DEBUGGING on will cause
49 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
50 addresses in the dumps. */
51 #define REG_DEAD_DEBUGGING 0
52
53 #define DF_SPARSE_THRESHOLD 32
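/* Number of defs of a single register above which the reaching-defs
   problem stops recording individual def ids in its kill sets and
   switches to the sparse, regno-indexed representation described in
   the comment before struct df_rd_problem_data below.  */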
54
55 static bitmap_head seen_in_block;
56 static bitmap_head seen_in_insn;
57
58 /*----------------------------------------------------------------------------
59 Utility functions.
60 ----------------------------------------------------------------------------*/
61
62 /* Generic versions to get the void* version of the block info. Only
63 used inside the problem instance vectors. */
64
65 /* Dump a def-use or use-def chain for REF to FILE. */
66
67 void
68 df_chain_dump (struct df_link *link, FILE *file)
69 {
70 fprintf (file, "{ ");
71 for (; link; link = link->next)
72 {
73 fprintf (file, "%c%d(bb %d insn %d) ",
74 DF_REF_REG_DEF_P (link->ref)
75 ? 'd'
76 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
77 DF_REF_ID (link->ref),
78 DF_REF_BBNO (link->ref),
79 DF_REF_IS_ARTIFICIAL (link->ref)
80 ? -1 : DF_REF_INSN_UID (link->ref));
81 }
82 fprintf (file, "}");
83 }
84
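/* For instance, a use whose UD chain contains defs 3 and 8, sitting in
   blocks 2 and 5, dumps as "{ d3(bb 2 insn 10) d8(bb 5 insn 24) }";
   artificial refs print -1 in place of an insn uid, and uses found in
   notes are tagged 'e' instead of 'u'.  (Ids here are illustrative only.)  */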
85
86 /* Print some basic block info as part of df_dump. */
87
88 void
89 df_print_bb_index (basic_block bb, FILE *file)
90 {
91 edge e;
92 edge_iterator ei;
93
94 fprintf (file, "\n( ");
95 FOR_EACH_EDGE (e, ei, bb->preds)
96 {
97 basic_block pred = e->src;
98 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
99 }
100 fprintf (file, ")->[%d]->( ", bb->index);
101 FOR_EACH_EDGE (e, ei, bb->succs)
102 {
103 basic_block succ = e->dest;
104 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
105 }
106 fprintf (file, ")\n");
107 }
108
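/* For example, a block 5 with predecessors 2 and 4 (4 reached over an
   EH edge) and successor 7 is printed as "( 2 4(EH) )->[5]->( 7 )".  */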
109 \f
110 /*----------------------------------------------------------------------------
111 REACHING DEFINITIONS
112
113 Find the locations in the function where each definition site for a
114 pseudo reaches. In and out bitvectors are built for each basic
115 block. The id field in the ref is used to index into these sets.
116 See df.h for details.
117
118 If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching
119 existing uses are included in the global reaching DEFs set, or in other
120 words only DEFs that are still live. This is a kind of pruned version
121 of the traditional reaching definitions problem that is much less
122 complex to compute and produces enough information to compute UD-chains.
123 In this context, live must be interpreted in the DF_LR sense: Uses that
124 are upward exposed but maybe not initialized on all paths through the
125 CFG. For a USE that is not reached by a DEF on all paths, we still want
126 to make those DEFs that do reach the USE visible, and pruning based on
127 DF_LIVE would make that impossible.
128 ----------------------------------------------------------------------------*/
129
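/* Written as the classic forward dataflow system, with one bit per def
   (indexed by DF_REF_ID):

     GEN[bb]  = defs in bb that reach the end of bb
     KILL[bb] = every def of each reg wholly set in bb
     IN[bb]   = union over predecessors p of OUT[p]
     OUT[bb]  = GEN[bb] | (IN[bb] - KILL[bb])  */
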
130 /* This problem plays a large number of games for the sake of
131 efficiency.
132
133 1) The order of the bits in the bitvectors. After the scanning
134 phase, all of the defs are sorted. All of the defs for the reg 0
135 are first, followed by all defs for reg 1 and so on.
136
137    2) There are two kill sets, one if the number of defs is less than or
138 equal to DF_SPARSE_THRESHOLD and another if the number of defs is
139 greater.
140
141 <= : Data is built directly in the kill set.
142
143 > : One level of indirection is used to keep from generating long
144 strings of 1 bits in the kill sets. Bitvectors that are indexed
145 by the regnum are used to represent that there is a killing def
146 for the register. The confluence and transfer functions use
147 these along with the bitmap_clear_range call to remove ranges of
148 bits without actually generating a knockout vector.
149
150    The kill/sparse_kill pair and the dense_invalidated_by_call/
151    sparse_invalidated_by_call pair both play this game. */
152
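/* As a concrete (purely illustrative) example: if reg 5 has three defs
   with ids 17..19 and reg 6 has forty defs with ids 20..59, a block
   whose last insn fully sets both registers kills reg 5 densely, by
   setting kill bits 17..19 (3 <= DF_SPARSE_THRESHOLD), but kills reg 6
   sparsely, by setting bit 6 in sparse_kill (40 > DF_SPARSE_THRESHOLD);
   the transfer function then clears the whole range 20..59 with
   bitmap_clear_range instead of testing forty kill bits.  */
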
153 /* Private data used to compute the solution for this problem. These
154 data structures are not accessible outside of this module. */
155 struct df_rd_problem_data
156 {
157   /* Call-invalidated regs that have many defs, indexed by regno. */
158 bitmap_head sparse_invalidated_by_call;
159   /* Call-invalidated defs of the remaining regs, indexed by def id. */
160 bitmap_head dense_invalidated_by_call;
161 /* An obstack for the bitmaps we need for this problem. */
162 bitmap_obstack rd_bitmaps;
163 };
164
165
166 /* Free basic block info. */
167
168 static void
169 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
170 void *vbb_info)
171 {
172 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
173 if (bb_info)
174 {
175 bitmap_clear (&bb_info->kill);
176 bitmap_clear (&bb_info->sparse_kill);
177 bitmap_clear (&bb_info->gen);
178 bitmap_clear (&bb_info->in);
179 bitmap_clear (&bb_info->out);
180 }
181 }
182
183
184 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
185 not touched unless the block is new. */
186
187 static void
188 df_rd_alloc (bitmap all_blocks)
189 {
190 unsigned int bb_index;
191 bitmap_iterator bi;
192 struct df_rd_problem_data *problem_data;
193
194 if (df_rd->problem_data)
195 {
196 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
197 bitmap_clear (&problem_data->sparse_invalidated_by_call);
198 bitmap_clear (&problem_data->dense_invalidated_by_call);
199 }
200 else
201 {
202 problem_data = XNEW (struct df_rd_problem_data);
203 df_rd->problem_data = problem_data;
204
205 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
206 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
207 &problem_data->rd_bitmaps);
208 bitmap_initialize (&problem_data->dense_invalidated_by_call,
209 &problem_data->rd_bitmaps);
210 }
211
212 df_grow_bb_info (df_rd);
213
214   /* Because of the clustering of all def sites for the same pseudo,
215 we have to process all of the blocks before doing the analysis. */
216
217 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
218 {
219 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
220
221 /* When bitmaps are already initialized, just clear them. */
222 if (bb_info->kill.obstack)
223 {
224 bitmap_clear (&bb_info->kill);
225 bitmap_clear (&bb_info->sparse_kill);
226 bitmap_clear (&bb_info->gen);
227 }
228 else
229 {
230 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
231 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
232 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
233 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
234 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
235 }
236 }
237 df_rd->optional_p = true;
238 }
239
240
241 /* Add the effect of the top artificial defs of BB to the reaching definitions
242 bitmap LOCAL_RD. */
243
244 void
245 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
246 {
247 int bb_index = bb->index;
248 df_ref def;
249 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
250 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
251 {
252 unsigned int dregno = DF_REF_REGNO (def);
253 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
254 bitmap_clear_range (local_rd,
255 DF_DEFS_BEGIN (dregno),
256 DF_DEFS_COUNT (dregno));
257 bitmap_set_bit (local_rd, DF_REF_ID (def));
258 }
259 }
260
261 /* Add the effect of the defs of INSN to the reaching definitions bitmap
262 LOCAL_RD. */
263
264 void
265 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
266 bitmap local_rd)
267 {
268 df_ref def;
269
270 FOR_EACH_INSN_DEF (def, insn)
271 {
272 unsigned int dregno = DF_REF_REGNO (def);
273 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
274 || (dregno >= FIRST_PSEUDO_REGISTER))
275 {
276 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
277 bitmap_clear_range (local_rd,
278 DF_DEFS_BEGIN (dregno),
279 DF_DEFS_COUNT (dregno));
280 if (!(DF_REF_FLAGS (def)
281 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
282 bitmap_set_bit (local_rd, DF_REF_ID (def));
283 }
284 }
285 }
286
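/* A minimal sketch (not used by anything in this file) of how a client
   pass could combine the two simulate routines above to track the set
   of reaching definitions at each point within a block.  */

static void ATTRIBUTE_UNUSED
df_rd_simulate_block_example (basic_block bb)
{
  bitmap_head local_rd;
  rtx_insn *insn;

  bitmap_initialize (&local_rd, &df_bitmap_obstack);

  /* Seed LOCAL_RD with the defs reaching the start of BB, then apply
     the artificial defs that sit at the top of the block.  */
  bitmap_copy (&local_rd, &df_rd_get_bb_info (bb->index)->in);
  df_rd_simulate_artificial_defs_at_top (bb, &local_rd);

  /* Walking forwards, LOCAL_RD holds the DF_REF_IDs of the defs that
     reach each insn just before it is simulated.  */
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_rd_simulate_one_insn (bb, insn, &local_rd);

  bitmap_clear (&local_rd);
}
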
287 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
288 more complicated than just simulating, because we must produce the
289 gen and kill sets and hence deal with the two possible representations
290 of kill sets. */
291
292 static void
293 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
294 df_ref def,
295 int top_flag)
296 {
297 for (; def; def = DF_REF_NEXT_LOC (def))
298 {
299 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
300 {
301 unsigned int regno = DF_REF_REGNO (def);
302 unsigned int begin = DF_DEFS_BEGIN (regno);
303 unsigned int n_defs = DF_DEFS_COUNT (regno);
304
305 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
306 || (regno >= FIRST_PSEUDO_REGISTER))
307 {
308 /* Only the last def(s) for a regno in the block has any
309 effect. */
310 if (!bitmap_bit_p (&seen_in_block, regno))
311 {
312 /* The first def for regno in insn gets to knock out the
313 defs from other instructions. */
314 if ((!bitmap_bit_p (&seen_in_insn, regno))
315 /* If the def is to only part of the reg, it does
316 not kill the other defs that reach here. */
317 && (!(DF_REF_FLAGS (def) &
318 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
319 {
320 if (n_defs > DF_SPARSE_THRESHOLD)
321 {
322 bitmap_set_bit (&bb_info->sparse_kill, regno);
323 bitmap_clear_range (&bb_info->gen, begin, n_defs);
324 }
325 else
326 {
327 bitmap_set_range (&bb_info->kill, begin, n_defs);
328 bitmap_clear_range (&bb_info->gen, begin, n_defs);
329 }
330 }
331
332 bitmap_set_bit (&seen_in_insn, regno);
333 /* All defs for regno in the instruction may be put into
334 the gen set. */
335 if (!(DF_REF_FLAGS (def)
336 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
337 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
338 }
339 }
340 }
341 }
342 }
343
344 /* Compute local reaching def info for basic block BB. */
345
346 static void
347 df_rd_bb_local_compute (unsigned int bb_index)
348 {
349 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
350 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
351 rtx_insn *insn;
352
353 bitmap_clear (&seen_in_block);
354 bitmap_clear (&seen_in_insn);
355
356 /* Artificials are only hard regs. */
357 if (!(df->changeable_flags & DF_NO_HARD_REGS))
358 df_rd_bb_local_compute_process_def (bb_info,
359 df_get_artificial_defs (bb_index),
360 0);
361
362 FOR_BB_INSNS_REVERSE (bb, insn)
363 {
364 unsigned int uid = INSN_UID (insn);
365
366 if (!INSN_P (insn))
367 continue;
368
369 df_rd_bb_local_compute_process_def (bb_info,
370 DF_INSN_UID_DEFS (uid), 0);
371
372 /* This complex dance with the two bitmaps is required because
373 instructions can assign twice to the same pseudo. This
374 generally happens with calls that will have one def for the
375 result and another def for the clobber. If only one vector
376 is used and the clobber goes first, the result will be
377 lost. */
378 bitmap_ior_into (&seen_in_block, &seen_in_insn);
379 bitmap_clear (&seen_in_insn);
380 }
381
382 /* Process the artificial defs at the top of the block last since we
383 are going backwards through the block and these are logically at
384 the start. */
385 if (!(df->changeable_flags & DF_NO_HARD_REGS))
386 df_rd_bb_local_compute_process_def (bb_info,
387 df_get_artificial_defs (bb_index),
388 DF_REF_AT_TOP);
389 }
390
391
392 /* Compute local reaching def info for each basic block within BLOCKS. */
393
394 static void
395 df_rd_local_compute (bitmap all_blocks)
396 {
397 unsigned int bb_index;
398 bitmap_iterator bi;
399 unsigned int regno;
400 struct df_rd_problem_data *problem_data
401 = (struct df_rd_problem_data *) df_rd->problem_data;
402 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
403 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
404
405 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
406 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
407
408 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
409
410 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
411 {
412 df_rd_bb_local_compute (bb_index);
413 }
414
415 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
416 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
417 {
418 if (! HARD_REGISTER_NUM_P (regno)
419 || !(df->changeable_flags & DF_NO_HARD_REGS))
420 {
421 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
422 bitmap_set_bit (sparse_invalidated, regno);
423 else
424 bitmap_set_range (dense_invalidated,
425 DF_DEFS_BEGIN (regno),
426 DF_DEFS_COUNT (regno));
427 }
428 }
429
430 bitmap_clear (&seen_in_block);
431 bitmap_clear (&seen_in_insn);
432 }
433
434
435 /* Initialize the solution bit vectors for problem. */
436
437 static void
438 df_rd_init_solution (bitmap all_blocks)
439 {
440 unsigned int bb_index;
441 bitmap_iterator bi;
442
443 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
444 {
445 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
446
447 bitmap_copy (&bb_info->out, &bb_info->gen);
448 bitmap_clear (&bb_info->in);
449 }
450 }
451
452 /* Confluence function: the IN set of the target gets the OR of the OUT
       set of the source; across an EH edge, defs of call-clobbered regs are
       first removed using the invalidated-by-call sets built above.  */
453
454 static bool
455 df_rd_confluence_n (edge e)
456 {
457 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
458 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
459 bool changed = false;
460
461 if (e->flags & EDGE_FAKE)
462 return false;
463
464 if (e->flags & EDGE_EH)
465 {
466 struct df_rd_problem_data *problem_data
467 = (struct df_rd_problem_data *) df_rd->problem_data;
468 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
469 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
470 bitmap_iterator bi;
471 unsigned int regno;
472 bitmap_head tmp;
473
474 bitmap_initialize (&tmp, &df_bitmap_obstack);
475 bitmap_and_compl (&tmp, op2, dense_invalidated);
476
477 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
478 {
479 bitmap_clear_range (&tmp,
480 DF_DEFS_BEGIN (regno),
481 DF_DEFS_COUNT (regno));
482 }
483 changed |= bitmap_ior_into (op1, &tmp);
484 bitmap_clear (&tmp);
485 return changed;
486 }
487 else
488 return bitmap_ior_into (op1, op2);
489 }
490
491
492 /* Transfer function. */
493
494 static bool
495 df_rd_transfer_function (int bb_index)
496 {
497 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
498 unsigned int regno;
499 bitmap_iterator bi;
500 bitmap in = &bb_info->in;
501 bitmap out = &bb_info->out;
502 bitmap gen = &bb_info->gen;
503 bitmap kill = &bb_info->kill;
504 bitmap sparse_kill = &bb_info->sparse_kill;
505 bool changed = false;
506
507 if (bitmap_empty_p (sparse_kill))
508 changed = bitmap_ior_and_compl (out, gen, in, kill);
509 else
510 {
511 struct df_rd_problem_data *problem_data;
512 bitmap_head tmp;
513
514 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
515 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
516 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
517 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
518
519 bitmap_and_compl (&tmp, in, kill);
520 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
521 {
522 bitmap_clear_range (&tmp,
523 DF_DEFS_BEGIN (regno),
524 DF_DEFS_COUNT (regno));
525 }
526 bitmap_ior_into (&tmp, gen);
527 changed = !bitmap_equal_p (&tmp, out);
528 if (changed)
529 {
530 bitmap_clear (out);
531 bb_info->out = tmp;
532 }
533 else
534 bitmap_clear (&tmp);
535 }
536
537 if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
538 {
539 /* Create a mask of DEFs for all registers live at the end of this
540 basic block, and mask out DEFs of registers that are not live.
541 Computing the mask looks costly, but the benefit of the pruning
542 outweighs the cost. */
543 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
544 bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
545 bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
546 unsigned int regno;
547 bitmap_iterator bi;
548
549 EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi)
550 bitmap_set_range (live_defs,
551 DF_DEFS_BEGIN (regno),
552 DF_DEFS_COUNT (regno));
553 changed |= bitmap_and_into (&bb_info->out, live_defs);
554 BITMAP_FREE (live_defs);
555 }
556
557 return changed;
558 }
559
560 /* Free all storage associated with the problem. */
561
562 static void
563 df_rd_free (void)
564 {
565 struct df_rd_problem_data *problem_data
566 = (struct df_rd_problem_data *) df_rd->problem_data;
567
568 if (problem_data)
569 {
570 bitmap_obstack_release (&problem_data->rd_bitmaps);
571
572 df_rd->block_info_size = 0;
573 free (df_rd->block_info);
574 df_rd->block_info = NULL;
575 free (df_rd->problem_data);
576 }
577 free (df_rd);
578 }
579
580
581 /* Debugging info. */
582
583 static void
584 df_rd_start_dump (FILE *file)
585 {
586 struct df_rd_problem_data *problem_data
587 = (struct df_rd_problem_data *) df_rd->problem_data;
588 unsigned int m = DF_REG_SIZE (df);
589 unsigned int regno;
590
591 if (!df_rd->block_info)
592 return;
593
594 fprintf (file, ";; Reaching defs:\n");
595
596 fprintf (file, ";; sparse invalidated \t");
597 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
598 fprintf (file, ";; dense invalidated \t");
599 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
600
601 fprintf (file, ";; reg->defs[] map:\t");
602 for (regno = 0; regno < m; regno++)
603 if (DF_DEFS_COUNT (regno))
604 fprintf (file, "%d[%d,%d] ", regno,
605 DF_DEFS_BEGIN (regno),
606 DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1);
607 fprintf (file, "\n");
608 }
609
610
611 static void
612 df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
613 {
614 bitmap_head tmp;
615 unsigned int regno;
616 unsigned int m = DF_REG_SIZE (df);
617 bool first_reg = true;
618
619 fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
620
621 bitmap_initialize (&tmp, &df_bitmap_obstack);
622 for (regno = 0; regno < m; regno++)
623 {
624 if (HARD_REGISTER_NUM_P (regno)
625 && (df->changeable_flags & DF_NO_HARD_REGS))
626 continue;
627 bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
628 bitmap_and_into (&tmp, defs_set);
629 if (! bitmap_empty_p (&tmp))
630 {
631 bitmap_iterator bi;
632 unsigned int ix;
633 bool first_def = true;
634
635 if (! first_reg)
636 fprintf (file, ",");
637 first_reg = false;
638
639 fprintf (file, "%u[", regno);
640 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi)
641 {
642 fprintf (file, "%s%u", first_def ? "" : ",", ix);
643 first_def = false;
644 }
645 fprintf (file, "]");
646 }
647 bitmap_clear (&tmp);
648 }
649
650 fprintf (file, "\n");
651 bitmap_clear (&tmp);
652 }
653
654 /* Debugging info at top of bb. */
655
656 static void
657 df_rd_top_dump (basic_block bb, FILE *file)
658 {
659 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
660 if (!bb_info)
661 return;
662
663 df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file);
664 df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file);
665 df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file);
666 }
667
668
669 /* Debugging info at bottom of bb. */
670
671 static void
672 df_rd_bottom_dump (basic_block bb, FILE *file)
673 {
674 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
675 if (!bb_info)
676 return;
677
678 df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file);
679 }
680
681 /* All of the information associated with every instance of the problem. */
682
683 static struct df_problem problem_RD =
684 {
685 DF_RD, /* Problem id. */
686 DF_FORWARD, /* Direction. */
687 df_rd_alloc, /* Allocate the problem specific data. */
688 NULL, /* Reset global information. */
689 df_rd_free_bb_info, /* Free basic block info. */
690 df_rd_local_compute, /* Local compute function. */
691 df_rd_init_solution, /* Init the solution specific data. */
692 df_worklist_dataflow, /* Worklist solver. */
693 NULL, /* Confluence operator 0. */
694 df_rd_confluence_n, /* Confluence operator n. */
695 df_rd_transfer_function, /* Transfer function. */
696 NULL, /* Finalize function. */
697 df_rd_free, /* Free all of the problem information. */
698 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
699 df_rd_start_dump, /* Debugging. */
700 df_rd_top_dump, /* Debugging start block. */
701 df_rd_bottom_dump, /* Debugging end block. */
702 NULL, /* Debugging start insn. */
703 NULL, /* Debugging end insn. */
704 NULL, /* Incremental solution verify start. */
705 NULL, /* Incremental solution verify end. */
706 NULL, /* Dependent problem. */
707 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
708 TV_DF_RD, /* Timing variable. */
709 true /* Reset blocks on dropping out of blocks_to_analyze. */
710 };
711
712
713
714 /* Create a new RD instance and add it to the existing instance
715 of DF. */
716
717 void
718 df_rd_add_problem (void)
719 {
720 df_add_problem (&problem_RD);
721 }
722
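/* A pass that wants this information typically enables the problem and
   then triggers a (re)analysis, roughly (sketch only; df_analyze is
   declared in df.h):

     df_rd_add_problem ();
     df_analyze ();

   after which the in/out sets of df_rd_get_bb_info (bb->index) give,
   for every block, the DF_REF_IDs of the defs reaching its start and
   its end.  */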
723
724 \f
725 /*----------------------------------------------------------------------------
726 LIVE REGISTERS
727
728 Find the locations in the function where any use of a pseudo can
729 reach in the backwards direction. In and out bitvectors are built
730 for each basic block. The regno is used to index into these sets.
731 See df.h for details.
732 ----------------------------------------------------------------------------*/
733
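/* Written as the classic backward liveness system, with one bit per
   register number:

     USE[bb] = regs used in bb before being wholly set in bb
     DEF[bb] = regs wholly set in bb
     OUT[bb] = union over successors s of IN[s]  (call-clobbered regs are
               removed across EH edges; df->hardware_regs_used is always added)
     IN[bb]  = USE[bb] | (OUT[bb] - DEF[bb])  */
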
734 /* Private data used to verify the solution for this problem. */
735 struct df_lr_problem_data
736 {
737 bitmap_head *in;
738 bitmap_head *out;
739 /* An obstack for the bitmaps we need for this problem. */
740 bitmap_obstack lr_bitmaps;
741 };
742
743 /* Free basic block info. */
744
745 static void
746 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
747 void *vbb_info)
748 {
749 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
750 if (bb_info)
751 {
752 bitmap_clear (&bb_info->use);
753 bitmap_clear (&bb_info->def);
754 bitmap_clear (&bb_info->in);
755 bitmap_clear (&bb_info->out);
756 }
757 }
758
759
760 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
761 not touched unless the block is new. */
762
763 static void
764 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
765 {
766 unsigned int bb_index;
767 bitmap_iterator bi;
768 struct df_lr_problem_data *problem_data;
769
770 df_grow_bb_info (df_lr);
771 if (df_lr->problem_data)
772 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
773 else
774 {
775 problem_data = XNEW (struct df_lr_problem_data);
776 df_lr->problem_data = problem_data;
777
778 problem_data->out = NULL;
779 problem_data->in = NULL;
780 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
781 }
782
783 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
784 {
785 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
786
787 /* When bitmaps are already initialized, just clear them. */
788 if (bb_info->use.obstack)
789 {
790 bitmap_clear (&bb_info->def);
791 bitmap_clear (&bb_info->use);
792 }
793 else
794 {
795 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
796 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
797 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
798 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
799 }
800 }
801
802 df_lr->optional_p = false;
803 }
804
805
806 /* Reset the global solution for recalculation. */
807
808 static void
809 df_lr_reset (bitmap all_blocks)
810 {
811 unsigned int bb_index;
812 bitmap_iterator bi;
813
814 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
815 {
816 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
817 gcc_assert (bb_info);
818 bitmap_clear (&bb_info->in);
819 bitmap_clear (&bb_info->out);
820 }
821 }
822
823
824 /* Compute local live register info for basic block BB. */
825
826 static void
827 df_lr_bb_local_compute (unsigned int bb_index)
828 {
829 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
830 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
831 rtx_insn *insn;
832 df_ref def, use;
833
834 /* Process the registers set in an exception handler. */
835 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
836 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
837 {
838 unsigned int dregno = DF_REF_REGNO (def);
839 bitmap_set_bit (&bb_info->def, dregno);
840 bitmap_clear_bit (&bb_info->use, dregno);
841 }
842
843 /* Process the hardware registers that are always live. */
844 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
845 /* Add use to set of uses in this BB. */
846 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
847 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
848
849 FOR_BB_INSNS_REVERSE (bb, insn)
850 {
851 if (!NONDEBUG_INSN_P (insn))
852 continue;
853
854 df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
855 FOR_EACH_INSN_INFO_DEF (def, insn_info)
856 /* If the def is to only part of the reg, it does
857 not kill the other defs that reach here. */
858 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
859 {
860 unsigned int dregno = DF_REF_REGNO (def);
861 bitmap_set_bit (&bb_info->def, dregno);
862 bitmap_clear_bit (&bb_info->use, dregno);
863 }
864
865 FOR_EACH_INSN_INFO_USE (use, insn_info)
866 /* Add use to set of uses in this BB. */
867 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
868 }
869
870 /* Process the registers set in an exception handler or the hard
871 frame pointer if this block is the target of a non local
872 goto. */
873 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
874 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
875 {
876 unsigned int dregno = DF_REF_REGNO (def);
877 bitmap_set_bit (&bb_info->def, dregno);
878 bitmap_clear_bit (&bb_info->use, dregno);
879 }
880
881 #ifdef EH_USES
882 /* Process the uses that are live into an exception handler. */
883 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
884 /* Add use to set of uses in this BB. */
885 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
886 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
887 #endif
888
889 /* If the df_live problem is not defined, such as at -O0 and -O1, we
890 still need to keep the luids up to date. This is normally done
891 in the df_live problem since this problem has a forwards
892 scan. */
893 if (!df_live)
894 df_recompute_luids (bb);
895 }
896
897
898 /* Compute local live register info for each basic block within BLOCKS. */
899
900 static void
901 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
902 {
903 unsigned int bb_index, i;
904 bitmap_iterator bi;
905
906 bitmap_clear (&df->hardware_regs_used);
907
908 /* The all-important stack pointer must always be live. */
909 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
910
911 /* Global regs are always live, too. */
912 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
913 if (global_regs[i])
914 bitmap_set_bit (&df->hardware_regs_used, i);
915
916 /* Before reload, there are a few registers that must be forced
917 live everywhere -- which might not already be the case for
918 blocks within infinite loops. */
919 if (!reload_completed)
920 {
921 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
922 /* Any reference to any pseudo before reload is a potential
923 reference of the frame pointer. */
924 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
925
926 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
927 /* Pseudos with argument area equivalences may require
928 reloading via the argument pointer. */
929 if (fixed_regs[ARG_POINTER_REGNUM])
930 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
931 #endif
932
933 /* Any constant, or pseudo with constant equivalences, may
934 require reloading from memory using the pic register. */
935 if (pic_offset_table_regnum != INVALID_REGNUM
936 && fixed_regs[pic_offset_table_regnum])
937 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
938 }
939
940 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
941 {
942 if (bb_index == EXIT_BLOCK)
943 {
944 /* The exit block is special for this problem and its bits are
945 computed from thin air. */
946 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
947 bitmap_copy (&bb_info->use, df->exit_block_uses);
948 }
949 else
950 df_lr_bb_local_compute (bb_index);
951 }
952
953 bitmap_clear (df_lr->out_of_date_transfer_functions);
954 }
955
956
957 /* Initialize the solution vectors. */
958
959 static void
960 df_lr_init (bitmap all_blocks)
961 {
962 unsigned int bb_index;
963 bitmap_iterator bi;
964
965 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
966 {
967 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
968 bitmap_copy (&bb_info->in, &bb_info->use);
969 bitmap_clear (&bb_info->out);
970 }
971 }
972
973
974 /* Confluence function that processes infinite loops. This might be a
975 noreturn function that throws. And even if it isn't, getting the
976 unwind info right helps debugging. */
977 static void
978 df_lr_confluence_0 (basic_block bb)
979 {
980 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
981 if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
982 bitmap_copy (op1, &df->hardware_regs_used);
983 }
984
985
986 /* Confluence function that ignores fake edges. */
987
988 static bool
989 df_lr_confluence_n (edge e)
990 {
991 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
992 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
993 bool changed = false;
994
995 /* Call-clobbered registers die across exception and call edges. */
996 /* ??? Abnormal call edges ignored for the moment, as this gets
997 confused by sibling call edges, which crashes reg-stack. */
998 if (e->flags & EDGE_EH)
999 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
1000 else
1001 changed = bitmap_ior_into (op1, op2);
1002
1003 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
1004 return changed;
1005 }
1006
1007
1008 /* Transfer function. */
1009
1010 static bool
1011 df_lr_transfer_function (int bb_index)
1012 {
1013 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1014 bitmap in = &bb_info->in;
1015 bitmap out = &bb_info->out;
1016 bitmap use = &bb_info->use;
1017 bitmap def = &bb_info->def;
1018
1019 return bitmap_ior_and_compl (in, use, out, def);
1020 }
1021
1022
1023 /* Run the fast dce as a side effect of building LR. */
1024
1025 static void
1026 df_lr_finalize (bitmap all_blocks)
1027 {
1028 df_lr->solutions_dirty = false;
1029 if (df->changeable_flags & DF_LR_RUN_DCE)
1030 {
1031 run_fast_df_dce ();
1032
1033 /* If dce deletes some instructions, we need to recompute the lr
1034 solution before proceeding further. The problem is that fast
1035         dce is a pessimistic dataflow algorithm.  In the case where
1036 it deletes a statement S inside of a loop, the uses inside of
1037 S may not be deleted from the dataflow solution because they
1038 were carried around the loop. While it is conservatively
1039 correct to leave these extra bits, the standards of df
1040 require that we maintain the best possible (least fixed
1041 point) solution. The only way to do that is to redo the
1042 iteration from the beginning. See PR35805 for an
1043 example. */
1044 if (df_lr->solutions_dirty)
1045 {
1046 df_clear_flags (DF_LR_RUN_DCE);
1047 df_lr_alloc (all_blocks);
1048 df_lr_local_compute (all_blocks);
1049 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1050 df_lr_finalize (all_blocks);
1051 df_set_flags (DF_LR_RUN_DCE);
1052 }
1053 }
1054 }
1055
1056
1057 /* Free all storage associated with the problem. */
1058
1059 static void
1060 df_lr_free (void)
1061 {
1062 struct df_lr_problem_data *problem_data
1063 = (struct df_lr_problem_data *) df_lr->problem_data;
1064 if (df_lr->block_info)
1065 {
1066
1067 df_lr->block_info_size = 0;
1068 free (df_lr->block_info);
1069 df_lr->block_info = NULL;
1070 bitmap_obstack_release (&problem_data->lr_bitmaps);
1071 free (df_lr->problem_data);
1072 df_lr->problem_data = NULL;
1073 }
1074
1075 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1076 free (df_lr);
1077 }
1078
1079
1080 /* Debugging info at top of bb. */
1081
1082 static void
1083 df_lr_top_dump (basic_block bb, FILE *file)
1084 {
1085 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1086 struct df_lr_problem_data *problem_data;
1087 if (!bb_info)
1088 return;
1089
1090 fprintf (file, ";; lr in \t");
1091 df_print_regset (file, &bb_info->in);
1092 if (df_lr->problem_data)
1093 {
1094 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1095 if (problem_data->in)
1096 {
1097 fprintf (file, ";; old in \t");
1098 df_print_regset (file, &problem_data->in[bb->index]);
1099 }
1100 }
1101 fprintf (file, ";; lr use \t");
1102 df_print_regset (file, &bb_info->use);
1103 fprintf (file, ";; lr def \t");
1104 df_print_regset (file, &bb_info->def);
1105 }
1106
1107
1108 /* Debugging info at bottom of bb. */
1109
1110 static void
1111 df_lr_bottom_dump (basic_block bb, FILE *file)
1112 {
1113 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1114 struct df_lr_problem_data *problem_data;
1115 if (!bb_info)
1116 return;
1117
1118 fprintf (file, ";; lr out \t");
1119 df_print_regset (file, &bb_info->out);
1120 if (df_lr->problem_data)
1121 {
1122 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1123 if (problem_data->out)
1124 {
1125 fprintf (file, ";; old out \t");
1126 df_print_regset (file, &problem_data->out[bb->index]);
1127 }
1128 }
1129 }
1130
1131
1132 /* Build the datastructure to verify that the solution to the dataflow
1133 equations is not dirty. */
1134
1135 static void
1136 df_lr_verify_solution_start (void)
1137 {
1138 basic_block bb;
1139 struct df_lr_problem_data *problem_data;
1140 if (df_lr->solutions_dirty)
1141 return;
1142
1143 /* Set it true so that the solution is recomputed. */
1144 df_lr->solutions_dirty = true;
1145
1146 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1147 problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
1148 problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
1149
1150 FOR_ALL_BB_FN (bb, cfun)
1151 {
1152 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1153 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1154 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1155 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1156 }
1157 }
1158
1159
1160 /* Compare the saved datastructure and the new solution to the dataflow
1161 equations. */
1162
1163 static void
1164 df_lr_verify_solution_end (void)
1165 {
1166 struct df_lr_problem_data *problem_data;
1167 basic_block bb;
1168
1169 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1170
1171 if (!problem_data->out)
1172 return;
1173
1174 if (df_lr->solutions_dirty)
1175 /* Do not check if the solution is still dirty. See the comment
1176 in df_lr_finalize for details. */
1177 df_lr->solutions_dirty = false;
1178 else
1179 FOR_ALL_BB_FN (bb, cfun)
1180 {
1181 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1182 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1183 {
1184 /*df_dump (stderr);*/
1185 gcc_unreachable ();
1186 }
1187 }
1188
1189 /* Cannot delete them immediately because you may want to dump them
1190 if the comparison fails. */
1191 FOR_ALL_BB_FN (bb, cfun)
1192 {
1193 bitmap_clear (&problem_data->in[bb->index]);
1194 bitmap_clear (&problem_data->out[bb->index]);
1195 }
1196
1197 free (problem_data->in);
1198 free (problem_data->out);
1199 problem_data->in = NULL;
1200 problem_data->out = NULL;
1201 }
1202
1203
1204 /* All of the information associated with every instance of the problem. */
1205
1206 static struct df_problem problem_LR =
1207 {
1208 DF_LR, /* Problem id. */
1209 DF_BACKWARD, /* Direction. */
1210 df_lr_alloc, /* Allocate the problem specific data. */
1211 df_lr_reset, /* Reset global information. */
1212 df_lr_free_bb_info, /* Free basic block info. */
1213 df_lr_local_compute, /* Local compute function. */
1214 df_lr_init, /* Init the solution specific data. */
1215 df_worklist_dataflow, /* Worklist solver. */
1216 df_lr_confluence_0, /* Confluence operator 0. */
1217 df_lr_confluence_n, /* Confluence operator n. */
1218 df_lr_transfer_function, /* Transfer function. */
1219 df_lr_finalize, /* Finalize function. */
1220 df_lr_free, /* Free all of the problem information. */
1221 NULL, /* Remove this problem from the stack of dataflow problems. */
1222 NULL, /* Debugging. */
1223 df_lr_top_dump, /* Debugging start block. */
1224 df_lr_bottom_dump, /* Debugging end block. */
1225 NULL, /* Debugging start insn. */
1226 NULL, /* Debugging end insn. */
1227 df_lr_verify_solution_start,/* Incremental solution verify start. */
1228 df_lr_verify_solution_end, /* Incremental solution verify end. */
1229 NULL, /* Dependent problem. */
1230 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1231 TV_DF_LR, /* Timing variable. */
1232 false /* Reset blocks on dropping out of blocks_to_analyze. */
1233 };
1234
1235
1236 /* Create a new DATAFLOW instance and add it to an existing instance
1237 of DF. The returned structure is what is used to get at the
1238 solution. */
1239
1240 void
1241 df_lr_add_problem (void)
1242 {
1243 df_add_problem (&problem_LR);
1244 /* These will be initialized when df_scan_blocks processes each
1245 block. */
1246 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1247 }
1248
1249
1250 /* Verify that all of the lr related info is consistent and
1251 correct. */
1252
1253 void
1254 df_lr_verify_transfer_functions (void)
1255 {
1256 basic_block bb;
1257 bitmap_head saved_def;
1258 bitmap_head saved_use;
1259 bitmap_head all_blocks;
1260
1261 if (!df)
1262 return;
1263
1264 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1265 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1266 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1267
1268 FOR_ALL_BB_FN (bb, cfun)
1269 {
1270 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1271 bitmap_set_bit (&all_blocks, bb->index);
1272
1273 if (bb_info)
1274 {
1275 /* Make a copy of the transfer functions and then compute
1276 new ones to see if the transfer functions have
1277 changed. */
1278 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1279 bb->index))
1280 {
1281 bitmap_copy (&saved_def, &bb_info->def);
1282 bitmap_copy (&saved_use, &bb_info->use);
1283 bitmap_clear (&bb_info->def);
1284 bitmap_clear (&bb_info->use);
1285
1286 df_lr_bb_local_compute (bb->index);
1287 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1288 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1289 }
1290 }
1291 else
1292 {
1293 /* If we do not have basic block info, the block must be in
1294              the list of dirty blocks or else someone has added a
1295 block behind our backs. */
1296 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1297 bb->index));
1298 }
1299 /* Make sure no one created a block without following
1300 procedures. */
1301 gcc_assert (df_scan_get_bb_info (bb->index));
1302 }
1303
1304 /* Make sure there are no dirty bits in blocks that have been deleted. */
1305 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1306 &all_blocks));
1307
1308 bitmap_clear (&saved_def);
1309 bitmap_clear (&saved_use);
1310 bitmap_clear (&all_blocks);
1311 }
1312
1313
1314 \f
1315 /*----------------------------------------------------------------------------
1316 LIVE AND MUST-INITIALIZED REGISTERS.
1317
1318 This problem first computes the IN and OUT bitvectors for the
1319 must-initialized registers problems, which is a forward problem.
1320 It gives the set of registers for which we MUST have an available
1321 definition on any path from the entry block to the entry/exit of
1322 a basic block. Sets generate a definition, while clobbers kill
1323 a definition.
1324
1325 In and out bitvectors are built for each basic block and are indexed by
1326 regnum (see df.h for details). In and out bitvectors in struct
1327    df_live_bb_info actually refer to the must-initialized problem.
1328
1329 Then, the in and out sets for the LIVE problem itself are computed.
1330 These are the logical AND of the IN and OUT sets from the LR problem
1331 and the must-initialized problem.
1332 ----------------------------------------------------------------------------*/
1333
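/* Written as equations, the forward must-initialized part is

     GEN[bb]  = regs given a value (even partially or conditionally) in bb,
                not counting clobbers
     KILL[bb] = regs wholly must-clobbered in bb
     IN[bb]   = union over predecessors p of OUT[p]
     OUT[bb]  = (GEN[bb] & LR_OUT[bb]) | ((IN[bb] & LR_IN[bb]) - KILL[bb])

   and the final DF_LIVE solution is this IN/OUT ANDed with the LR
   IN/OUT of each block.  */
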
1334 /* Private data used to verify the solution for this problem. */
1335 struct df_live_problem_data
1336 {
1337 bitmap_head *in;
1338 bitmap_head *out;
1339 /* An obstack for the bitmaps we need for this problem. */
1340 bitmap_obstack live_bitmaps;
1341 };
1342
1343 /* Scratch var used by transfer functions. This is used to implement
1344 an optimization to reduce the amount of space used to compute the
1345 combined lr and live analysis. */
1346 static bitmap_head df_live_scratch;
1347
1348
1349 /* Free basic block info. */
1350
1351 static void
1352 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1353 void *vbb_info)
1354 {
1355 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1356 if (bb_info)
1357 {
1358 bitmap_clear (&bb_info->gen);
1359 bitmap_clear (&bb_info->kill);
1360 bitmap_clear (&bb_info->in);
1361 bitmap_clear (&bb_info->out);
1362 }
1363 }
1364
1365
1366 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1367 not touched unless the block is new. */
1368
1369 static void
1370 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1371 {
1372 unsigned int bb_index;
1373 bitmap_iterator bi;
1374 struct df_live_problem_data *problem_data;
1375
1376 if (df_live->problem_data)
1377 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1378 else
1379 {
1380 problem_data = XNEW (struct df_live_problem_data);
1381 df_live->problem_data = problem_data;
1382
1383 problem_data->out = NULL;
1384 problem_data->in = NULL;
1385 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1386 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1387 }
1388
1389 df_grow_bb_info (df_live);
1390
1391 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1392 {
1393 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1394
1395 /* When bitmaps are already initialized, just clear them. */
1396 if (bb_info->kill.obstack)
1397 {
1398 bitmap_clear (&bb_info->kill);
1399 bitmap_clear (&bb_info->gen);
1400 }
1401 else
1402 {
1403 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1404 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1405 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1406 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1407 }
1408 }
1409 df_live->optional_p = (optimize <= 1);
1410 }
1411
1412
1413 /* Reset the global solution for recalculation. */
1414
1415 static void
1416 df_live_reset (bitmap all_blocks)
1417 {
1418 unsigned int bb_index;
1419 bitmap_iterator bi;
1420
1421 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1422 {
1423 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1424 gcc_assert (bb_info);
1425 bitmap_clear (&bb_info->in);
1426 bitmap_clear (&bb_info->out);
1427 }
1428 }
1429
1430
1431 /* Compute local uninitialized register info for basic block BB. */
1432
1433 static void
1434 df_live_bb_local_compute (unsigned int bb_index)
1435 {
1436 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1437 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1438 rtx_insn *insn;
1439 df_ref def;
1440 int luid = 0;
1441
1442 FOR_BB_INSNS (bb, insn)
1443 {
1444 unsigned int uid = INSN_UID (insn);
1445 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1446
1447 /* Inserting labels does not always trigger the incremental
1448 rescanning. */
1449 if (!insn_info)
1450 {
1451 gcc_assert (!INSN_P (insn));
1452 insn_info = df_insn_create_insn_record (insn);
1453 }
1454
1455 DF_INSN_INFO_LUID (insn_info) = luid;
1456 if (!INSN_P (insn))
1457 continue;
1458
1459 luid++;
1460 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1461 {
1462 unsigned int regno = DF_REF_REGNO (def);
1463
1464 if (DF_REF_FLAGS_IS_SET (def,
1465 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1466	      /* All partial or conditional defs
1467 seen are included in the gen set. */
1468 bitmap_set_bit (&bb_info->gen, regno);
1469 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1470 /* Only must clobbers for the entire reg destroy the
1471 value. */
1472 bitmap_set_bit (&bb_info->kill, regno);
1473 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1474 bitmap_set_bit (&bb_info->gen, regno);
1475 }
1476 }
1477
1478 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1479 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1480 }
1481
1482
1483 /* Compute local uninitialized register info. */
1484
1485 static void
1486 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1487 {
1488 unsigned int bb_index;
1489 bitmap_iterator bi;
1490
1491 df_grow_insn_info ();
1492
1493 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1494 0, bb_index, bi)
1495 {
1496 df_live_bb_local_compute (bb_index);
1497 }
1498
1499 bitmap_clear (df_live->out_of_date_transfer_functions);
1500 }
1501
1502
1503 /* Initialize the solution vectors. */
1504
1505 static void
1506 df_live_init (bitmap all_blocks)
1507 {
1508 unsigned int bb_index;
1509 bitmap_iterator bi;
1510
1511 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1512 {
1513 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1514 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1515
1516 /* No register may reach a location where it is not used. Thus
1517 we trim the rr result to the places where it is used. */
1518 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1519 bitmap_clear (&bb_info->in);
1520 }
1521 }
1522
1523 /* Forward confluence function that ignores fake edges. */
1524
1525 static bool
1526 df_live_confluence_n (edge e)
1527 {
1528 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1529 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1530
1531 if (e->flags & EDGE_FAKE)
1532 return false;
1533
1534 return bitmap_ior_into (op1, op2);
1535 }
1536
1537
1538 /* Transfer function for the forwards must-initialized problem. */
1539
1540 static bool
1541 df_live_transfer_function (int bb_index)
1542 {
1543 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1544 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1545 bitmap in = &bb_info->in;
1546 bitmap out = &bb_info->out;
1547 bitmap gen = &bb_info->gen;
1548 bitmap kill = &bb_info->kill;
1549
1550 /* We need to use a scratch set here so that the value returned from this
1551 function invocation properly reflects whether the sets changed in a
1552 significant way; i.e. not just because the lr set was anded in. */
1553 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1554 /* No register may reach a location where it is not used. Thus
1555 we trim the rr result to the places where it is used. */
1556 bitmap_and_into (in, &bb_lr_info->in);
1557
1558 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1559 }
1560
1561
1562 /* And the LR info with the must-initialized registers, to produce the LIVE info. */
1563
1564 static void
1565 df_live_finalize (bitmap all_blocks)
1566 {
1567
1568 if (df_live->solutions_dirty)
1569 {
1570 bitmap_iterator bi;
1571 unsigned int bb_index;
1572
1573 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1574 {
1575 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1576 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1577
1578 /* No register may reach a location where it is not used. Thus
1579 we trim the rr result to the places where it is used. */
1580 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1581 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1582 }
1583
1584 df_live->solutions_dirty = false;
1585 }
1586 }
1587
1588
1589 /* Free all storage associated with the problem. */
1590
1591 static void
1592 df_live_free (void)
1593 {
1594 struct df_live_problem_data *problem_data
1595 = (struct df_live_problem_data *) df_live->problem_data;
1596 if (df_live->block_info)
1597 {
1598 df_live->block_info_size = 0;
1599 free (df_live->block_info);
1600 df_live->block_info = NULL;
1601 bitmap_clear (&df_live_scratch);
1602 bitmap_obstack_release (&problem_data->live_bitmaps);
1603 free (problem_data);
1604 df_live->problem_data = NULL;
1605 }
1606 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1607 free (df_live);
1608 }
1609
1610
1611 /* Debugging info at top of bb. */
1612
1613 static void
1614 df_live_top_dump (basic_block bb, FILE *file)
1615 {
1616 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1617 struct df_live_problem_data *problem_data;
1618
1619 if (!bb_info)
1620 return;
1621
1622 fprintf (file, ";; live in \t");
1623 df_print_regset (file, &bb_info->in);
1624 if (df_live->problem_data)
1625 {
1626 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1627 if (problem_data->in)
1628 {
1629 fprintf (file, ";; old in \t");
1630 df_print_regset (file, &problem_data->in[bb->index]);
1631 }
1632 }
1633 fprintf (file, ";; live gen \t");
1634 df_print_regset (file, &bb_info->gen);
1635 fprintf (file, ";; live kill\t");
1636 df_print_regset (file, &bb_info->kill);
1637 }
1638
1639
1640 /* Debugging info at bottom of bb. */
1641
1642 static void
1643 df_live_bottom_dump (basic_block bb, FILE *file)
1644 {
1645 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1646 struct df_live_problem_data *problem_data;
1647
1648 if (!bb_info)
1649 return;
1650
1651 fprintf (file, ";; live out \t");
1652 df_print_regset (file, &bb_info->out);
1653 if (df_live->problem_data)
1654 {
1655 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1656 if (problem_data->out)
1657 {
1658 fprintf (file, ";; old out \t");
1659 df_print_regset (file, &problem_data->out[bb->index]);
1660 }
1661 }
1662 }
1663
1664
1665 /* Build the datastructure to verify that the solution to the dataflow
1666 equations is not dirty. */
1667
1668 static void
1669 df_live_verify_solution_start (void)
1670 {
1671 basic_block bb;
1672 struct df_live_problem_data *problem_data;
1673 if (df_live->solutions_dirty)
1674 return;
1675
1676 /* Set it true so that the solution is recomputed. */
1677 df_live->solutions_dirty = true;
1678
1679 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1680 problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
1681 problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
1682
1683 FOR_ALL_BB_FN (bb, cfun)
1684 {
1685 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1686 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1687 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1688 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1689 }
1690 }
1691
1692
1693 /* Compare the saved datastructure and the new solution to the dataflow
1694 equations. */
1695
1696 static void
1697 df_live_verify_solution_end (void)
1698 {
1699 struct df_live_problem_data *problem_data;
1700 basic_block bb;
1701
1702 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1703 if (!problem_data->out)
1704 return;
1705
1706 FOR_ALL_BB_FN (bb, cfun)
1707 {
1708 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1709 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1710 {
1711 /*df_dump (stderr);*/
1712 gcc_unreachable ();
1713 }
1714 }
1715
1716 /* Cannot delete them immediately because you may want to dump them
1717 if the comparison fails. */
1718 FOR_ALL_BB_FN (bb, cfun)
1719 {
1720 bitmap_clear (&problem_data->in[bb->index]);
1721 bitmap_clear (&problem_data->out[bb->index]);
1722 }
1723
1724 free (problem_data->in);
1725 free (problem_data->out);
1726 free (problem_data);
1727 df_live->problem_data = NULL;
1728 }
1729
1730
1731 /* All of the information associated with every instance of the problem. */
1732
1733 static struct df_problem problem_LIVE =
1734 {
1735 DF_LIVE, /* Problem id. */
1736 DF_FORWARD, /* Direction. */
1737 df_live_alloc, /* Allocate the problem specific data. */
1738 df_live_reset, /* Reset global information. */
1739 df_live_free_bb_info, /* Free basic block info. */
1740 df_live_local_compute, /* Local compute function. */
1741 df_live_init, /* Init the solution specific data. */
1742 df_worklist_dataflow, /* Worklist solver. */
1743 NULL, /* Confluence operator 0. */
1744 df_live_confluence_n, /* Confluence operator n. */
1745 df_live_transfer_function, /* Transfer function. */
1746 df_live_finalize, /* Finalize function. */
1747 df_live_free, /* Free all of the problem information. */
1748 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1749 NULL, /* Debugging. */
1750 df_live_top_dump, /* Debugging start block. */
1751 df_live_bottom_dump, /* Debugging end block. */
1752 NULL, /* Debugging start insn. */
1753 NULL, /* Debugging end insn. */
1754 df_live_verify_solution_start,/* Incremental solution verify start. */
1755 df_live_verify_solution_end, /* Incremental solution verify end. */
1756 &problem_LR, /* Dependent problem. */
1757 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1758 TV_DF_LIVE, /* Timing variable. */
1759 false /* Reset blocks on dropping out of blocks_to_analyze. */
1760 };
1761
1762
1763 /* Create a new DATAFLOW instance and add it to an existing instance
1764 of DF. The returned structure is what is used to get at the
1765 solution. */
1766
1767 void
1768 df_live_add_problem (void)
1769 {
1770 df_add_problem (&problem_LIVE);
1771 /* These will be initialized when df_scan_blocks processes each
1772 block. */
1773 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1774 }
1775
1776
1777 /* Set all of the blocks as dirty. This needs to be done if this
1778 problem is added after all of the insns have been scanned. */
1779
1780 void
1781 df_live_set_all_dirty (void)
1782 {
1783 basic_block bb;
1784 FOR_ALL_BB_FN (bb, cfun)
1785 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1786 bb->index);
1787 }
1788
1789
1790 /* Verify that all of the lr related info is consistent and
1791 correct. */
1792
1793 void
1794 df_live_verify_transfer_functions (void)
1795 {
1796 basic_block bb;
1797 bitmap_head saved_gen;
1798 bitmap_head saved_kill;
1799 bitmap_head all_blocks;
1800
1801 if (!df)
1802 return;
1803
1804 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1805 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1806 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1807
1808 df_grow_insn_info ();
1809
1810 FOR_ALL_BB_FN (bb, cfun)
1811 {
1812 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1813 bitmap_set_bit (&all_blocks, bb->index);
1814
1815 if (bb_info)
1816 {
1817 /* Make a copy of the transfer functions and then compute
1818 new ones to see if the transfer functions have
1819 changed. */
1820 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1821 bb->index))
1822 {
1823 bitmap_copy (&saved_gen, &bb_info->gen);
1824 bitmap_copy (&saved_kill, &bb_info->kill);
1825 bitmap_clear (&bb_info->gen);
1826 bitmap_clear (&bb_info->kill);
1827
1828 df_live_bb_local_compute (bb->index);
1829 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1830 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1831 }
1832 }
1833 else
1834 {
1835 /* If we do not have basic block info, the block must be in
1836 the list of dirty blocks or else someone has added a
1837 block behind our backs. */
1838 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1839 bb->index));
1840 }
1841 /* Make sure no one created a block without following
1842 procedures. */
1843 gcc_assert (df_scan_get_bb_info (bb->index));
1844 }
1845
1846 /* Make sure there are no dirty bits in blocks that have been deleted. */
1847 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1848 &all_blocks));
1849 bitmap_clear (&saved_gen);
1850 bitmap_clear (&saved_kill);
1851 bitmap_clear (&all_blocks);
1852 }
1853 \f
1854 /*----------------------------------------------------------------------------
1855 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1856
1857 Link either the defs to the uses and / or the uses to the defs.
1858
1859 These problems are set up like the other dataflow problems so that
1860 they nicely fit into the framework. They are much simpler and only
1861 involve a single traversal of instructions and an examination of
1862 the reaching defs information (the dependent problem).
1863 ----------------------------------------------------------------------------*/
1864
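/* Illustrative sketch only; the helper below is hypothetical and not
   part of df.  It shows the usual client pattern: request UD chains,
   solve, and then walk DF_REF_CHAIN from each use back to its
   reaching defs.  df_chain_add_problem and df_analyze are normally
   called once per pass, not once per insn.  */

static int ATTRIBUTE_UNUSED
df_chain_usage_sketch (rtx_insn *insn)
{
  df_ref use;
  int n_reaching_defs = 0;

  df_chain_add_problem (DF_UD_CHAIN);
  df_analyze ();

  /* Count the defs that reach each use in INSN.  */
  FOR_EACH_INSN_USE (use, insn)
    for (struct df_link *link = DF_REF_CHAIN (use); link; link = link->next)
      n_reaching_defs++;

  return n_reaching_defs;
}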
1865 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1866
1867 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1868
1869 struct df_link *
1870 df_chain_create (df_ref src, df_ref dst)
1871 {
1872 struct df_link *head = DF_REF_CHAIN (src);
1873 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1874
1875 DF_REF_CHAIN (src) = link;
1876 link->next = head;
1877 link->ref = dst;
1878 return link;
1879 }
1880
1881
1882 /* Delete any du or ud chains that start at REF and point to
1883 TARGET. */
1884 static void
1885 df_chain_unlink_1 (df_ref ref, df_ref target)
1886 {
1887 struct df_link *chain = DF_REF_CHAIN (ref);
1888 struct df_link *prev = NULL;
1889
1890 while (chain)
1891 {
1892 if (chain->ref == target)
1893 {
1894 if (prev)
1895 prev->next = chain->next;
1896 else
1897 DF_REF_CHAIN (ref) = chain->next;
1898 pool_free (df_chain->block_pool, chain);
1899 return;
1900 }
1901 prev = chain;
1902 chain = chain->next;
1903 }
1904 }
1905
1906
1907 /* Delete the du or ud chains that start at or point to REF. */
1908
1909 void
1910 df_chain_unlink (df_ref ref)
1911 {
1912 struct df_link *chain = DF_REF_CHAIN (ref);
1913 while (chain)
1914 {
1915 struct df_link *next = chain->next;
1916 /* Delete the other side if it exists. */
1917 df_chain_unlink_1 (chain->ref, ref);
1918 pool_free (df_chain->block_pool, chain);
1919 chain = next;
1920 }
1921 DF_REF_CHAIN (ref) = NULL;
1922 }
1923
1924
1925 /* Copy the du or ud chain starting at FROM_REF and attach it to
1926 TO_REF. */
1927
1928 void
1929 df_chain_copy (df_ref to_ref,
1930 struct df_link *from_ref)
1931 {
1932 while (from_ref)
1933 {
1934 df_chain_create (to_ref, from_ref->ref);
1935 from_ref = from_ref->next;
1936 }
1937 }
1938
1939
1940 /* Remove this problem from the stack of dataflow problems. */
1941
1942 static void
1943 df_chain_remove_problem (void)
1944 {
1945 bitmap_iterator bi;
1946 unsigned int bb_index;
1947
1948 /* Wholesale destruction of the old chains. */
1949 if (df_chain->block_pool)
1950 free_alloc_pool (df_chain->block_pool);
1951
1952 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1953 {
1954 rtx_insn *insn;
1955 df_ref def, use;
1956 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1957
1958 if (df_chain_problem_p (DF_DU_CHAIN))
1959 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1960 DF_REF_CHAIN (def) = NULL;
1961 if (df_chain_problem_p (DF_UD_CHAIN))
1962 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1963 DF_REF_CHAIN (use) = NULL;
1964
1965 FOR_BB_INSNS (bb, insn)
1966 if (INSN_P (insn))
1967 {
1968 df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1969 if (df_chain_problem_p (DF_DU_CHAIN))
1970 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1971 DF_REF_CHAIN (def) = NULL;
1972 if (df_chain_problem_p (DF_UD_CHAIN))
1973 {
1974 FOR_EACH_INSN_INFO_USE (use, insn_info)
1975 DF_REF_CHAIN (use) = NULL;
1976 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1977 DF_REF_CHAIN (use) = NULL;
1978 }
1979 }
1980 }
1981
1982 bitmap_clear (df_chain->out_of_date_transfer_functions);
1983 df_chain->block_pool = NULL;
1984 }
1985
1986
1987 /* Remove the chain problem completely. */
1988
1989 static void
1990 df_chain_fully_remove_problem (void)
1991 {
1992 df_chain_remove_problem ();
1993 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
1994 free (df_chain);
1995 }
1996
1997
1998 /* Create def-use or use-def chains. */
1999
2000 static void
2001 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2002 {
2003 df_chain_remove_problem ();
2004 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
2005 sizeof (struct df_link), 50);
2006 df_chain->optional_p = true;
2007 }
2008
2009
2010 /* Reset all of the chains when the set of basic blocks changes. */
2011
2012 static void
2013 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2014 {
2015 df_chain_remove_problem ();
2016 }
2017
2018
2019 /* Create the chains for a list of USEs. */
2020
2021 static void
2022 df_chain_create_bb_process_use (bitmap local_rd,
2023 df_ref use,
2024 int top_flag)
2025 {
2026 bitmap_iterator bi;
2027 unsigned int def_index;
2028
2029 for (; use; use = DF_REF_NEXT_LOC (use))
2030 {
2031 unsigned int uregno = DF_REF_REGNO (use);
2032 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2033 || (uregno >= FIRST_PSEUDO_REGISTER))
2034 {
2035 /* Do not want to go through this for an uninitialized var. */
2036 int count = DF_DEFS_COUNT (uregno);
2037 if (count)
2038 {
2039 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2040 {
2041 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2042 unsigned int last_index = first_index + count - 1;
2043
2044 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2045 {
2046 df_ref def;
2047 if (def_index > last_index)
2048 break;
2049
2050 def = DF_DEFS_GET (def_index);
2051 if (df_chain_problem_p (DF_DU_CHAIN))
2052 df_chain_create (def, use);
2053 if (df_chain_problem_p (DF_UD_CHAIN))
2054 df_chain_create (use, def);
2055 }
2056 }
2057 }
2058 }
2059 }
2060 }
2061
2062
2063 /* Create chains from reaching defs bitmaps for basic block BB. */
2064
2065 static void
2066 df_chain_create_bb (unsigned int bb_index)
2067 {
2068 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
2069 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2070 rtx_insn *insn;
2071 bitmap_head cpy;
2072
2073 bitmap_initialize (&cpy, &bitmap_default_obstack);
2074 bitmap_copy (&cpy, &bb_info->in);
2075 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2076
2077 /* Since we are going forwards, process the artificial uses first,
2078 then the artificial defs. */
2079
2080 #ifdef EH_USES
2081 /* Create the chains for the artificial uses from the EH_USES at the
2082 beginning of the block. */
2083
2084 /* Artificials are only hard regs. */
2085 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2086 df_chain_create_bb_process_use (&cpy,
2087 df_get_artificial_uses (bb->index),
2088 DF_REF_AT_TOP);
2089 #endif
2090
2091 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2092
2093 /* Process the regular instructions next. */
2094 FOR_BB_INSNS (bb, insn)
2095 if (INSN_P (insn))
2096 {
2097 unsigned int uid = INSN_UID (insn);
2098
2099 /* First scan the uses and link them up with the defs that remain
2100 in the cpy vector. */
2101 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2102 if (df->changeable_flags & DF_EQ_NOTES)
2103 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2104
2105 /* Since we are going forwards, process the defs second. */
2106 df_rd_simulate_one_insn (bb, insn, &cpy);
2107 }
2108
2109 /* Create the chains for the artificial uses of the hard registers
2110 at the end of the block. */
2111 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2112 df_chain_create_bb_process_use (&cpy,
2113 df_get_artificial_uses (bb->index),
2114 0);
2115
2116 bitmap_clear (&cpy);
2117 }
2118
2119 /* Create def-use and/or use-def chains from the reaching-defs bitmaps
2120 for the basic blocks in ALL_BLOCKS. */
2121
2122 static void
2123 df_chain_finalize (bitmap all_blocks)
2124 {
2125 unsigned int bb_index;
2126 bitmap_iterator bi;
2127
2128 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2129 {
2130 df_chain_create_bb (bb_index);
2131 }
2132 }
2133
2134
2135 /* Free all storage associated with the problem. */
2136
2137 static void
2138 df_chain_free (void)
2139 {
2140 free_alloc_pool (df_chain->block_pool);
2141 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2142 free (df_chain);
2143 }
2144
2145
2146 /* Debugging info. */
2147
2148 static void
2149 df_chain_bb_dump (basic_block bb, FILE *file, bool top)
2150 {
2151 /* Artificials are only hard regs. */
2152 if (df->changeable_flags & DF_NO_HARD_REGS)
2153 return;
2154 if (df_chain_problem_p (DF_UD_CHAIN))
2155 {
2156 df_ref use;
2157
2158 fprintf (file,
2159 ";; UD chains for artificial uses at %s\n",
2160 top ? "top" : "bottom");
2161 FOR_EACH_ARTIFICIAL_USE (use, bb->index)
2162 if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2163 || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
2164 {
2165 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2166 df_chain_dump (DF_REF_CHAIN (use), file);
2167 fprintf (file, "\n");
2168 }
2169 }
2170 if (df_chain_problem_p (DF_DU_CHAIN))
2171 {
2172 df_ref def;
2173
2174 fprintf (file,
2175 ";; DU chains for artificial defs at %s\n",
2176 top ? "top" : "bottom");
2177 FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
2178 if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
2179 || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
2180 {
2181 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2182 df_chain_dump (DF_REF_CHAIN (def), file);
2183 fprintf (file, "\n");
2184 }
2185 }
2186 }
2187
2188 static void
2189 df_chain_top_dump (basic_block bb, FILE *file)
2190 {
2191 df_chain_bb_dump (bb, file, /*top=*/true);
2192 }
2193
2194 static void
2195 df_chain_bottom_dump (basic_block bb, FILE *file)
2196 {
2197 df_chain_bb_dump (bb, file, /*top=*/false);
2198 }
2199
2200 static void
2201 df_chain_insn_top_dump (const rtx_insn *insn, FILE *file)
2202 {
2203 if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
2204 {
2205 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2206 df_ref use;
2207
2208 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2209 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2210 FOR_EACH_INSN_INFO_USE (use, insn_info)
2211 if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2212 || !(df->changeable_flags & DF_NO_HARD_REGS))
2213 {
2214 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2215 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2216 fprintf (file, "read/write ");
2217 df_chain_dump (DF_REF_CHAIN (use), file);
2218 fprintf (file, "\n");
2219 }
2220 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
2221 if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2222 || !(df->changeable_flags & DF_NO_HARD_REGS))
2223 {
2224 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2225 df_chain_dump (DF_REF_CHAIN (use), file);
2226 fprintf (file, "\n");
2227 }
2228 }
2229 }
2230
2231 static void
2232 df_chain_insn_bottom_dump (const rtx_insn *insn, FILE *file)
2233 {
2234 if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
2235 {
2236 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2237 df_ref def;
2238 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2239 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2240 FOR_EACH_INSN_INFO_DEF (def, insn_info)
2241 if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
2242 || !(df->changeable_flags & DF_NO_HARD_REGS))
2243 {
2244 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2245 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2246 fprintf (file, "read/write ");
2247 df_chain_dump (DF_REF_CHAIN (def), file);
2248 fprintf (file, "\n");
2249 }
2250 fprintf (file, "\n");
2251 }
2252 }
2253
2254 static struct df_problem problem_CHAIN =
2255 {
2256 DF_CHAIN, /* Problem id. */
2257 DF_NONE, /* Direction. */
2258 df_chain_alloc, /* Allocate the problem specific data. */
2259 df_chain_reset, /* Reset global information. */
2260 NULL, /* Free basic block info. */
2261 NULL, /* Local compute function. */
2262 NULL, /* Init the solution specific data. */
2263 NULL, /* Iterative solver. */
2264 NULL, /* Confluence operator 0. */
2265 NULL, /* Confluence operator n. */
2266 NULL, /* Transfer function. */
2267 df_chain_finalize, /* Finalize function. */
2268 df_chain_free, /* Free all of the problem information. */
2269 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2270 NULL, /* Debugging. */
2271 df_chain_top_dump, /* Debugging start block. */
2272 df_chain_bottom_dump, /* Debugging end block. */
2273 df_chain_insn_top_dump, /* Debugging start insn. */
2274 df_chain_insn_bottom_dump, /* Debugging end insn. */
2275 NULL, /* Incremental solution verify start. */
2276 NULL, /* Incremental solution verify end. */
2277 &problem_RD, /* Dependent problem. */
2278 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2279 TV_DF_CHAIN, /* Timing variable. */
2280 false /* Reset blocks on dropping out of blocks_to_analyze. */
2281 };
2282
2283
2284 /* Create a new DATAFLOW instance and add it to an existing instance
2285 of DF. The returned structure is what is used to get at the
2286 solution. */
2287
2288 void
2289 df_chain_add_problem (unsigned int chain_flags)
2290 {
2291 df_add_problem (&problem_CHAIN);
2292 df_chain->local_flags = chain_flags;
2293 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2294 }
2295
2296 #undef df_chain_problem_p
2297
2298 \f
2299 /*----------------------------------------------------------------------------
2300 WORD LEVEL LIVE REGISTERS
2301
2302 Find the locations in the function where any use of a pseudo can
2303 reach in the backwards direction. In and out bitvectors are built
2304 for each basic block. We only track pseudo registers that have a
2305 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2306 contain two bits corresponding to each of the subwords.
2307
2308 ----------------------------------------------------------------------------*/
2309
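/* Illustrative sketch only (hypothetical helper, not used by the
   solver): the bit layout described above.  For a tracked double-word
   pseudo REGNO, bit 2 * REGNO describes the low word and bit
   2 * REGNO + 1 the high word.  */

static bool ATTRIBUTE_UNUSED
df_word_lr_subword_live_p (bitmap live, unsigned int regno, bool high_p)
{
  /* Only double-word pseudos are tracked; callers must treat anything
     else conservatively.  */
  return bitmap_bit_p (live, 2 * regno + (high_p ? 1 : 0));
}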
2310 /* Private data used to verify the solution for this problem. */
2311 struct df_word_lr_problem_data
2312 {
2313 /* An obstack for the bitmaps we need for this problem. */
2314 bitmap_obstack word_lr_bitmaps;
2315 };
2316
2317
2318 /* Free basic block info. */
2319
2320 static void
2321 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2322 void *vbb_info)
2323 {
2324 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2325 if (bb_info)
2326 {
2327 bitmap_clear (&bb_info->use);
2328 bitmap_clear (&bb_info->def);
2329 bitmap_clear (&bb_info->in);
2330 bitmap_clear (&bb_info->out);
2331 }
2332 }
2333
2334
2335 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2336 not touched unless the block is new. */
2337
2338 static void
2339 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2340 {
2341 unsigned int bb_index;
2342 bitmap_iterator bi;
2343 basic_block bb;
2344 struct df_word_lr_problem_data *problem_data
2345 = XNEW (struct df_word_lr_problem_data);
2346
2347 df_word_lr->problem_data = problem_data;
2348
2349 df_grow_bb_info (df_word_lr);
2350
2351 /* Create the mapping from regnos to slots. This does not change
2352 unless the problem is destroyed and recreated. In particular, if
2353 we end up deleting the only insn that used a subreg, we do not
2354 want to redo the mapping because this would invalidate everything
2355 else. */
2356
2357 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2358
2359 FOR_EACH_BB_FN (bb, cfun)
2360 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2361
2362 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2363 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2364
2365 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2366 {
2367 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2368
2369 /* When bitmaps are already initialized, just clear them. */
2370 if (bb_info->use.obstack)
2371 {
2372 bitmap_clear (&bb_info->def);
2373 bitmap_clear (&bb_info->use);
2374 }
2375 else
2376 {
2377 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2378 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2379 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2380 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2381 }
2382 }
2383
2384 df_word_lr->optional_p = true;
2385 }
2386
2387
2388 /* Reset the global solution for recalculation. */
2389
2390 static void
2391 df_word_lr_reset (bitmap all_blocks)
2392 {
2393 unsigned int bb_index;
2394 bitmap_iterator bi;
2395
2396 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2397 {
2398 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2399 gcc_assert (bb_info);
2400 bitmap_clear (&bb_info->in);
2401 bitmap_clear (&bb_info->out);
2402 }
2403 }
2404
2405 /* Examine REF, and if it is for a reg we're interested in, set or
2406 clear the bits corresponding to its subwords from the bitmap
2407 according to IS_SET. LIVE is the bitmap we should update. We do
2408 not track hard regs or pseudos of any size other than 2 *
2409 UNITS_PER_WORD.
2410 We return true if we changed the bitmap, or if we encountered a register
2411 we're not tracking. */
2412
2413 bool
2414 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2415 {
2416 rtx orig_reg = DF_REF_REG (ref);
2417 rtx reg = orig_reg;
2418 enum machine_mode reg_mode;
2419 unsigned regno;
2420 /* Left at -1 for whole accesses. */
2421 int which_subword = -1;
2422 bool changed = false;
2423
2424 if (GET_CODE (reg) == SUBREG)
2425 reg = SUBREG_REG (orig_reg);
2426 regno = REGNO (reg);
2427 reg_mode = GET_MODE (reg);
2428 if (regno < FIRST_PSEUDO_REGISTER
2429 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2430 return true;
2431
2432 if (GET_CODE (orig_reg) == SUBREG
2433 && df_read_modify_subreg_p (orig_reg))
2434 {
2435 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2436 if (subreg_lowpart_p (orig_reg))
2437 which_subword = 0;
2438 else
2439 which_subword = 1;
2440 }
2441 if (is_set)
2442 {
2443 if (which_subword != 1)
2444 changed |= bitmap_set_bit (live, regno * 2);
2445 if (which_subword != 0)
2446 changed |= bitmap_set_bit (live, regno * 2 + 1);
2447 }
2448 else
2449 {
2450 if (which_subword != 1)
2451 changed |= bitmap_clear_bit (live, regno * 2);
2452 if (which_subword != 0)
2453 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2454 }
2455 return changed;
2456 }
2457
2458 /* Compute local live register info for basic block BB. */
2459
2460 static void
2461 df_word_lr_bb_local_compute (unsigned int bb_index)
2462 {
2463 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
2464 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2465 rtx_insn *insn;
2466 df_ref def, use;
2467
2468 /* Ensure that artificial refs don't contain references to pseudos. */
2469 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
2470 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2471
2472 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
2473 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2474
2475 FOR_BB_INSNS_REVERSE (bb, insn)
2476 {
2477 if (!NONDEBUG_INSN_P (insn))
2478 continue;
2479
2480 df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2481 FOR_EACH_INSN_INFO_DEF (def, insn_info)
2482 /* If the def is to only part of the reg, it does
2483 not kill the other defs that reach here. */
2484 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2485 {
2486 df_word_lr_mark_ref (def, true, &bb_info->def);
2487 df_word_lr_mark_ref (def, false, &bb_info->use);
2488 }
2489 FOR_EACH_INSN_INFO_USE (use, insn_info)
2490 df_word_lr_mark_ref (use, true, &bb_info->use);
2491 }
2492 }
2493
2494
2495 /* Compute local live register info for each basic block within BLOCKS. */
2496
2497 static void
2498 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2499 {
2500 unsigned int bb_index;
2501 bitmap_iterator bi;
2502
2503 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2504 {
2505 if (bb_index == EXIT_BLOCK)
2506 {
2507 unsigned regno;
2508 bitmap_iterator bi;
2509 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2510 regno, bi)
2511 gcc_unreachable ();
2512 }
2513 else
2514 df_word_lr_bb_local_compute (bb_index);
2515 }
2516
2517 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2518 }
2519
2520
2521 /* Initialize the solution vectors. */
2522
2523 static void
2524 df_word_lr_init (bitmap all_blocks)
2525 {
2526 unsigned int bb_index;
2527 bitmap_iterator bi;
2528
2529 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2530 {
2531 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2532 bitmap_copy (&bb_info->in, &bb_info->use);
2533 bitmap_clear (&bb_info->out);
2534 }
2535 }
2536
2537
2538 /* Confluence function: union a successor's live-in into the predecessor's live-out. */
2539
2540 static bool
2541 df_word_lr_confluence_n (edge e)
2542 {
2543 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2544 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2545
2546 return bitmap_ior_into (op1, op2);
2547 }
2548
2549
2550 /* Transfer function. */
2551
2552 static bool
2553 df_word_lr_transfer_function (int bb_index)
2554 {
2555 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2556 bitmap in = &bb_info->in;
2557 bitmap out = &bb_info->out;
2558 bitmap use = &bb_info->use;
2559 bitmap def = &bb_info->def;
2560
2561 return bitmap_ior_and_compl (in, use, out, def);
2562 }
2563
2564
2565 /* Free all storage associated with the problem. */
2566
2567 static void
2568 df_word_lr_free (void)
2569 {
2570 struct df_word_lr_problem_data *problem_data
2571 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2572
2573 if (df_word_lr->block_info)
2574 {
2575 df_word_lr->block_info_size = 0;
2576 free (df_word_lr->block_info);
2577 df_word_lr->block_info = NULL;
2578 }
2579
2580 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2581 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2582 free (problem_data);
2583 free (df_word_lr);
2584 }
2585
2586
2587 /* Debugging info at top of bb. */
2588
2589 static void
2590 df_word_lr_top_dump (basic_block bb, FILE *file)
2591 {
2592 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2593 if (!bb_info)
2594 return;
2595
2596 fprintf (file, ";; blr in \t");
2597 df_print_word_regset (file, &bb_info->in);
2598 fprintf (file, ";; blr use \t");
2599 df_print_word_regset (file, &bb_info->use);
2600 fprintf (file, ";; blr def \t");
2601 df_print_word_regset (file, &bb_info->def);
2602 }
2603
2604
2605 /* Debugging info at bottom of bb. */
2606
2607 static void
2608 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2609 {
2610 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2611 if (!bb_info)
2612 return;
2613
2614 fprintf (file, ";; blr out \t");
2615 df_print_word_regset (file, &bb_info->out);
2616 }
2617
2618
2619 /* All of the information associated with every instance of the problem. */
2620
2621 static struct df_problem problem_WORD_LR =
2622 {
2623 DF_WORD_LR, /* Problem id. */
2624 DF_BACKWARD, /* Direction. */
2625 df_word_lr_alloc, /* Allocate the problem specific data. */
2626 df_word_lr_reset, /* Reset global information. */
2627 df_word_lr_free_bb_info, /* Free basic block info. */
2628 df_word_lr_local_compute, /* Local compute function. */
2629 df_word_lr_init, /* Init the solution specific data. */
2630 df_worklist_dataflow, /* Worklist solver. */
2631 NULL, /* Confluence operator 0. */
2632 df_word_lr_confluence_n, /* Confluence operator n. */
2633 df_word_lr_transfer_function, /* Transfer function. */
2634 NULL, /* Finalize function. */
2635 df_word_lr_free, /* Free all of the problem information. */
2636 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2637 NULL, /* Debugging. */
2638 df_word_lr_top_dump, /* Debugging start block. */
2639 df_word_lr_bottom_dump, /* Debugging end block. */
2640 NULL, /* Debugging start insn. */
2641 NULL, /* Debugging end insn. */
2642 NULL, /* Incremental solution verify start. */
2643 NULL, /* Incremental solution verify end. */
2644 NULL, /* Dependent problem. */
2645 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2646 TV_DF_WORD_LR, /* Timing variable. */
2647 false /* Reset blocks on dropping out of blocks_to_analyze. */
2648 };
2649
2650
2651 /* Create a new DATAFLOW instance and add it to an existing instance
2652 of DF. The returned structure is what is used to get at the
2653 solution. */
2654
2655 void
2656 df_word_lr_add_problem (void)
2657 {
2658 df_add_problem (&problem_WORD_LR);
2659 /* These will be initialized when df_scan_blocks processes each
2660 block. */
2661 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2662 }
2663
2664
2665 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2666 any bits, which is used by the caller to determine whether a set is
2667 necessary. We also return true if there are other reasons not to delete
2668 an insn. */
2669
2670 bool
2671 df_word_lr_simulate_defs (rtx_insn *insn, bitmap live)
2672 {
2673 bool changed = false;
2674 df_ref def;
2675
2676 FOR_EACH_INSN_DEF (def, insn)
2677 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2678 changed = true;
2679 else
2680 changed |= df_word_lr_mark_ref (def, false, live);
2681 return changed;
2682 }
2683
2684
2685 /* Simulate the effects of the uses of INSN on LIVE. */
2686
2687 void
2688 df_word_lr_simulate_uses (rtx_insn *insn, bitmap live)
2689 {
2690 df_ref use;
2691
2692 FOR_EACH_INSN_USE (use, insn)
2693 df_word_lr_mark_ref (use, true, live);
2694 }
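/* Illustrative sketch only (hypothetical helper): how a DCE-style
   client might use the word level sets.  Starting from a private copy
   of BB's live-out set (the simulate routines modify the set they are
   given), an insn whose defs change no bits and that gives no other
   reason to be kept is a deletion candidate.  A real backwards walk
   would also simulate the uses and defs of the intervening insns;
   only the final test is shown here.  */

static bool ATTRIBUTE_UNUSED
df_word_lr_last_insn_dead_p_sketch (basic_block bb, rtx_insn *insn)
{
  bitmap_head live;
  bool needed;

  bitmap_initialize (&live, &bitmap_default_obstack);
  bitmap_copy (&live, &df_word_lr_get_bb_info (bb->index)->out);

  needed = df_word_lr_simulate_defs (insn, &live);

  bitmap_clear (&live);
  return !needed;
}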
2695 \f
2696 /*----------------------------------------------------------------------------
2697 This problem computes REG_DEAD and REG_UNUSED notes.
2698 ----------------------------------------------------------------------------*/
2699
2700 static void
2701 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2702 {
2703 df_note->optional_p = true;
2704 }
2705
2706 /* This is only used if REG_DEAD_DEBUGGING is in effect. */
2707 static void
2708 df_print_note (const char *prefix, rtx_insn *insn, rtx note)
2709 {
2710 if (dump_file)
2711 {
2712 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2713 print_rtl (dump_file, note);
2714 fprintf (dump_file, "\n");
2715 }
2716 }
2717
2718
2719 /* After reg-stack, the x86 floating point stack regs are difficult to
2720 analyze because of all of the pushes, pops and rotations. Thus, we
2721 just leave the notes alone. */
2722
2723 #ifdef STACK_REGS
2724 static inline bool
2725 df_ignore_stack_reg (int regno)
2726 {
2727 return regstack_completed
2728 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2729 }
2730 #else
2731 static inline bool
2732 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2733 {
2734 return false;
2735 }
2736 #endif
2737
2738
2739 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
2740
2741 static void
2742 df_remove_dead_and_unused_notes (rtx_insn *insn)
2743 {
2744 rtx *pprev = &REG_NOTES (insn);
2745 rtx link = *pprev;
2746
2747 while (link)
2748 {
2749 switch (REG_NOTE_KIND (link))
2750 {
2751 case REG_DEAD:
2752 /* After reg-stack, we need to ignore any REG_DEAD notes
2753 for the stack registers. */
2754 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2755 {
2756 pprev = &XEXP (link, 1);
2757 link = *pprev;
2758 }
2759 else
2760 {
2761 rtx next = XEXP (link, 1);
2762 if (REG_DEAD_DEBUGGING)
2763 df_print_note ("deleting: ", insn, link);
2764 free_EXPR_LIST_node (link);
2765 *pprev = link = next;
2766 }
2767 break;
2768
2769 case REG_UNUSED:
2770 /* After reg-stack, we need to ignore any unused notes
2771 for the stack registers. */
2772 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2773 {
2774 pprev = &XEXP (link, 1);
2775 link = *pprev;
2776 }
2777 else
2778 {
2779 rtx next = XEXP (link, 1);
2780 if (REG_DEAD_DEBUGGING)
2781 df_print_note ("deleting: ", insn, link);
2782 free_EXPR_LIST_node (link);
2783 *pprev = link = next;
2784 }
2785 break;
2786
2787 default:
2788 pprev = &XEXP (link, 1);
2789 link = *pprev;
2790 break;
2791 }
2792 }
2793 }
2794
2795 /* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE
2796 as the bitmap of currently live registers. */
2797
2798 static void
2799 df_remove_dead_eq_notes (rtx_insn *insn, bitmap live)
2800 {
2801 rtx *pprev = &REG_NOTES (insn);
2802 rtx link = *pprev;
2803
2804 while (link)
2805 {
2806 switch (REG_NOTE_KIND (link))
2807 {
2808 case REG_EQUAL:
2809 case REG_EQUIV:
2810 {
2811 /* Remove the notes that refer to dead registers. As we have at most
2812 one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
2813 so we need to purge the complete EQ_USES vector when removing
2814 the note using df_notes_rescan. */
2815 df_ref use;
2816 bool deleted = false;
2817
2818 FOR_EACH_INSN_EQ_USE (use, insn)
2819 if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
2820 && DF_REF_LOC (use)
2821 && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
2822 && !bitmap_bit_p (live, DF_REF_REGNO (use))
2823 && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
2824 {
2825 deleted = true;
2826 break;
2827 }
2828 if (deleted)
2829 {
2830 rtx next;
2831 if (REG_DEAD_DEBUGGING)
2832 df_print_note ("deleting: ", insn, link);
2833 next = XEXP (link, 1);
2834 free_EXPR_LIST_node (link);
2835 *pprev = link = next;
2836 df_notes_rescan (insn);
2837 }
2838 else
2839 {
2840 pprev = &XEXP (link, 1);
2841 link = *pprev;
2842 }
2843 break;
2844 }
2845
2846 default:
2847 pprev = &XEXP (link, 1);
2848 link = *pprev;
2849 break;
2850 }
2851 }
2852 }
2853
2854 /* Set a NOTE_TYPE note for REG in INSN. */
2855
2856 static inline void
2857 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2858 {
2859 gcc_checking_assert (!DEBUG_INSN_P (insn));
2860 add_reg_note (insn, note_type, reg);
2861 }
2862
2863 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2864 arguments. Return true if the register value described by MWS's
2865 mw_reg is known to be completely unused, and if mw_reg can therefore
2866 be used in a REG_UNUSED note. */
2867
2868 static bool
2869 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2870 bitmap live, bitmap artificial_uses)
2871 {
2872 unsigned int r;
2873
2874 /* If MWS describes a partial reference, create REG_UNUSED notes for
2875 individual hard registers. */
2876 if (mws->flags & DF_REF_PARTIAL)
2877 return false;
2878
2879 /* Likewise if some part of the register is used. */
2880 for (r = mws->start_regno; r <= mws->end_regno; r++)
2881 if (bitmap_bit_p (live, r)
2882 || bitmap_bit_p (artificial_uses, r))
2883 return false;
2884
2885 gcc_assert (REG_P (mws->mw_reg));
2886 return true;
2887 }
2888
2889
2890 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2891 based on the bits in LIVE. Do not generate notes for registers in
2892 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2893 not generated if the reg is both read and written by the
2894 instruction. */
2896
2897 static void
2898 df_set_unused_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
2899 bitmap live, bitmap do_not_gen,
2900 bitmap artificial_uses,
2901 struct dead_debug_local *debug)
2902 {
2903 unsigned int r;
2904
2905 if (REG_DEAD_DEBUGGING && dump_file)
2906 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
2907 mws->start_regno, mws->end_regno);
2908
2909 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
2910 {
2911 unsigned int regno = mws->start_regno;
2912 df_set_note (REG_UNUSED, insn, mws->mw_reg);
2913 dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
2914
2915 if (REG_DEAD_DEBUGGING)
2916 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
2917
2918 bitmap_set_bit (do_not_gen, regno);
2919 /* Only do this if the value is totally dead. */
2920 }
2921 else
2922 for (r = mws->start_regno; r <= mws->end_regno; r++)
2923 {
2924 if (!bitmap_bit_p (live, r)
2925 && !bitmap_bit_p (artificial_uses, r))
2926 {
2927 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
2928 dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
2929 if (REG_DEAD_DEBUGGING)
2930 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
2931 }
2932 bitmap_set_bit (do_not_gen, r);
2933 }
2934 }
2935
2936
2937 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
2938 arguments. Return true if the register value described by MWS's
2939 mw_reg is known to be completely dead, and if mw_reg can therefore
2940 be used in a REG_DEAD note. */
2941
2942 static bool
2943 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
2944 bitmap live, bitmap artificial_uses,
2945 bitmap do_not_gen)
2946 {
2947 unsigned int r;
2948
2949 /* If MWS describes a partial reference, create REG_DEAD notes for
2950 individual hard registers. */
2951 if (mws->flags & DF_REF_PARTIAL)
2952 return false;
2953
2954 /* Likewise if some part of the register is not dead. */
2955 for (r = mws->start_regno; r <= mws->end_regno; r++)
2956 if (bitmap_bit_p (live, r)
2957 || bitmap_bit_p (artificial_uses, r)
2958 || bitmap_bit_p (do_not_gen, r))
2959 return false;
2960
2961 gcc_assert (REG_P (mws->mw_reg));
2962 return true;
2963 }
2964
2965 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
2966 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
2967 from being set if the instruction both reads and writes the
2968 register. */
2969
2970 static void
2971 df_set_dead_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
2972 bitmap live, bitmap do_not_gen,
2973 bitmap artificial_uses, bool *added_notes_p)
2974 {
2975 unsigned int r;
2976 bool is_debug = *added_notes_p;
2977
2978 *added_notes_p = false;
2979
2980 if (REG_DEAD_DEBUGGING && dump_file)
2981 {
2982 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
2983 mws->start_regno, mws->end_regno);
2984 df_print_regset (dump_file, do_not_gen);
2985 fprintf (dump_file, " live =");
2986 df_print_regset (dump_file, live);
2987 fprintf (dump_file, " artificial uses =");
2988 df_print_regset (dump_file, artificial_uses);
2989 }
2990
2991 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
2992 {
2993 if (is_debug)
2994 {
2995 *added_notes_p = true;
2996 return;
2997 }
2998 /* Add a dead note for the entire multi word register. */
2999 df_set_note (REG_DEAD, insn, mws->mw_reg);
3000 if (REG_DEAD_DEBUGGING)
3001 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3002 }
3003 else
3004 {
3005 for (r = mws->start_regno; r <= mws->end_regno; r++)
3006 if (!bitmap_bit_p (live, r)
3007 && !bitmap_bit_p (artificial_uses, r)
3008 && !bitmap_bit_p (do_not_gen, r))
3009 {
3010 if (is_debug)
3011 {
3012 *added_notes_p = true;
3013 return;
3014 }
3015 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
3016 if (REG_DEAD_DEBUGGING)
3017 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3018 }
3019 }
3020 return;
3021 }
3022
3023
3024 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3025 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3026
3027 static void
3028 df_create_unused_note (rtx_insn *insn, df_ref def,
3029 bitmap live, bitmap artificial_uses,
3030 struct dead_debug_local *debug)
3031 {
3032 unsigned int dregno = DF_REF_REGNO (def);
3033
3034 if (REG_DEAD_DEBUGGING && dump_file)
3035 {
3036 fprintf (dump_file, " regular looking at def ");
3037 df_ref_debug (def, dump_file);
3038 }
3039
3040 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3041 || bitmap_bit_p (live, dregno)
3042 || bitmap_bit_p (artificial_uses, dregno)
3043 || df_ignore_stack_reg (dregno)))
3044 {
3045 rtx reg = (DF_REF_LOC (def))
3046 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3047 df_set_note (REG_UNUSED, insn, reg);
3048 dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3049 if (REG_DEAD_DEBUGGING)
3050 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3051 }
3052
3053 return;
3054 }
3055
3056
3057 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3058 info: lifetime, bb, and number of defs and uses for basic block
3059 BB. The three bitvectors are scratch bitmaps used here. */
3060
3061 static void
3062 df_note_bb_compute (unsigned int bb_index,
3063 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3064 {
3065 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3066 rtx_insn *insn;
3067 df_ref def, use;
3068 struct dead_debug_local debug;
3069
3070 dead_debug_local_init (&debug, NULL, NULL);
3071
3072 bitmap_copy (live, df_get_live_out (bb));
3073 bitmap_clear (artificial_uses);
3074
3075 if (REG_DEAD_DEBUGGING && dump_file)
3076 {
3077 fprintf (dump_file, "live at bottom ");
3078 df_print_regset (dump_file, live);
3079 }
3080
3081 /* Process the artificial defs and uses at the bottom of the block
3082 to begin processing. */
3083 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
3084 {
3085 if (REG_DEAD_DEBUGGING && dump_file)
3086 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3087
3088 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3089 bitmap_clear_bit (live, DF_REF_REGNO (def));
3090 }
3091
3092 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
3093 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3094 {
3095 unsigned int regno = DF_REF_REGNO (use);
3096 bitmap_set_bit (live, regno);
3097
3098 /* Notes are not generated for any of the artificial registers
3099 at the bottom of the block. */
3100 bitmap_set_bit (artificial_uses, regno);
3101 }
3102
3103 if (REG_DEAD_DEBUGGING && dump_file)
3104 {
3105 fprintf (dump_file, "live before artificials out ");
3106 df_print_regset (dump_file, live);
3107 }
3108
3109 FOR_BB_INSNS_REVERSE (bb, insn)
3110 {
3111 df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3112 df_mw_hardreg *mw;
3113 int debug_insn;
3114
3115 if (!INSN_P (insn))
3116 continue;
3117
3118 debug_insn = DEBUG_INSN_P (insn);
3119
3120 bitmap_clear (do_not_gen);
3121 df_remove_dead_and_unused_notes (insn);
3122
3123 /* Process the defs. */
3124 if (CALL_P (insn))
3125 {
3126 if (REG_DEAD_DEBUGGING && dump_file)
3127 {
3128 fprintf (dump_file, "processing call %d\n live =",
3129 INSN_UID (insn));
3130 df_print_regset (dump_file, live);
3131 }
3132
3133 /* We only care about real sets for calls. Clobbers cannot
3134 be depended on to really die. */
3135 FOR_EACH_INSN_INFO_MW (mw, insn_info)
3136 if ((DF_MWS_REG_DEF_P (mw))
3137 && !df_ignore_stack_reg (mw->start_regno))
3138 df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
3139 artificial_uses, &debug);
3140
3141 /* All of the defs except the return value are some sort of
3142 clobber. This code is for the return. */
3143 FOR_EACH_INSN_INFO_DEF (def, insn_info)
3144 {
3145 unsigned int dregno = DF_REF_REGNO (def);
3146 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3147 {
3148 df_create_unused_note (insn,
3149 def, live, artificial_uses, &debug);
3150 bitmap_set_bit (do_not_gen, dregno);
3151 }
3152
3153 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3154 bitmap_clear_bit (live, dregno);
3155 }
3156 }
3157 else
3158 {
3159 /* Regular insn. */
3160 FOR_EACH_INSN_INFO_MW (mw, insn_info)
3161 if (DF_MWS_REG_DEF_P (mw))
3162 df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
3163 artificial_uses, &debug);
3164
3165 FOR_EACH_INSN_INFO_DEF (def, insn_info)
3166 {
3167 unsigned int dregno = DF_REF_REGNO (def);
3168 df_create_unused_note (insn,
3169 def, live, artificial_uses, &debug);
3170
3171 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3172 bitmap_set_bit (do_not_gen, dregno);
3173
3174 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3175 bitmap_clear_bit (live, dregno);
3176 }
3177 }
3178
3179 /* Process the uses. */
3180 FOR_EACH_INSN_INFO_MW (mw, insn_info)
3181 if (DF_MWS_REG_USE_P (mw)
3182 && !df_ignore_stack_reg (mw->start_regno))
3183 {
3184 bool really_add_notes = debug_insn != 0;
3185
3186 df_set_dead_notes_for_mw (insn, mw, live, do_not_gen,
3187 artificial_uses,
3188 &really_add_notes);
3189
3190 if (really_add_notes)
3191 debug_insn = -1;
3192 }
3193
3194 FOR_EACH_INSN_INFO_USE (use, insn_info)
3195 {
3196 unsigned int uregno = DF_REF_REGNO (use);
3197
3198 if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
3199 {
3200 fprintf (dump_file, " regular looking at use ");
3201 df_ref_debug (use, dump_file);
3202 }
3203
3204 if (!bitmap_bit_p (live, uregno))
3205 {
3206 if (debug_insn)
3207 {
3208 if (debug_insn > 0)
3209 {
3210 /* We won't add REG_UNUSED or REG_DEAD notes for
3211 these, so we don't have to mess with them in
3212 debug insns either. */
3213 if (!bitmap_bit_p (artificial_uses, uregno)
3214 && !df_ignore_stack_reg (uregno))
3215 dead_debug_add (&debug, use, uregno);
3216 continue;
3217 }
3218 break;
3219 }
3220 else
3221 dead_debug_insert_temp (&debug, uregno, insn,
3222 DEBUG_TEMP_BEFORE_WITH_REG);
3223
3224 if ( (!(DF_REF_FLAGS (use)
3225 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3226 && (!bitmap_bit_p (do_not_gen, uregno))
3227 && (!bitmap_bit_p (artificial_uses, uregno))
3228 && (!df_ignore_stack_reg (uregno)))
3229 {
3230 rtx reg = (DF_REF_LOC (use))
3231 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3232 df_set_note (REG_DEAD, insn, reg);
3233
3234 if (REG_DEAD_DEBUGGING)
3235 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3236 }
3237 /* This register is now live. */
3238 bitmap_set_bit (live, uregno);
3239 }
3240 }
3241
3242 df_remove_dead_eq_notes (insn, live);
3243
3244 if (debug_insn == -1)
3245 {
3246 /* ??? We could probably do better here, replacing dead
3247 registers with their definitions. */
3248 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3249 df_insn_rescan_debug_internal (insn);
3250 }
3251 }
3252
3253 dead_debug_local_finish (&debug, NULL);
3254 }
3255
3256
3257 /* Compute register info: lifetime, bb, and number of defs and uses. */
3258 static void
3259 df_note_compute (bitmap all_blocks)
3260 {
3261 unsigned int bb_index;
3262 bitmap_iterator bi;
3263 bitmap_head live, do_not_gen, artificial_uses;
3264
3265 bitmap_initialize (&live, &df_bitmap_obstack);
3266 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3267 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3268
3269 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3270 {
3271 /* ??? Unlike fast DCE, we don't use global_debug for uses of dead
3272 pseudos in debug insns because we don't always (re)visit blocks
3273 with death points after visiting dead uses. Even changing this
3274 loop to postorder would still leave room for visiting a death
3275 point before visiting a subsequent debug use. */
3276 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3277 }
3278
3279 bitmap_clear (&live);
3280 bitmap_clear (&do_not_gen);
3281 bitmap_clear (&artificial_uses);
3282 }
3283
3284
3285 /* Free all storage associated with the problem. */
3286
3287 static void
3288 df_note_free (void)
3289 {
3290 free (df_note);
3291 }
3292
3293
3294 /* All of the information associated with every instance of the problem. */
3295
3296 static struct df_problem problem_NOTE =
3297 {
3298 DF_NOTE, /* Problem id. */
3299 DF_NONE, /* Direction. */
3300 df_note_alloc, /* Allocate the problem specific data. */
3301 NULL, /* Reset global information. */
3302 NULL, /* Free basic block info. */
3303 df_note_compute, /* Local compute function. */
3304 NULL, /* Init the solution specific data. */
3305 NULL, /* Iterative solver. */
3306 NULL, /* Confluence operator 0. */
3307 NULL, /* Confluence operator n. */
3308 NULL, /* Transfer function. */
3309 NULL, /* Finalize function. */
3310 df_note_free, /* Free all of the problem information. */
3311 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3312 NULL, /* Debugging. */
3313 NULL, /* Debugging start block. */
3314 NULL, /* Debugging end block. */
3315 NULL, /* Debugging start insn. */
3316 NULL, /* Debugging end insn. */
3317 NULL, /* Incremental solution verify start. */
3318 NULL, /* Incremental solution verify end. */
3319 &problem_LR, /* Dependent problem. */
3320 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3321 TV_DF_NOTE, /* Timing variable. */
3322 false /* Reset blocks on dropping out of blocks_to_analyze. */
3323 };
3324
3325
3326 /* Create a new DATAFLOW instance and add it to an existing instance
3327 of DF. The returned structure is what is used to get at the
3328 solution. */
3329
3330 void
3331 df_note_add_problem (void)
3332 {
3333 df_add_problem (&problem_NOTE);
3334 }
3335
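/* Illustrative sketch only (hypothetical helper): once the NOTE
   problem has been computed, death information can be read straight
   off the notes.  df_note_add_problem and df_analyze are normally
   called once per pass, not once per query.  */

static bool ATTRIBUTE_UNUSED
df_note_reg_dies_here_sketch (rtx_insn *insn, rtx reg)
{
  df_note_add_problem ();
  df_analyze ();

  /* REG dies in INSN if the recomputed notes say so.  */
  return find_reg_note (insn, REG_DEAD, reg) != NULL_RTX;
}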
3336
3337
3338 \f
3339 /*----------------------------------------------------------------------------
3340 Functions for simulating the effects of single insns.
3341
3342 You can either simulate in the forwards direction, starting from
3343 the top of a block or the backwards direction from the end of the
3344 block. If you go backwards, defs are examined first to clear bits,
3345 then uses are examined to set bits. If you go forwards, defs are
3346 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3347 are examined to clear bits. In either case, the result of examining
3348 a def can be undone (respectively by a use or a REG_UNUSED note).
3349
3350 If you start at the top of the block, use one of DF_LIVE_IN or
3351 DF_LR_IN. If you start at the bottom of the block use one of
3352 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3353 THEY WILL BE DESTROYED.
3354 ----------------------------------------------------------------------------*/
3355
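/* Illustrative sketch only (hypothetical helper): the canonical
   backwards walk over a block.  Note the explicit copy of the
   live-out set; as warned above, the simulate routines destroy the
   set they are handed.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_backwards_sketch (basic_block bb, bitmap live)
{
  rtx_insn *insn;

  bitmap_copy (live, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live);
  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);
  df_simulate_finalize_backwards (bb, live);
}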
3356
3357 /* Find the set of DEFs for INSN. */
3358
3359 void
3360 df_simulate_find_defs (rtx_insn *insn, bitmap defs)
3361 {
3362 df_ref def;
3363
3364 FOR_EACH_INSN_DEF (def, insn)
3365 bitmap_set_bit (defs, DF_REF_REGNO (def));
3366 }
3367
3368 /* Find the set of uses for INSN. This includes partial defs. */
3369
3370 static void
3371 df_simulate_find_uses (rtx_insn *insn, bitmap uses)
3372 {
3373 df_ref def, use;
3374 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3375
3376 FOR_EACH_INSN_INFO_DEF (def, insn_info)
3377 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3378 bitmap_set_bit (uses, DF_REF_REGNO (def));
3379 FOR_EACH_INSN_INFO_USE (use, insn_info)
3380 bitmap_set_bit (uses, DF_REF_REGNO (use));
3381 }
3382
3383 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3384
3385 void
3386 df_simulate_find_noclobber_defs (rtx_insn *insn, bitmap defs)
3387 {
3388 df_ref def;
3389
3390 FOR_EACH_INSN_DEF (def, insn)
3391 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3392 bitmap_set_bit (defs, DF_REF_REGNO (def));
3393 }
3394
3395
3396 /* Simulate the effects of the defs of INSN on LIVE. */
3397
3398 void
3399 df_simulate_defs (rtx_insn *insn, bitmap live)
3400 {
3401 df_ref def;
3402
3403 FOR_EACH_INSN_DEF (def, insn)
3404 {
3405 unsigned int dregno = DF_REF_REGNO (def);
3406
3407 /* If the def is to only part of the reg, it does
3408 not kill the other defs that reach here. */
3409 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3410 bitmap_clear_bit (live, dregno);
3411 }
3412 }
3413
3414
3415 /* Simulate the effects of the uses of INSN on LIVE. */
3416
3417 void
3418 df_simulate_uses (rtx_insn *insn, bitmap live)
3419 {
3420 df_ref use;
3421
3422 if (DEBUG_INSN_P (insn))
3423 return;
3424
3425 FOR_EACH_INSN_USE (use, insn)
3426 /* Add the use to the set of live registers. */
3427 bitmap_set_bit (live, DF_REF_REGNO (use));
3428 }
3429
3430
3431 /* Add back the always live regs in BB to LIVE. */
3432
3433 static inline void
3434 df_simulate_fixup_sets (basic_block bb, bitmap live)
3435 {
3436 /* These regs are considered always live so if they end up dying
3437 because of some def, we need to bring them back again. */
3438 if (bb_has_eh_pred (bb))
3439 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3440 else
3441 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3442 }
3443
3444
3445 /*----------------------------------------------------------------------------
3446 The following three functions are used only for BACKWARDS scanning:
3447 i.e. they process the defs before the uses.
3448
3449 df_simulate_initialize_backwards should be called first with a
3450 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3451 df_simulate_one_insn_backwards should be called for each insn in
3452 the block, starting with the last one. Finally,
3453 df_simulate_finalize_backwards can be called to get a new value
3454 of the sets at the top of the block (this is rarely used).
3455 ----------------------------------------------------------------------------*/
3456
3457 /* Apply the artificial uses and defs at the end of BB in a backwards
3458 direction. */
3459
3460 void
3461 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3462 {
3463 df_ref def, use;
3464 int bb_index = bb->index;
3465
3466 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
3467 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3468 bitmap_clear_bit (live, DF_REF_REGNO (def));
3469
3470 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
3471 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3472 bitmap_set_bit (live, DF_REF_REGNO (use));
3473 }
3474
3475
3476 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3477
3478 void
3479 df_simulate_one_insn_backwards (basic_block bb, rtx_insn *insn, bitmap live)
3480 {
3481 if (!NONDEBUG_INSN_P (insn))
3482 return;
3483
3484 df_simulate_defs (insn, live);
3485 df_simulate_uses (insn, live);
3486 df_simulate_fixup_sets (bb, live);
3487 }
3488
3489
3490 /* Apply the artificial uses and defs at the top of BB in a backwards
3491 direction. */
3492
3493 void
3494 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3495 {
3496 df_ref def;
3497 #ifdef EH_USES
3498 df_ref use;
3499 #endif
3500 int bb_index = bb->index;
3501
3502 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
3503 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3504 bitmap_clear_bit (live, DF_REF_REGNO (def));
3505
3506 #ifdef EH_USES
3507 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
3508 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3509 bitmap_set_bit (live, DF_REF_REGNO (use));
3510 #endif
3511 }
3512 /*----------------------------------------------------------------------------
3513 The following three functions are used only for FORWARDS scanning:
3514 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3515 Thus it is important to add the DF_NOTES problem to the stack of
3516 problems computed before using these functions.
3517
3518 df_simulate_initialize_forwards should be called first with a
3519 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3520 df_simulate_one_insn_forwards should be called for each insn in
3521 the block, starting with the first one.
3522 ----------------------------------------------------------------------------*/
3523
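/* Illustrative sketch only (hypothetical helper): the matching
   forwards walk.  The DF_NOTE problem must already be on the problem
   stack, because the REG_DEAD and REG_UNUSED notes are what clear
   bits here.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_forwards_sketch (basic_block bb, bitmap live)
{
  rtx_insn *insn;

  bitmap_copy (live, df_get_live_in (bb));
  df_simulate_initialize_forwards (bb, live);
  FOR_BB_INSNS (bb, insn)
    df_simulate_one_insn_forwards (bb, insn, live);
}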
3524 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3525 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3526 defs live. */
3527
3528 void
3529 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3530 {
3531 df_ref def;
3532 int bb_index = bb->index;
3533
3534 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
3535 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3536 bitmap_set_bit (live, DF_REF_REGNO (def));
3537 }
3538
3539 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3540
3541 void
3542 df_simulate_one_insn_forwards (basic_block bb, rtx_insn *insn, bitmap live)
3543 {
3544 rtx link;
3545 if (! INSN_P (insn))
3546 return;
3547
3548 /* Make sure that DF_NOTE really is an active df problem. */
3549 gcc_assert (df_note);
3550
3551 /* Note that this is the opposite of how the problem is defined, because
3552 in the LR problem defs _kill_ liveness. However, they do so backwards,
3553 while here the scan is performed forwards! So, first assume that the
3554 def is live, and if this is not true REG_UNUSED notes will rectify the
3555 situation. */
3556 df_simulate_find_noclobber_defs (insn, live);
3557
3558 /* Clear all of the registers that go dead. */
3559 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3560 {
3561 switch (REG_NOTE_KIND (link))
3562 {
3563 case REG_DEAD:
3564 case REG_UNUSED:
3565 {
3566 rtx reg = XEXP (link, 0);
3567 int regno = REGNO (reg);
3568 if (HARD_REGISTER_NUM_P (regno))
3569 bitmap_clear_range (live, regno,
3570 hard_regno_nregs[regno][GET_MODE (reg)]);
3571 else
3572 bitmap_clear_bit (live, regno);
3573 }
3574 break;
3575 default:
3576 break;
3577 }
3578 }
3579 df_simulate_fixup_sets (bb, live);
3580 }
3581 \f
3582 /* Used by the next two functions to encode information about the
3583 memory references we found. */
3584 #define MEMREF_NORMAL 1
3585 #define MEMREF_VOLATILE 2
3586
3587 /* A subroutine of can_move_insns_across called through for_each_rtx.
3588 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory is found. */
3589
3590 static int
3591 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3592 {
3593 rtx x = *px;
3594
3595 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3596 return MEMREF_VOLATILE;
3597
3598 if (!MEM_P (x))
3599 return 0;
3600 if (MEM_VOLATILE_P (x))
3601 return MEMREF_VOLATILE;
3602 if (MEM_READONLY_P (x))
3603 return 0;
3604
3605 return MEMREF_NORMAL;
3606 }
3607
3608 /* A subroutine of can_move_insns_across called through note_stores.
3609 DATA points to an integer in which we set either the bit for
3610 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3611 of either kind. */
3612
3613 static void
3614 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3615 void *data ATTRIBUTE_UNUSED)
3616 {
3617 int *pflags = (int *)data;
3618 if (GET_CODE (x) == SUBREG)
3619 x = XEXP (x, 0);
3620 /* Treat stores to SP as stores to memory; this prevents problems
3621 when there are references to the stack frame. */
3622 if (x == stack_pointer_rtx)
3623 *pflags |= MEMREF_VOLATILE;
3624 if (!MEM_P (x))
3625 return;
3626 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3627 }
3628
3629 /* Scan BB backwards, using df_simulate functions to keep track of
3630 lifetimes, up to insn POINT. The result is stored in LIVE. */
3631
3632 void
3633 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
3634 {
3635 rtx_insn *insn;
3636 bitmap_copy (live, df_get_live_out (bb));
3637 df_simulate_initialize_backwards (bb, live);
3638
3639 /* Scan and update life information until we reach the point we're
3640 interested in. */
3641 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
3642 df_simulate_one_insn_backwards (bb, insn, live);
3643 }
3644
3645 /* Return true if it is safe to move a group of insns, described by
3646 the range FROM to TO, backwards across another group of insns,
3647 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
3648 are no insns between ACROSS_TO and FROM, but they may be in
3649 different basic blocks; MERGE_BB is the block from which the
3650 insns will be moved. The caller must pass in a regset MERGE_LIVE
3651 which specifies the registers live after TO.
3652
3653 This function may be called in one of two cases: either we try to
3654 move identical instructions from all successor blocks into their
3655 predecessor, or we try to move from only one successor block. If
3656 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
3657 the second case. It should contain a set of registers live at the
3658 end of ACROSS_TO which must not be clobbered by moving the insns.
3659 In that case, we're also more careful about moving memory references
3660 and trapping insns.
3661
3662 We return false if it is not safe to move the entire group, but it
3663 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
3664 is set to point at the last moveable insn in such a case. */
3665
3666 bool
3667 can_move_insns_across (rtx_insn *from, rtx_insn *to,
3668 rtx_insn *across_from, rtx_insn *across_to,
3669 basic_block merge_bb, regset merge_live,
3670 regset other_branch_live, rtx_insn **pmove_upto)
3671 {
3672 rtx_insn *insn, *next, *max_to;
3673 bitmap merge_set, merge_use, local_merge_live;
3674 bitmap test_set, test_use;
3675 unsigned i, fail = 0;
3676 bitmap_iterator bi;
3677 int memrefs_in_across = 0;
3678 int mem_sets_in_across = 0;
3679 bool trapping_insns_in_across = false;
3680
3681 if (pmove_upto != NULL)
3682 *pmove_upto = NULL;
3683
3684 /* Find real bounds, ignoring debug insns. */
3685 while (!NONDEBUG_INSN_P (from) && from != to)
3686 from = NEXT_INSN (from);
3687 while (!NONDEBUG_INSN_P (to) && from != to)
3688 to = PREV_INSN (to);
3689
3690 for (insn = across_to; ; insn = next)
3691 {
3692 if (CALL_P (insn))
3693 {
3694 if (RTL_CONST_OR_PURE_CALL_P (insn))
3695 /* Pure functions can read from memory. Const functions can
3696 read from arguments that the ABI has forced onto the stack.
3697 Neither sort of read can be volatile. */
3698 memrefs_in_across |= MEMREF_NORMAL;
3699 else
3700 {
3701 memrefs_in_across |= MEMREF_VOLATILE;
3702 mem_sets_in_across |= MEMREF_VOLATILE;
3703 }
3704 }
3705 if (NONDEBUG_INSN_P (insn))
3706 {
3707 if (volatile_insn_p (PATTERN (insn)))
3708 return false;
3709 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
3710 NULL);
3711 note_stores (PATTERN (insn), find_memory_stores,
3712 &mem_sets_in_across);
3713 /* This is used just to find sets of the stack pointer. */
3714 memrefs_in_across |= mem_sets_in_across;
3715 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
3716 }
3717 next = PREV_INSN (insn);
3718 if (insn == across_from)
3719 break;
3720 }
3721
3722 /* Collect:
3723 MERGE_SET = set of registers set in MERGE_BB
3724 MERGE_USE = set of registers used in MERGE_BB and live at its top
3725 MERGE_LIVE = set of registers live at the point inside the MERGE
3726 range that we've reached during scanning
3727 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
3728 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
3729 and live before ACROSS_FROM. */
3730
3731 merge_set = BITMAP_ALLOC (&reg_obstack);
3732 merge_use = BITMAP_ALLOC (&reg_obstack);
3733 local_merge_live = BITMAP_ALLOC (&reg_obstack);
3734 test_set = BITMAP_ALLOC (&reg_obstack);
3735 test_use = BITMAP_ALLOC (&reg_obstack);
3736
3737 /* Compute the set of registers set and used in the ACROSS range. */
3738 if (other_branch_live != NULL)
3739 bitmap_copy (test_use, other_branch_live);
3740 df_simulate_initialize_backwards (merge_bb, test_use);
3741 for (insn = across_to; ; insn = next)
3742 {
3743 if (NONDEBUG_INSN_P (insn))
3744 {
3745 df_simulate_find_defs (insn, test_set);
3746 df_simulate_defs (insn, test_use);
3747 df_simulate_uses (insn, test_use);
3748 }
3749 next = PREV_INSN (insn);
3750 if (insn == across_from)
3751 break;
3752 }
3753
3754 /* Compute an upper bound for the number of insns moved, by finding
3755 the first insn in the MERGE range that sets a register in TEST_USE, or uses
3756 a register in TEST_SET. We also check for calls, trapping operations,
3757 and memory references. */
3758 max_to = NULL;
3759 for (insn = from; ; insn = next)
3760 {
3761 if (CALL_P (insn))
3762 break;
3763 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3764 break;
3765 if (NONDEBUG_INSN_P (insn))
3766 {
3767 if (may_trap_or_fault_p (PATTERN (insn))
3768 && (trapping_insns_in_across
3769 || other_branch_live != NULL
3770 || volatile_insn_p (PATTERN (insn))))
3771 break;
3772
3773 /* We cannot move memory stores past each other, or move memory
3774 reads past stores, at least not without tracking them and
3775 calling true_dependence on every pair.
3776
3777 If there is no other branch and no memory references or
3778 sets in the ACROSS range, we can move memory references
3779 freely, even volatile ones.
3780
3781 Otherwise, the rules are as follows: volatile memory
3782 references and stores can't be moved at all, and any type
3783 of memory reference can't be moved if there are volatile
3784 accesses or stores in the ACROSS range. That leaves
3785 normal reads, which can be moved, as the trapping case is
3786 dealt with elsewhere. */
3787 if (other_branch_live != NULL || memrefs_in_across != 0)
3788 {
3789 int mem_ref_flags = 0;
3790 int mem_set_flags = 0;
3791 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
3792 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
3793 NULL);
3794 /* Catch sets of the stack pointer. */
3795 mem_ref_flags |= mem_set_flags;
3796
3797 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
3798 break;
3799 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
3800 break;
3801 if (mem_set_flags != 0
3802 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
3803 break;
3804 }
3805 df_simulate_find_uses (insn, merge_use);
3806 /* We're only interested in uses which use a value live at
3807 the top, not one previously set in this block. */
3808 bitmap_and_compl_into (merge_use, merge_set);
3809 df_simulate_find_defs (insn, merge_set);
3810 if (bitmap_intersect_p (merge_set, test_use)
3811 || bitmap_intersect_p (merge_use, test_set))
3812 break;
3813 #ifdef HAVE_cc0
3814 if (!sets_cc0_p (insn))
3815 #endif
3816 max_to = insn;
3817 }
3818 next = NEXT_INSN (insn);
3819 if (insn == to)
3820 break;
3821 }
3822 if (max_to != to)
3823 fail = 1;
3824
3825 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
3826 goto out;
3827
3828 /* Now, lower this upper bound by also taking into account that
3829 a range of insns moved across ACROSS must not leave a register
3830 live at the end that will be clobbered in ACROSS. We need to
3831 find a point where TEST_SET & LIVE == 0.
3832
3833 Insns in the MERGE range that set registers which are also set
3834 in the ACROSS range may still be moved as long as we also move
3835 later insns which use the results of the set, and make the
3836 register dead again. This is verified by the condition stated
3837 above. We only need to test it for registers that are set in
3838 the moved region.
3839
3840 MERGE_LIVE is provided by the caller and holds live registers after
3841 TO. */
3842 bitmap_copy (local_merge_live, merge_live);
3843 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
3844 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
3845
3846 /* We're not interested in registers that aren't set in the moved
3847 region at all. */
3848 bitmap_and_into (local_merge_live, merge_set);
3849 for (;;)
3850 {
3851 if (NONDEBUG_INSN_P (insn))
3852 {
3853 if (!bitmap_intersect_p (test_set, local_merge_live)
3854 #ifdef HAVE_cc0
3855 && !sets_cc0_p (insn)
3856 #endif
3857 )
3858 {
3859 max_to = insn;
3860 break;
3861 }
3862
3863 df_simulate_one_insn_backwards (merge_bb, insn,
3864 local_merge_live);
3865 }
3866 if (insn == from)
3867 {
3868 fail = 1;
3869 goto out;
3870 }
3871 insn = PREV_INSN (insn);
3872 }
3873
3874 if (max_to != to)
3875 fail = 1;
3876
3877 if (pmove_upto)
3878 *pmove_upto = max_to;
3879
3880 /* For small register class machines, don't lengthen lifetimes of
3881 hard registers before reload. */
3882 if (! reload_completed
3883 && targetm.small_register_classes_for_mode_p (VOIDmode))
3884 {
3885 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
3886 {
3887 if (i < FIRST_PSEUDO_REGISTER
3888 && ! fixed_regs[i]
3889 && ! global_regs[i])
3890 {
3891 fail = 1;
3892 break;
3893 }
3894 }
3895 }
3896
3897 out:
3898 BITMAP_FREE (merge_set);
3899 BITMAP_FREE (merge_use);
3900 BITMAP_FREE (local_merge_live);
3901 BITMAP_FREE (test_set);
3902 BITMAP_FREE (test_use);
3903
3904 return !fail;
3905 }
3906
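/* A compiled-out sketch (not part of GCC itself) of how a caller might use
can_move_insns_across. The helper and its argument conventions are
hypothetical; it assumes TO is the last nondebug insn of MERGE_BB, so that
the registers live after TO are simply the block's live-out set, and a real
caller still has to update the insn chain and the df information itself. */
#if 0
static rtx_insn *
last_moveable_insn (rtx_insn *from, rtx_insn *to,
                    rtx_insn *across_from, rtx_insn *across_to,
                    basic_block merge_bb)
{
  rtx_insn *move_upto = NULL;
  regset merge_live = BITMAP_ALLOC (&reg_obstack);

  /* Registers live after TO. */
  bitmap_copy (merge_live, df_get_live_out (merge_bb));

  if (can_move_insns_across (from, to, across_from, across_to,
                             merge_bb, merge_live,
                             /*other_branch_live=*/NULL, &move_upto))
    move_upto = to;

  BITMAP_FREE (merge_live);

  /* NULL if nothing can be moved; otherwise the last insn up to which
     moving backwards across ACROSS_FROM..ACROSS_TO looks safe. */
  return move_upto;
}
#endif
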
3907 \f
3908 /*----------------------------------------------------------------------------
3909 MULTIPLE DEFINITIONS
3910
3911 Find the locations in the function reached by multiple definition sites
3912 for a live pseudo. In and out bitvectors are built for each basic
3913 block. They are restricted for efficiency to live registers.
3914
3915 The gen and kill sets for the problem are obvious. Together they
3916 include all defined registers in a basic block; the gen set includes
3917 registers where a partial or conditional or may-clobber definition is
3918 last in the BB, while the kill set includes registers with a complete
3919 definition coming last. However, the computation of the dataflow
3920 itself is interesting.
3921
3922 The idea behind it comes from SSA form's iterated dominance frontier
3923 criterion for inserting PHI functions. Just like in that case, we can use
3924 the dominance frontier to find places where multiple definitions meet;
3925 a register X defined in a basic block BB1 has multiple definitions in
3926 basic blocks in BB1's dominance frontier.
3927
3928 So, the in-set of a basic block BB2 is not just the union of the
3929 out-sets of BB2's predecessors, but also includes some extra bits that
3930 come from the basic blocks in whose dominance frontier BB2 lies (BB1 in
3931 the previous paragraph). We call this set the init-set of BB2.
3932
3933 (Note: the kill-set is actually used only to build the init-set;
3934 the gen bits are propagated from BB1 to BB2 by the dataflow iteration anyway.)
3935
3936 For example, if you have
3937
3938 BB1 : r10 = 0
3939 r11 = 0
3940 if <...> goto BB2 else goto BB3;
3941
3942 BB2 : r10 = 1
3943 r12 = 1
3944 goto BB3;
3945
3946 BB3 :
3947
3948 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
3949 init-set of BB3 includes r10 and r12, but not r11. Note that we do
3950 not need to iterate the dominance frontier, because we do not insert
3951 anything like PHI functions there! Instead, dataflow will take care of
3952 propagating the information to BB3's successors.
3953 ---------------------------------------------------------------------------*/
3954
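/* To make the example above concrete (a sketch; the register numbers are the
ones used in the comment):

kill(BB1) = { r10, r11 } kill(BB2) = { r10, r12 }
BB3 is in BB2's dominance frontier, but not in BB1's,

so the local computation does

init(BB3) |= kill(BB2) & live_in(BB3)

which can add r10 and r12 (when they are live at the top of BB3) but
never r11. */
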
3955 /* Private data used to verify the solution for this problem. */
3956 struct df_md_problem_data
3957 {
3958 /* An obstack for the bitmaps we need for this problem. */
3959 bitmap_obstack md_bitmaps;
3960 };
3961
3962 /* Scratch var used by transfer functions. This is used to do md analysis
3963 only for live registers. */
3964 static bitmap_head df_md_scratch;
3965
3966
3967 static void
3968 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
3969 void *vbb_info)
3970 {
3971 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
3972 if (bb_info)
3973 {
3974 bitmap_clear (&bb_info->kill);
3975 bitmap_clear (&bb_info->gen);
3976 bitmap_clear (&bb_info->init);
3977 bitmap_clear (&bb_info->in);
3978 bitmap_clear (&bb_info->out);
3979 }
3980 }
3981
3982
3983 /* Allocate or reset bitmaps for DF_MD. For blocks seen before, all
3984 bitmaps, including the IN and OUT solution sets, are cleared. */
3985
3986 static void
3987 df_md_alloc (bitmap all_blocks)
3988 {
3989 unsigned int bb_index;
3990 bitmap_iterator bi;
3991 struct df_md_problem_data *problem_data;
3992
3993 df_grow_bb_info (df_md);
3994 if (df_md->problem_data)
3995 problem_data = (struct df_md_problem_data *) df_md->problem_data;
3996 else
3997 {
3998 problem_data = XNEW (struct df_md_problem_data);
3999 df_md->problem_data = problem_data;
4000 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4001 }
4002 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4003
4004 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4005 {
4006 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4007 /* When bitmaps are already initialized, just clear them. */
4008 if (bb_info->init.obstack)
4009 {
4010 bitmap_clear (&bb_info->init);
4011 bitmap_clear (&bb_info->gen);
4012 bitmap_clear (&bb_info->kill);
4013 bitmap_clear (&bb_info->in);
4014 bitmap_clear (&bb_info->out);
4015 }
4016 else
4017 {
4018 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4019 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4020 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4021 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4022 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4023 }
4024 }
4025
4026 df_md->optional_p = true;
4027 }
4028
4029 /* Add the effect of the top artificial defs of BB to the multiple definitions
4030 bitmap LOCAL_MD. */
4031
4032 void
4033 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4034 {
4035 int bb_index = bb->index;
4036 df_ref def;
4037 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
4038 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4039 {
4040 unsigned int dregno = DF_REF_REGNO (def);
4041 if (DF_REF_FLAGS (def)
4042 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4043 bitmap_set_bit (local_md, dregno);
4044 else
4045 bitmap_clear_bit (local_md, dregno);
4046 }
4047 }
4048
4049
4050 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4051 LOCAL_MD. */
4052
4053 void
4054 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
4055 bitmap local_md)
4056 {
4057 df_ref def;
4058
4059 FOR_EACH_INSN_DEF (def, insn)
4060 {
4061 unsigned int dregno = DF_REF_REGNO (def);
4062 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4063 || (dregno >= FIRST_PSEUDO_REGISTER))
4064 {
4065 if (DF_REF_FLAGS (def)
4066 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4067 bitmap_set_bit (local_md, dregno);
4068 else
4069 bitmap_clear_bit (local_md, dregno);
4070 }
4071 }
4072 }
4073
4074 static void
4075 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4076 df_ref def,
4077 int top_flag)
4078 {
4079 bitmap_clear (&seen_in_insn);
4080
4081 for (; def; def = DF_REF_NEXT_LOC (def))
4082 {
4083 unsigned int dregno = DF_REF_REGNO (def);
4084 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4085 || (dregno >= FIRST_PSEUDO_REGISTER))
4086 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4087 {
4088 if (!bitmap_bit_p (&seen_in_insn, dregno))
4089 {
4090 if (DF_REF_FLAGS (def)
4091 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4092 {
4093 bitmap_set_bit (&bb_info->gen, dregno);
4094 bitmap_clear_bit (&bb_info->kill, dregno);
4095 }
4096 else
4097 {
4098 /* When we find a clobber and a regular def,
4099 make sure the regular def wins. */
4100 bitmap_set_bit (&seen_in_insn, dregno);
4101 bitmap_set_bit (&bb_info->kill, dregno);
4102 bitmap_clear_bit (&bb_info->gen, dregno);
4103 }
4104 }
4105 }
4106 }
4107 }
4108
4109
4110 /* Compute local multiple def info for basic block BB. */
4111
4112 static void
4113 df_md_bb_local_compute (unsigned int bb_index)
4114 {
4115 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
4116 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4117 rtx_insn *insn;
4118
4119 /* Artificials are only hard regs. */
4120 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4121 df_md_bb_local_compute_process_def (bb_info,
4122 df_get_artificial_defs (bb_index),
4123 DF_REF_AT_TOP);
4124
4125 FOR_BB_INSNS (bb, insn)
4126 {
4127 unsigned int uid = INSN_UID (insn);
4128 if (!INSN_P (insn))
4129 continue;
4130
4131 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4132 }
4133
4134 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4135 df_md_bb_local_compute_process_def (bb_info,
4136 df_get_artificial_defs (bb_index),
4137 0);
4138 }
4139
4140 /* Compute local multiple def info for each basic block within ALL_BLOCKS. */
4141
4142 static void
4143 df_md_local_compute (bitmap all_blocks)
4144 {
4145 unsigned int bb_index, df_bb_index;
4146 bitmap_iterator bi1, bi2;
4147 basic_block bb;
4148 bitmap_head *frontiers;
4149
4150 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4151
4152 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4153 {
4154 df_md_bb_local_compute (bb_index);
4155 }
4156
4157 bitmap_clear (&seen_in_insn);
4158
4159 frontiers = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
4160 FOR_ALL_BB_FN (bb, cfun)
4161 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4162
4163 compute_dominance_frontiers (frontiers);
4164
4165 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4166 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4167 {
4168 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4169 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4170 {
4171 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, df_bb_index);
4172 if (bitmap_bit_p (all_blocks, df_bb_index))
4173 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4174 df_get_live_in (bb));
4175 }
4176 }
4177
4178 FOR_ALL_BB_FN (bb, cfun)
4179 bitmap_clear (&frontiers[bb->index]);
4180 free (frontiers);
4181 }
4182
4183
4184 /* Reset the global solution for recalculation. */
4185
4186 static void
4187 df_md_reset (bitmap all_blocks)
4188 {
4189 unsigned int bb_index;
4190 bitmap_iterator bi;
4191
4192 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4193 {
4194 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4195 gcc_assert (bb_info);
4196 bitmap_clear (&bb_info->in);
4197 bitmap_clear (&bb_info->out);
4198 }
4199 }
4200
4201 static bool
4202 df_md_transfer_function (int bb_index)
4203 {
4204 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
4205 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4206 bitmap in = &bb_info->in;
4207 bitmap out = &bb_info->out;
4208 bitmap gen = &bb_info->gen;
4209 bitmap kill = &bb_info->kill;
4210
4211 /* We need to use a scratch set here so that the value returned from this
4212 function invocation properly reflects whether the sets changed in a
4213 significant way; i.e. not just because the live set was anded in. */
4214 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4215
4216 /* Multiple definitions of a register are not relevant if it is not
4217 live. Thus we trim the result to the places where it is live. */
4218 bitmap_and_into (in, df_get_live_in (bb));
4219
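/* OUT = (GEN & LIVE_OUT) | (IN & ~KILL); the return value says whether
OUT changed. */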
4220 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4221 }
4222
4223 /* Initialize the solution bit vectors for the problem. */
4224
4225 static void
4226 df_md_init (bitmap all_blocks)
4227 {
4228 unsigned int bb_index;
4229 bitmap_iterator bi;
4230
4231 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4232 {
4233 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4234
4235 bitmap_copy (&bb_info->in, &bb_info->init);
4236 df_md_transfer_function (bb_index);
4237 }
4238 }
4239
4240 static void
4241 df_md_confluence_0 (basic_block bb)
4242 {
4243 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4244 bitmap_copy (&bb_info->in, &bb_info->init);
4245 }
4246
4247 /* The IN set of the target gets the union of the OUT set of the source. */
4248
4249 static bool
4250 df_md_confluence_n (edge e)
4251 {
4252 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4253 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4254
4255 if (e->flags & EDGE_FAKE)
4256 return false;
4257
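/* Call-clobbered registers do not survive across an exception edge, so
their bits are not propagated from the source block. */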
4258 if (e->flags & EDGE_EH)
4259 return bitmap_ior_and_compl_into (op1, op2,
4260 regs_invalidated_by_call_regset);
4261 else
4262 return bitmap_ior_into (op1, op2);
4263 }
4264
4265 /* Free all storage associated with the problem. */
4266
4267 static void
4268 df_md_free (void)
4269 {
4270 struct df_md_problem_data *problem_data
4271 = (struct df_md_problem_data *) df_md->problem_data;
4272
4273 bitmap_obstack_release (&problem_data->md_bitmaps);
4274 free (problem_data);
4275 df_md->problem_data = NULL;
4276
4277 df_md->block_info_size = 0;
4278 free (df_md->block_info);
4279 df_md->block_info = NULL;
4280 free (df_md);
4281 }
4282
4283
4284 /* Debugging info at top of bb. */
4285
4286 static void
4287 df_md_top_dump (basic_block bb, FILE *file)
4288 {
4289 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4290 if (!bb_info)
4291 return;
4292
4293 fprintf (file, ";; md in \t");
4294 df_print_regset (file, &bb_info->in);
4295 fprintf (file, ";; md init \t");
4296 df_print_regset (file, &bb_info->init);
4297 fprintf (file, ";; md gen \t");
4298 df_print_regset (file, &bb_info->gen);
4299 fprintf (file, ";; md kill \t");
4300 df_print_regset (file, &bb_info->kill);
4301 }
4302
4303 /* Debugging info at bottom of bb. */
4304
4305 static void
4306 df_md_bottom_dump (basic_block bb, FILE *file)
4307 {
4308 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4309 if (!bb_info)
4310 return;
4311
4312 fprintf (file, ";; md out \t");
4313 df_print_regset (file, &bb_info->out);
4314 }
4315
4316 static struct df_problem problem_MD =
4317 {
4318 DF_MD, /* Problem id. */
4319 DF_FORWARD, /* Direction. */
4320 df_md_alloc, /* Allocate the problem specific data. */
4321 df_md_reset, /* Reset global information. */
4322 df_md_free_bb_info, /* Free basic block info. */
4323 df_md_local_compute, /* Local compute function. */
4324 df_md_init, /* Init the solution specific data. */
4325 df_worklist_dataflow, /* Worklist solver. */
4326 df_md_confluence_0, /* Confluence operator 0. */
4327 df_md_confluence_n, /* Confluence operator n. */
4328 df_md_transfer_function, /* Transfer function. */
4329 NULL, /* Finalize function. */
4330 df_md_free, /* Free all of the problem information. */
4331 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4332 NULL, /* Debugging. */
4333 df_md_top_dump, /* Debugging start block. */
4334 df_md_bottom_dump, /* Debugging end block. */
4335 NULL, /* Debugging start insn. */
4336 NULL, /* Debugging end insn. */
4337 NULL, /* Incremental solution verify start. */
4338 NULL, /* Incremental solution verify end. */
4339 NULL, /* Dependent problem. */
4340 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4341 TV_DF_MD, /* Timing variable. */
4342 false /* Reset blocks on dropping out of blocks_to_analyze. */
4343 };
4344
4345 /* Create a new MD instance and add it to the existing instance
4346 of DF. */
4347
4348 void
4349 df_md_add_problem (void)
4350 {
4351 df_add_problem (&problem_MD);
4352 }
4353
4354
4355