[gcc.git] / gcc / df-problems.c
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "timevar.h"
43 #include "df.h"
44 #include "except.h"
45 #include "dce.h"
46 #include "vecprim.h"
47
48 /* Note that turning REG_DEAD_DEBUGGING on will cause
49 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
50 addresses in the dumps. */
51 #if 0
52 #define REG_DEAD_DEBUGGING
53 #endif
54
55 #define DF_SPARSE_THRESHOLD 32
56
57 static bitmap seen_in_block = NULL;
58 static bitmap seen_in_insn = NULL;
59
60 \f
61 /*----------------------------------------------------------------------------
62    Public access functions for the dataflow problems.
63 ----------------------------------------------------------------------------*/
64 /* Get the live-out set for BB no matter what problem happens to be
65    defined.  This function is used by the register allocators, which
66    choose different dataflow problems depending on the optimization
67    level.  */
68
69 bitmap
70 df_get_live_out (basic_block bb)
71 {
72 gcc_assert (df_lr);
73
74 if (df_live)
75 return DF_LIVE_OUT (bb);
76 else
77 return DF_LR_OUT (bb);
78 }
79
80 /* Get the live-in set for BB no matter what problem happens to be
81    defined.  This function is used by the register allocators, which
82    choose different dataflow problems depending on the optimization
83    level.  */
84
85 bitmap
86 df_get_live_in (basic_block bb)
87 {
88 gcc_assert (df_lr);
89
90 if (df_live)
91 return DF_LIVE_IN (bb);
92 else
93 return DF_LR_IN (bb);
94 }
95
96 /*----------------------------------------------------------------------------
97 Utility functions.
98 ----------------------------------------------------------------------------*/
99
100 /* Generic versions to get the void* version of the block info. Only
101 used inside the problem instance vectors. */
102
103 /* Grow the bb_info array. */
104
105 void
106 df_grow_bb_info (struct dataflow *dflow)
107 {
108 unsigned int new_size = last_basic_block + 1;
109 if (dflow->block_info_size < new_size)
110 {
111 new_size += new_size / 4;
112 dflow->block_info = XRESIZEVEC (void *, dflow->block_info, new_size);
113 memset (dflow->block_info + dflow->block_info_size, 0,
114 	      (new_size - dflow->block_info_size) * sizeof (void *));
115 dflow->block_info_size = new_size;
116 }
117 }
118
119 /* Dump a def-use or use-def chain for REF to FILE. */
120
121 void
122 df_chain_dump (struct df_link *link, FILE *file)
123 {
124 fprintf (file, "{ ");
125 for (; link; link = link->next)
126 {
127 fprintf (file, "%c%d(bb %d insn %d) ",
128 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
129 DF_REF_ID (link->ref),
130 DF_REF_BBNO (link->ref),
131 DF_REF_IS_ARTIFICIAL (link->ref) ? -1 : DF_REF_INSN_UID (link->ref));
132 }
133 fprintf (file, "}");
134 }
135
136
137 /* Print some basic block info as part of df_dump. */
138
139 void
140 df_print_bb_index (basic_block bb, FILE *file)
141 {
142 edge e;
143 edge_iterator ei;
144
145 fprintf (file, "\n( ");
146 FOR_EACH_EDGE (e, ei, bb->preds)
147 {
148 basic_block pred = e->src;
149 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
150 }
151 fprintf (file, ")->[%d]->( ", bb->index);
152 FOR_EACH_EDGE (e, ei, bb->succs)
153 {
154 basic_block succ = e->dest;
155 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
156 }
157 fprintf (file, ")\n");
158 }
159
160 \f
161 /*----------------------------------------------------------------------------
162 REACHING DEFINITIONS
163
164 Find the locations in the function where each definition site for a
165 pseudo reaches. In and out bitvectors are built for each basic
166 block. The id field in the ref is used to index into these sets.
167 See df.h for details.
168 ----------------------------------------------------------------------------*/
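
/* Expressed as dataflow equations, with the bits indexed by def ID:

     IN (BB)  = union over predecessors P of OUT (P)
     OUT (BB) = GEN (BB) | (IN (BB) - KILL (BB))

   See df_rd_confluence_n and df_rd_transfer_function below; EH edges
   additionally remove the defs invalidated by the call.  */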
169
170 /* This problem plays a large number of games for the sake of
171 efficiency.
172
173 1) The order of the bits in the bitvectors. After the scanning
174 phase, all of the defs are sorted. All of the defs for the reg 0
175 are first, followed by all defs for reg 1 and so on.
176
177 2) There are two kill sets, one if the number of defs is less or
178 equal to DF_SPARSE_THRESHOLD and another if the number of defs is
179 greater.
180
181 <= : Data is built directly in the kill set.
182
183 > : One level of indirection is used to keep from generating long
184 strings of 1 bits in the kill sets. Bitvectors that are indexed
185 by the regnum are used to represent that there is a killing def
186 for the register. The confluence and transfer functions use
187 these along with the bitmap_clear_range call to remove ranges of
188 bits without actually generating a knockout vector.
189
190      Both the kill/sparse_kill pair and the dense_invalidated_by_call/
191      sparse_invalidated_by_call pair play this game.  */
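
/* An illustrative example (the numbers are made up): suppose pseudo 42
   has DF_DEFS_COUNT (42) == 100 defs, occupying the def IDs
   DF_DEFS_BEGIN (42) .. DF_DEFS_BEGIN (42) + 99.  Since 100 is greater
   than DF_SPARSE_THRESHOLD, a killing def of pseudo 42 sets the single
   bit 42 in sparse_kill rather than 100 consecutive bits in kill, and
   the transfer function later removes the whole ID range from IN with
   bitmap_clear_range.  A pseudo with, say, 5 defs uses the dense kill
   set directly.  */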
192
193 /* Private data used to compute the solution for this problem. These
194 data structures are not accessible outside of this module. */
195 struct df_rd_problem_data
196 {
197 /* The set of defs to regs invalidated by call. */
198 bitmap sparse_invalidated_by_call;
199   /* The set of defs to regs invalidated by call for rd.  */
200 bitmap dense_invalidated_by_call;
201 /* An obstack for the bitmaps we need for this problem. */
202 bitmap_obstack rd_bitmaps;
203 };
204
205 /* Set basic block info. */
206
207 static void
208 df_rd_set_bb_info (unsigned int index,
209 struct df_rd_bb_info *bb_info)
210 {
211 gcc_assert (df_rd);
212 gcc_assert (index < df_rd->block_info_size);
213 df_rd->block_info[index] = bb_info;
214 }
215
216
217 /* Free basic block info. */
218
219 static void
220 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
221 void *vbb_info)
222 {
223 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
224 if (bb_info)
225 {
226 BITMAP_FREE (bb_info->kill);
227 BITMAP_FREE (bb_info->sparse_kill);
228 BITMAP_FREE (bb_info->gen);
229 BITMAP_FREE (bb_info->in);
230 BITMAP_FREE (bb_info->out);
231 pool_free (df_rd->block_pool, bb_info);
232 }
233 }
234
235
236 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
237 not touched unless the block is new. */
238
239 static void
240 df_rd_alloc (bitmap all_blocks)
241 {
242 unsigned int bb_index;
243 bitmap_iterator bi;
244 struct df_rd_problem_data *problem_data;
245
246 if (!df_rd->block_pool)
247 df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
248 sizeof (struct df_rd_bb_info), 50);
249
250 if (df_rd->problem_data)
251 {
252 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
253 bitmap_clear (problem_data->sparse_invalidated_by_call);
254 bitmap_clear (problem_data->dense_invalidated_by_call);
255 }
256 else
257 {
258 problem_data = XNEW (struct df_rd_problem_data);
259 df_rd->problem_data = problem_data;
260
261 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
262 problem_data->sparse_invalidated_by_call
263 = BITMAP_ALLOC (&problem_data->rd_bitmaps);
264 problem_data->dense_invalidated_by_call
265 = BITMAP_ALLOC (&problem_data->rd_bitmaps);
266 }
267
268 df_grow_bb_info (df_rd);
269
270   /* Because of the clustering of all def sites for the same pseudo,
271      we have to process all of the blocks before doing the
272      analysis.  */
273
274 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
275 {
276 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
277 if (bb_info)
278 {
279 bitmap_clear (bb_info->kill);
280 bitmap_clear (bb_info->sparse_kill);
281 bitmap_clear (bb_info->gen);
282 }
283 else
284 {
285 bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
286 df_rd_set_bb_info (bb_index, bb_info);
287 bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
288 bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
289 bb_info->gen = BITMAP_ALLOC (&problem_data->rd_bitmaps);
290 bb_info->in = BITMAP_ALLOC (&problem_data->rd_bitmaps);
291 bb_info->out = BITMAP_ALLOC (&problem_data->rd_bitmaps);
292 }
293 }
294 df_rd->optional_p = true;
295 }
296
297
298 /* Add the effect of the top artificial defs of BB to the reaching definitions
299 bitmap LOCAL_RD. */
300
301 void
302 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
303 {
304 int bb_index = bb->index;
305 df_ref *def_rec;
306 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
307 {
308 df_ref def = *def_rec;
309 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
310 {
311 unsigned int dregno = DF_REF_REGNO (def);
312 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
313 bitmap_clear_range (local_rd,
314 DF_DEFS_BEGIN (dregno),
315 DF_DEFS_COUNT (dregno));
316 bitmap_set_bit (local_rd, DF_REF_ID (def));
317 }
318 }
319 }
320
321 /* Add the effect of the defs of INSN to the reaching definitions bitmap
322 LOCAL_RD. */
323
324 void
325 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
326 bitmap local_rd)
327 {
328 unsigned uid = INSN_UID (insn);
329 df_ref *def_rec;
330
331 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
332 {
333 df_ref def = *def_rec;
334 unsigned int dregno = DF_REF_REGNO (def);
335 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
336 || (dregno >= FIRST_PSEUDO_REGISTER))
337 {
338 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
339 bitmap_clear_range (local_rd,
340 DF_DEFS_BEGIN (dregno),
341 DF_DEFS_COUNT (dregno));
342 if (!(DF_REF_FLAGS (def)
343 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
344 bitmap_set_bit (local_rd, DF_REF_ID (def));
345 }
346 }
347 }
348
349 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
350 more complicated than just simulating, because we must produce the
351 gen and kill sets and hence deal with the two possible representations
352 of kill sets. */
353
354 static void
355 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
356 df_ref *def_rec,
357 int top_flag)
358 {
359 while (*def_rec)
360 {
361 df_ref def = *def_rec;
362 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
363 {
364 unsigned int regno = DF_REF_REGNO (def);
365 unsigned int begin = DF_DEFS_BEGIN (regno);
366 unsigned int n_defs = DF_DEFS_COUNT (regno);
367
368 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
369 || (regno >= FIRST_PSEUDO_REGISTER))
370 {
371 	      /* Only the last def(s) for a regno in the block have any
372 		 effect.  */
373 if (!bitmap_bit_p (seen_in_block, regno))
374 {
375 /* The first def for regno in insn gets to knock out the
376 defs from other instructions. */
377 if ((!bitmap_bit_p (seen_in_insn, regno))
378 /* If the def is to only part of the reg, it does
379 not kill the other defs that reach here. */
380 && (!(DF_REF_FLAGS (def) &
381 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
382 {
383 if (n_defs > DF_SPARSE_THRESHOLD)
384 {
385 bitmap_set_bit (bb_info->sparse_kill, regno);
386 		      bitmap_clear_range (bb_info->gen, begin, n_defs);
387 }
388 else
389 {
390 bitmap_set_range (bb_info->kill, begin, n_defs);
391 bitmap_clear_range (bb_info->gen, begin, n_defs);
392 }
393 }
394
395 bitmap_set_bit (seen_in_insn, regno);
396 /* All defs for regno in the instruction may be put into
397 the gen set. */
398 if (!(DF_REF_FLAGS (def)
399 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
400 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
401 }
402 }
403 }
404 def_rec++;
405 }
406 }
407
408 /* Compute local reaching def info for basic block BB. */
409
410 static void
411 df_rd_bb_local_compute (unsigned int bb_index)
412 {
413 basic_block bb = BASIC_BLOCK (bb_index);
414 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
415 rtx insn;
416
417 bitmap_clear (seen_in_block);
418 bitmap_clear (seen_in_insn);
419
420 /* Artificials are only hard regs. */
421 if (!(df->changeable_flags & DF_NO_HARD_REGS))
422 df_rd_bb_local_compute_process_def (bb_info,
423 df_get_artificial_defs (bb_index),
424 0);
425
426 FOR_BB_INSNS_REVERSE (bb, insn)
427 {
428 unsigned int uid = INSN_UID (insn);
429
430 if (!INSN_P (insn))
431 continue;
432
433 df_rd_bb_local_compute_process_def (bb_info,
434 DF_INSN_UID_DEFS (uid), 0);
435
436 /* This complex dance with the two bitmaps is required because
437 instructions can assign twice to the same pseudo. This
438 generally happens with calls that will have one def for the
439 result and another def for the clobber. If only one vector
440 is used and the clobber goes first, the result will be
441 lost. */
442 bitmap_ior_into (seen_in_block, seen_in_insn);
443 bitmap_clear (seen_in_insn);
444 }
445
446 /* Process the artificial defs at the top of the block last since we
447 are going backwards through the block and these are logically at
448 the start. */
449 if (!(df->changeable_flags & DF_NO_HARD_REGS))
450 df_rd_bb_local_compute_process_def (bb_info,
451 df_get_artificial_defs (bb_index),
452 DF_REF_AT_TOP);
453 }
454
455
456 /* Compute local reaching def info for each basic block within BLOCKS. */
457
458 static void
459 df_rd_local_compute (bitmap all_blocks)
460 {
461 unsigned int bb_index;
462 bitmap_iterator bi;
463 unsigned int regno;
464 struct df_rd_problem_data *problem_data
465 = (struct df_rd_problem_data *) df_rd->problem_data;
466 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
467 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
468
469 seen_in_block = BITMAP_ALLOC (&df_bitmap_obstack);
470 seen_in_insn = BITMAP_ALLOC (&df_bitmap_obstack);
471
472 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
473
474 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
475 {
476 df_rd_bb_local_compute (bb_index);
477 }
478
479   /* Set up the knockout bit vectors to be applied across EH edges.  */
480 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
481 {
482 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
483 bitmap_set_bit (sparse_invalidated, regno);
484 else
485 bitmap_set_range (dense_invalidated,
486 DF_DEFS_BEGIN (regno),
487 DF_DEFS_COUNT (regno));
488 }
489
490 BITMAP_FREE (seen_in_block);
491 BITMAP_FREE (seen_in_insn);
492 }
493
494
495 /* Initialize the solution bit vectors for the problem.  */
496
497 static void
498 df_rd_init_solution (bitmap all_blocks)
499 {
500 unsigned int bb_index;
501 bitmap_iterator bi;
502
503 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
504 {
505 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
506
507 bitmap_copy (bb_info->out, bb_info->gen);
508 bitmap_clear (bb_info->in);
509 }
510 }
511
512 /* IN of the target gets the OR of OUT of the source.  */
513
514 static void
515 df_rd_confluence_n (edge e)
516 {
517 bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
518 bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
519
520 if (e->flags & EDGE_FAKE)
521 return;
522
523 if (e->flags & EDGE_EH)
524 {
525 struct df_rd_problem_data *problem_data
526 = (struct df_rd_problem_data *) df_rd->problem_data;
527 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
528 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
529 bitmap_iterator bi;
530 unsigned int regno;
531 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
532
533 bitmap_copy (tmp, op2);
534 bitmap_and_compl_into (tmp, dense_invalidated);
535
536 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
537 {
538 bitmap_clear_range (tmp,
539 DF_DEFS_BEGIN (regno),
540 DF_DEFS_COUNT (regno));
541 }
542 bitmap_ior_into (op1, tmp);
543 BITMAP_FREE (tmp);
544 }
545 else
546 bitmap_ior_into (op1, op2);
547 }
548
549
550 /* Transfer function. */
551
552 static bool
553 df_rd_transfer_function (int bb_index)
554 {
555 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
556 unsigned int regno;
557 bitmap_iterator bi;
558 bitmap in = bb_info->in;
559 bitmap out = bb_info->out;
560 bitmap gen = bb_info->gen;
561 bitmap kill = bb_info->kill;
562 bitmap sparse_kill = bb_info->sparse_kill;
563
564 if (bitmap_empty_p (sparse_kill))
565 return bitmap_ior_and_compl (out, gen, in, kill);
566 else
567 {
568 struct df_rd_problem_data *problem_data;
569 bool changed = false;
570 bitmap tmp;
571
572 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
573 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
574 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
575 tmp = BITMAP_ALLOC (&problem_data->rd_bitmaps);
576
577 bitmap_copy (tmp, in);
578 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
579 {
580 bitmap_clear_range (tmp,
581 DF_DEFS_BEGIN (regno),
582 DF_DEFS_COUNT (regno));
583 }
584 bitmap_and_compl_into (tmp, kill);
585 bitmap_ior_into (tmp, gen);
586 changed = !bitmap_equal_p (tmp, out);
587 if (changed)
588 {
589 BITMAP_FREE (out);
590 bb_info->out = tmp;
591 }
592 else
593 BITMAP_FREE (tmp);
594 return changed;
595 }
596 }
597
598
599 /* Free all storage associated with the problem. */
600
601 static void
602 df_rd_free (void)
603 {
604 struct df_rd_problem_data *problem_data
605 = (struct df_rd_problem_data *) df_rd->problem_data;
606
607 if (problem_data)
608 {
609 free_alloc_pool (df_rd->block_pool);
610 bitmap_obstack_release (&problem_data->rd_bitmaps);
611
612 df_rd->block_info_size = 0;
613 free (df_rd->block_info);
614 free (df_rd->problem_data);
615 }
616 free (df_rd);
617 }
618
619
620 /* Debugging info. */
621
622 static void
623 df_rd_start_dump (FILE *file)
624 {
625 struct df_rd_problem_data *problem_data
626 = (struct df_rd_problem_data *) df_rd->problem_data;
627   unsigned int m = DF_REG_SIZE (df);
628 unsigned int regno;
629
630 if (!df_rd->block_info)
631 return;
632
633 fprintf (file, ";; Reaching defs:\n\n");
634
635 fprintf (file, " sparse invalidated \t");
636 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
637 fprintf (file, " dense invalidated \t");
638 dump_bitmap (file, problem_data->dense_invalidated_by_call);
639
640 for (regno = 0; regno < m; regno++)
641 if (DF_DEFS_COUNT (regno))
642 fprintf (file, "%d[%d,%d] ", regno,
643 DF_DEFS_BEGIN (regno),
644 DF_DEFS_COUNT (regno));
645 fprintf (file, "\n");
646
647 }
648
649
650 /* Debugging info at top of bb. */
651
652 static void
653 df_rd_top_dump (basic_block bb, FILE *file)
654 {
655 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
656 if (!bb_info || !bb_info->in)
657 return;
658
659 fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
660 dump_bitmap (file, bb_info->in);
661 fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
662 dump_bitmap (file, bb_info->gen);
663 fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
664 dump_bitmap (file, bb_info->kill);
665 }
666
667
668 /* Debugging info at bottom of bb.  */
669
670 static void
671 df_rd_bottom_dump (basic_block bb, FILE *file)
672 {
673 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
674 if (!bb_info || !bb_info->out)
675 return;
676
677 fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
678 dump_bitmap (file, bb_info->out);
679 }
680
681 /* All of the information associated with every instance of the problem. */
682
683 static struct df_problem problem_RD =
684 {
685 DF_RD, /* Problem id. */
686 DF_FORWARD, /* Direction. */
687 df_rd_alloc, /* Allocate the problem specific data. */
688 NULL, /* Reset global information. */
689 df_rd_free_bb_info, /* Free basic block info. */
690 df_rd_local_compute, /* Local compute function. */
691 df_rd_init_solution, /* Init the solution specific data. */
692 df_worklist_dataflow, /* Worklist solver. */
693 NULL, /* Confluence operator 0. */
694 df_rd_confluence_n, /* Confluence operator n. */
695 df_rd_transfer_function, /* Transfer function. */
696 NULL, /* Finalize function. */
697 df_rd_free, /* Free all of the problem information. */
698 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
699 df_rd_start_dump, /* Debugging. */
700 df_rd_top_dump, /* Debugging start block. */
701 df_rd_bottom_dump, /* Debugging end block. */
702 NULL, /* Incremental solution verify start. */
703 NULL, /* Incremental solution verify end. */
704 NULL, /* Dependent problem. */
705 TV_DF_RD, /* Timing variable. */
706 true /* Reset blocks on dropping out of blocks_to_analyze. */
707 };
708
709
710
711 /* Create a new RD instance and add it to the existing instance
712 of DF. */
713
714 void
715 df_rd_add_problem (void)
716 {
717 df_add_problem (&problem_RD);
718 }
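
/* Typical use of this problem (an illustrative sketch, not code from
   this file):

     df_rd_add_problem ();
     df_analyze ();
     ...
     struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);

   Each bit set in bb_info->in and bb_info->out is a def ID.  df_analyze
   (defined in df-core.c) calls df_rd_alloc, df_rd_local_compute and the
   worklist solver registered above.  */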
719
720
721 \f
722 /*----------------------------------------------------------------------------
723 LIVE REGISTERS
724
725 Find the locations in the function where any use of a pseudo can
726 reach in the backwards direction. In and out bitvectors are built
727 for each basic block. The regno is used to index into these sets.
728 See df.h for details.
729 ----------------------------------------------------------------------------*/
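
/* Expressed as dataflow equations over register numbers:

     OUT (BB) = union over successors S of IN (S), plus hardware_regs_used
     IN (BB)  = USE (BB) | (OUT (BB) - DEF (BB))

   See df_lr_confluence_n and df_lr_transfer_function below; on EH edges
   the call-clobbered registers are removed from the successor's IN set
   before it is ORed in.  */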
730
731 /* Private data used to verify the solution for this problem. */
732 struct df_lr_problem_data
733 {
734 bitmap *in;
735 bitmap *out;
736 };
737
738
739 /* Set basic block info. */
740
741 static void
742 df_lr_set_bb_info (unsigned int index,
743 struct df_lr_bb_info *bb_info)
744 {
745 gcc_assert (df_lr);
746 gcc_assert (index < df_lr->block_info_size);
747 df_lr->block_info[index] = bb_info;
748 }
749
750
751 /* Free basic block info. */
752
753 static void
754 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
755 void *vbb_info)
756 {
757 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
758 if (bb_info)
759 {
760 BITMAP_FREE (bb_info->use);
761 BITMAP_FREE (bb_info->def);
762 BITMAP_FREE (bb_info->in);
763 BITMAP_FREE (bb_info->out);
764 pool_free (df_lr->block_pool, bb_info);
765 }
766 }
767
768
769 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
770 not touched unless the block is new. */
771
772 static void
773 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
774 {
775 unsigned int bb_index;
776 bitmap_iterator bi;
777
778 if (!df_lr->block_pool)
779 df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
780 sizeof (struct df_lr_bb_info), 50);
781
782 df_grow_bb_info (df_lr);
783
784 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
785 {
786 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
787 if (bb_info)
788 {
789 bitmap_clear (bb_info->def);
790 bitmap_clear (bb_info->use);
791 }
792 else
793 {
794 bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
795 df_lr_set_bb_info (bb_index, bb_info);
796 bb_info->use = BITMAP_ALLOC (NULL);
797 bb_info->def = BITMAP_ALLOC (NULL);
798 bb_info->in = BITMAP_ALLOC (NULL);
799 bb_info->out = BITMAP_ALLOC (NULL);
800 }
801 }
802
803 df_lr->optional_p = false;
804 }
805
806
807 /* Reset the global solution for recalculation. */
808
809 static void
810 df_lr_reset (bitmap all_blocks)
811 {
812 unsigned int bb_index;
813 bitmap_iterator bi;
814
815 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
816 {
817 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
818 gcc_assert (bb_info);
819 bitmap_clear (bb_info->in);
820 bitmap_clear (bb_info->out);
821 }
822 }
823
824
825 /* Compute local live register info for basic block BB. */
826
827 static void
828 df_lr_bb_local_compute (unsigned int bb_index)
829 {
830 basic_block bb = BASIC_BLOCK (bb_index);
831 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
832 rtx insn;
833 df_ref *def_rec;
834 df_ref *use_rec;
835
836 /* Process the registers set in an exception handler. */
837 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
838 {
839 df_ref def = *def_rec;
840 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
841 {
842 unsigned int dregno = DF_REF_REGNO (def);
843 bitmap_set_bit (bb_info->def, dregno);
844 bitmap_clear_bit (bb_info->use, dregno);
845 }
846 }
847
848 /* Process the hardware registers that are always live. */
849 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
850 {
851 df_ref use = *use_rec;
852 /* Add use to set of uses in this BB. */
853 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
854 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
855 }
856
857 FOR_BB_INSNS_REVERSE (bb, insn)
858 {
859 unsigned int uid = INSN_UID (insn);
860
861 if (!NONDEBUG_INSN_P (insn))
862 continue;
863
864 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
865 {
866 df_ref def = *def_rec;
867 /* If the def is to only part of the reg, it does
868 not kill the other defs that reach here. */
869 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
870 {
871 unsigned int dregno = DF_REF_REGNO (def);
872 bitmap_set_bit (bb_info->def, dregno);
873 bitmap_clear_bit (bb_info->use, dregno);
874 }
875 }
876
877 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
878 {
879 df_ref use = *use_rec;
880 /* Add use to set of uses in this BB. */
881 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
882 }
883 }
884
885 /* Process the registers set in an exception handler or the hard
886 frame pointer if this block is the target of a non local
887 goto. */
888 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
889 {
890 df_ref def = *def_rec;
891 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
892 {
893 unsigned int dregno = DF_REF_REGNO (def);
894 bitmap_set_bit (bb_info->def, dregno);
895 bitmap_clear_bit (bb_info->use, dregno);
896 }
897 }
898
899 #ifdef EH_USES
900 /* Process the uses that are live into an exception handler. */
901 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
902 {
903 df_ref use = *use_rec;
904 /* Add use to set of uses in this BB. */
905 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
906 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
907 }
908 #endif
909
910 /* If the df_live problem is not defined, such as at -O0 and -O1, we
911 still need to keep the luids up to date. This is normally done
912 in the df_live problem since this problem has a forwards
913 scan. */
914 if (!df_live)
915 df_recompute_luids (bb);
916 }
917
918
919 /* Compute local live register info for each basic block within BLOCKS. */
920
921 static void
922 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
923 {
924 unsigned int bb_index;
925 bitmap_iterator bi;
926
927 bitmap_clear (df->hardware_regs_used);
928
929 /* The all-important stack pointer must always be live. */
930 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
931
932 /* Before reload, there are a few registers that must be forced
933 live everywhere -- which might not already be the case for
934 blocks within infinite loops. */
935 if (!reload_completed)
936 {
937 /* Any reference to any pseudo before reload is a potential
938 reference of the frame pointer. */
939 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
940
941 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
942 /* Pseudos with argument area equivalences may require
943 reloading via the argument pointer. */
944 if (fixed_regs[ARG_POINTER_REGNUM])
945 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
946 #endif
947
948 /* Any constant, or pseudo with constant equivalences, may
949 require reloading from memory using the pic register. */
950 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
951 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
952 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
953 }
954
955 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
956 {
957 if (bb_index == EXIT_BLOCK)
958 {
959 /* The exit block is special for this problem and its bits are
960 computed from thin air. */
961 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
962 bitmap_copy (bb_info->use, df->exit_block_uses);
963 }
964 else
965 df_lr_bb_local_compute (bb_index);
966 }
967
968 bitmap_clear (df_lr->out_of_date_transfer_functions);
969 }
970
971
972 /* Initialize the solution vectors. */
973
974 static void
975 df_lr_init (bitmap all_blocks)
976 {
977 unsigned int bb_index;
978 bitmap_iterator bi;
979
980 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
981 {
982 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
983 bitmap_copy (bb_info->in, bb_info->use);
984 bitmap_clear (bb_info->out);
985 }
986 }
987
988
989 /* Confluence function that processes infinite loops. This might be a
990 noreturn function that throws. And even if it isn't, getting the
991 unwind info right helps debugging. */
992 static void
993 df_lr_confluence_0 (basic_block bb)
994 {
995 bitmap op1 = df_lr_get_bb_info (bb->index)->out;
996 if (bb != EXIT_BLOCK_PTR)
997 bitmap_copy (op1, df->hardware_regs_used);
998 }
999
1000
1001 /* Confluence function that ignores fake edges. */
1002
1003 static void
1004 df_lr_confluence_n (edge e)
1005 {
1006 bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
1007 bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;
1008
1009 /* Call-clobbered registers die across exception and call edges. */
1010 /* ??? Abnormal call edges ignored for the moment, as this gets
1011 confused by sibling call edges, which crashes reg-stack. */
1012 if (e->flags & EDGE_EH)
1013 bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
1014 else
1015 bitmap_ior_into (op1, op2);
1016
1017 bitmap_ior_into (op1, df->hardware_regs_used);
1018 }
1019
1020
1021 /* Transfer function. */
1022
1023 static bool
1024 df_lr_transfer_function (int bb_index)
1025 {
1026 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1027 bitmap in = bb_info->in;
1028 bitmap out = bb_info->out;
1029 bitmap use = bb_info->use;
1030 bitmap def = bb_info->def;
1031
1032 return bitmap_ior_and_compl (in, use, out, def);
1033 }
1034
1035
1036 /* Run the fast dce as a side effect of building LR. */
1037
1038 static void
1039 df_lr_finalize (bitmap all_blocks)
1040 {
1041 df_lr->solutions_dirty = false;
1042 if (df->changeable_flags & DF_LR_RUN_DCE)
1043 {
1044 run_fast_df_dce ();
1045
1046 /* If dce deletes some instructions, we need to recompute the lr
1047 solution before proceeding further. The problem is that fast
1048 	 dce is a pessimistic dataflow algorithm.  In the case where
1049 it deletes a statement S inside of a loop, the uses inside of
1050 S may not be deleted from the dataflow solution because they
1051 were carried around the loop. While it is conservatively
1052 correct to leave these extra bits, the standards of df
1053 require that we maintain the best possible (least fixed
1054 point) solution. The only way to do that is to redo the
1055 iteration from the beginning. See PR35805 for an
1056 example. */
1057 if (df_lr->solutions_dirty)
1058 {
1059 df_clear_flags (DF_LR_RUN_DCE);
1060 df_lr_alloc (all_blocks);
1061 df_lr_local_compute (all_blocks);
1062 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1063 df_lr_finalize (all_blocks);
1064 df_set_flags (DF_LR_RUN_DCE);
1065 }
1066 }
1067 }
1068
1069
1070 /* Free all storage associated with the problem. */
1071
1072 static void
1073 df_lr_free (void)
1074 {
1075 if (df_lr->block_info)
1076 {
1077 unsigned int i;
1078 for (i = 0; i < df_lr->block_info_size; i++)
1079 {
1080 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (i);
1081 if (bb_info)
1082 {
1083 BITMAP_FREE (bb_info->use);
1084 BITMAP_FREE (bb_info->def);
1085 BITMAP_FREE (bb_info->in);
1086 BITMAP_FREE (bb_info->out);
1087 }
1088 }
1089 free_alloc_pool (df_lr->block_pool);
1090
1091 df_lr->block_info_size = 0;
1092 free (df_lr->block_info);
1093 }
1094
1095 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1096 free (df_lr);
1097 }
1098
1099
1100 /* Debugging info at top of bb. */
1101
1102 static void
1103 df_lr_top_dump (basic_block bb, FILE *file)
1104 {
1105 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1106 struct df_lr_problem_data *problem_data;
1107 if (!bb_info || !bb_info->in)
1108 return;
1109
1110 fprintf (file, ";; lr in \t");
1111 df_print_regset (file, bb_info->in);
1112 if (df_lr->problem_data)
1113 {
1114 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1115 fprintf (file, ";; old in \t");
1116 df_print_regset (file, problem_data->in[bb->index]);
1117 }
1118 fprintf (file, ";; lr use \t");
1119 df_print_regset (file, bb_info->use);
1120 fprintf (file, ";; lr def \t");
1121 df_print_regset (file, bb_info->def);
1122 }
1123
1124
1125 /* Debugging info at bottom of bb. */
1126
1127 static void
1128 df_lr_bottom_dump (basic_block bb, FILE *file)
1129 {
1130 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1131 struct df_lr_problem_data *problem_data;
1132 if (!bb_info || !bb_info->out)
1133 return;
1134
1135 fprintf (file, ";; lr out \t");
1136 df_print_regset (file, bb_info->out);
1137 if (df_lr->problem_data)
1138 {
1139 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1140 fprintf (file, ";; old out \t");
1141 df_print_regset (file, problem_data->out[bb->index]);
1142 }
1143 }
1144
1145
1146 /* Build the datastructure to verify that the solution to the dataflow
1147 equations is not dirty. */
1148
1149 static void
1150 df_lr_verify_solution_start (void)
1151 {
1152 basic_block bb;
1153 struct df_lr_problem_data *problem_data;
1154 if (df_lr->solutions_dirty)
1155 {
1156 df_lr->problem_data = NULL;
1157 return;
1158 }
1159
1160 /* Set it true so that the solution is recomputed. */
1161 df_lr->solutions_dirty = true;
1162
1163 problem_data = XNEW (struct df_lr_problem_data);
1164 df_lr->problem_data = problem_data;
1165 problem_data->in = XNEWVEC (bitmap, last_basic_block);
1166 problem_data->out = XNEWVEC (bitmap, last_basic_block);
1167
1168 FOR_ALL_BB (bb)
1169 {
1170 problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
1171 problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
1172 bitmap_copy (problem_data->in[bb->index], DF_LR_IN (bb));
1173 bitmap_copy (problem_data->out[bb->index], DF_LR_OUT (bb));
1174 }
1175 }
1176
1177
1178 /* Compare the saved datastructure and the new solution to the dataflow
1179 equations. */
1180
1181 static void
1182 df_lr_verify_solution_end (void)
1183 {
1184 struct df_lr_problem_data *problem_data;
1185 basic_block bb;
1186
1187 if (df_lr->problem_data == NULL)
1188 return;
1189
1190 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1191
1192 if (df_lr->solutions_dirty)
1193 /* Do not check if the solution is still dirty. See the comment
1194 in df_lr_finalize for details. */
1195 df_lr->solutions_dirty = false;
1196 else
1197 FOR_ALL_BB (bb)
1198 {
1199 if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LR_IN (bb)))
1200 || (!bitmap_equal_p (problem_data->out[bb->index], DF_LR_OUT (bb))))
1201 {
1202 /*df_dump (stderr);*/
1203 gcc_unreachable ();
1204 }
1205 }
1206
1207 /* Cannot delete them immediately because you may want to dump them
1208 if the comparison fails. */
1209 FOR_ALL_BB (bb)
1210 {
1211 BITMAP_FREE (problem_data->in[bb->index]);
1212 BITMAP_FREE (problem_data->out[bb->index]);
1213 }
1214
1215 free (problem_data->in);
1216 free (problem_data->out);
1217 free (problem_data);
1218 df_lr->problem_data = NULL;
1219 }
1220
1221
1222 /* All of the information associated with every instance of the problem. */
1223
1224 static struct df_problem problem_LR =
1225 {
1226 DF_LR, /* Problem id. */
1227 DF_BACKWARD, /* Direction. */
1228 df_lr_alloc, /* Allocate the problem specific data. */
1229 df_lr_reset, /* Reset global information. */
1230 df_lr_free_bb_info, /* Free basic block info. */
1231 df_lr_local_compute, /* Local compute function. */
1232 df_lr_init, /* Init the solution specific data. */
1233 df_worklist_dataflow, /* Worklist solver. */
1234 df_lr_confluence_0, /* Confluence operator 0. */
1235 df_lr_confluence_n, /* Confluence operator n. */
1236 df_lr_transfer_function, /* Transfer function. */
1237 df_lr_finalize, /* Finalize function. */
1238 df_lr_free, /* Free all of the problem information. */
1239 NULL, /* Remove this problem from the stack of dataflow problems. */
1240 NULL, /* Debugging. */
1241 df_lr_top_dump, /* Debugging start block. */
1242 df_lr_bottom_dump, /* Debugging end block. */
1243 df_lr_verify_solution_start,/* Incremental solution verify start. */
1244 df_lr_verify_solution_end, /* Incremental solution verify end. */
1245 NULL, /* Dependent problem. */
1246 TV_DF_LR, /* Timing variable. */
1247 false /* Reset blocks on dropping out of blocks_to_analyze. */
1248 };
1249
1250
1251 /* Create a new DATAFLOW instance and add it to an existing instance
1252 of DF. The returned structure is what is used to get at the
1253 solution. */
1254
1255 void
1256 df_lr_add_problem (void)
1257 {
1258 df_add_problem (&problem_LR);
1259 /* These will be initialized when df_scan_blocks processes each
1260 block. */
1261 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1262 }
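
/* Typical use (an illustrative sketch): after df_lr_add_problem () and
   df_analyze (), the solution is read through DF_LR_IN (bb) and
   DF_LR_OUT (bb), each a regset of the registers live at the block
   boundary.  df_get_live_in and df_get_live_out at the top of this file
   wrap these so that callers transparently get the DF_LIVE sets when
   that problem is also defined.  */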
1263
1264
1265 /* Verify that all of the lr related info is consistent and
1266 correct. */
1267
1268 void
1269 df_lr_verify_transfer_functions (void)
1270 {
1271 basic_block bb;
1272 bitmap saved_def;
1273 bitmap saved_use;
1274 bitmap saved_adef;
1275 bitmap saved_ause;
1276 bitmap all_blocks;
1277
1278 if (!df)
1279 return;
1280
1281 saved_def = BITMAP_ALLOC (NULL);
1282 saved_use = BITMAP_ALLOC (NULL);
1283 saved_adef = BITMAP_ALLOC (NULL);
1284 saved_ause = BITMAP_ALLOC (NULL);
1285 all_blocks = BITMAP_ALLOC (NULL);
1286
1287 FOR_ALL_BB (bb)
1288 {
1289 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1290 bitmap_set_bit (all_blocks, bb->index);
1291
1292 if (bb_info)
1293 {
1294 /* Make a copy of the transfer functions and then compute
1295 new ones to see if the transfer functions have
1296 changed. */
1297 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1298 bb->index))
1299 {
1300 bitmap_copy (saved_def, bb_info->def);
1301 bitmap_copy (saved_use, bb_info->use);
1302 bitmap_clear (bb_info->def);
1303 bitmap_clear (bb_info->use);
1304
1305 df_lr_bb_local_compute (bb->index);
1306 gcc_assert (bitmap_equal_p (saved_def, bb_info->def));
1307 gcc_assert (bitmap_equal_p (saved_use, bb_info->use));
1308 }
1309 }
1310 else
1311 {
1312 	  /* If we do not have basic block info, the block must be in
1313 	     the list of dirty blocks or else someone has added a
1314 	     block behind our backs.  */
1315 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1316 bb->index));
1317 }
1318 /* Make sure no one created a block without following
1319 procedures. */
1320 gcc_assert (df_scan_get_bb_info (bb->index));
1321 }
1322
1323 /* Make sure there are no dirty bits in blocks that have been deleted. */
1324 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1325 all_blocks));
1326
1327 BITMAP_FREE (saved_def);
1328 BITMAP_FREE (saved_use);
1329 BITMAP_FREE (saved_adef);
1330 BITMAP_FREE (saved_ause);
1331 BITMAP_FREE (all_blocks);
1332 }
1333
1334
1335 \f
1336 /*----------------------------------------------------------------------------
1337 LIVE AND MUST-INITIALIZED REGISTERS.
1338
1339 This problem first computes the IN and OUT bitvectors for the
1340 must-initialized registers problems, which is a forward problem.
1341 It gives the set of registers for which we MUST have an available
1342 definition on any path from the entry block to the entry/exit of
1343 a basic block. Sets generate a definition, while clobbers kill
1344 a definition.
1345
1346 In and out bitvectors are built for each basic block and are indexed by
1347 regnum (see df.h for details). In and out bitvectors in struct
1348    df_live_bb_info actually refer to the must-initialized problem.
1349
1350 Then, the in and out sets for the LIVE problem itself are computed.
1351 These are the logical AND of the IN and OUT sets from the LR problem
1352 and the must-initialized problem.
1353 ----------------------------------------------------------------------------*/
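
/* In other words, once both problems are solved:

     DF_LIVE_IN (BB)  = DF_LR_IN (BB)  & must-initialized IN (BB)
     DF_LIVE_OUT (BB) = DF_LR_OUT (BB) & must-initialized OUT (BB)

   The AND is applied in df_live_finalize below.  */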
1354
1355 /* Private data used to verify the solution for this problem. */
1356 struct df_live_problem_data
1357 {
1358 bitmap *in;
1359 bitmap *out;
1360 };
1361
1362 /* Scratch var used by transfer functions. This is used to implement
1363 an optimization to reduce the amount of space used to compute the
1364 combined lr and live analysis. */
1365 static bitmap df_live_scratch;
1366
1367 /* Set basic block info. */
1368
1369 static void
1370 df_live_set_bb_info (unsigned int index,
1371 struct df_live_bb_info *bb_info)
1372 {
1373 gcc_assert (df_live);
1374 gcc_assert (index < df_live->block_info_size);
1375 df_live->block_info[index] = bb_info;
1376 }
1377
1378
1379 /* Free basic block info. */
1380
1381 static void
1382 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1383 void *vbb_info)
1384 {
1385 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1386 if (bb_info)
1387 {
1388 BITMAP_FREE (bb_info->gen);
1389 BITMAP_FREE (bb_info->kill);
1390 BITMAP_FREE (bb_info->in);
1391 BITMAP_FREE (bb_info->out);
1392 pool_free (df_live->block_pool, bb_info);
1393 }
1394 }
1395
1396
1397 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1398 not touched unless the block is new. */
1399
1400 static void
1401 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1402 {
1403 unsigned int bb_index;
1404 bitmap_iterator bi;
1405
1406 if (!df_live->block_pool)
1407 df_live->block_pool = create_alloc_pool ("df_live_block pool",
1408 sizeof (struct df_live_bb_info), 100);
1409 if (!df_live_scratch)
1410 df_live_scratch = BITMAP_ALLOC (NULL);
1411
1412 df_grow_bb_info (df_live);
1413
1414 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1415 {
1416 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1417 if (bb_info)
1418 {
1419 bitmap_clear (bb_info->kill);
1420 bitmap_clear (bb_info->gen);
1421 }
1422 else
1423 {
1424 bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool);
1425 df_live_set_bb_info (bb_index, bb_info);
1426 bb_info->kill = BITMAP_ALLOC (NULL);
1427 bb_info->gen = BITMAP_ALLOC (NULL);
1428 bb_info->in = BITMAP_ALLOC (NULL);
1429 bb_info->out = BITMAP_ALLOC (NULL);
1430 }
1431 }
1432 df_live->optional_p = (optimize <= 1);
1433 }
1434
1435
1436 /* Reset the global solution for recalculation. */
1437
1438 static void
1439 df_live_reset (bitmap all_blocks)
1440 {
1441 unsigned int bb_index;
1442 bitmap_iterator bi;
1443
1444 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1445 {
1446 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1447 gcc_assert (bb_info);
1448 bitmap_clear (bb_info->in);
1449 bitmap_clear (bb_info->out);
1450 }
1451 }
1452
1453
1454 /* Compute local uninitialized register info for basic block BB. */
1455
1456 static void
1457 df_live_bb_local_compute (unsigned int bb_index)
1458 {
1459 basic_block bb = BASIC_BLOCK (bb_index);
1460 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1461 rtx insn;
1462 df_ref *def_rec;
1463 int luid = 0;
1464
1465 FOR_BB_INSNS (bb, insn)
1466 {
1467 unsigned int uid = INSN_UID (insn);
1468 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1469
1470 /* Inserting labels does not always trigger the incremental
1471 rescanning. */
1472 if (!insn_info)
1473 {
1474 gcc_assert (!INSN_P (insn));
1475 insn_info = df_insn_create_insn_record (insn);
1476 }
1477
1478 DF_INSN_INFO_LUID (insn_info) = luid;
1479 if (!INSN_P (insn))
1480 continue;
1481
1482 luid++;
1483 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1484 {
1485 df_ref def = *def_rec;
1486 unsigned int regno = DF_REF_REGNO (def);
1487
1488 if (DF_REF_FLAGS_IS_SET (def,
1489 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1490 	    /* All partial or conditional defs
1491 	       seen are included in the gen set.  */
1492 bitmap_set_bit (bb_info->gen, regno);
1493 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1494 /* Only must clobbers for the entire reg destroy the
1495 value. */
1496 bitmap_set_bit (bb_info->kill, regno);
1497 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1498 bitmap_set_bit (bb_info->gen, regno);
1499 }
1500 }
1501
1502 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1503 {
1504 df_ref def = *def_rec;
1505 bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
1506 }
1507 }
1508
1509
1510 /* Compute local uninitialized register info. */
1511
1512 static void
1513 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1514 {
1515 unsigned int bb_index;
1516 bitmap_iterator bi;
1517
1518 df_grow_insn_info ();
1519
1520 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1521 0, bb_index, bi)
1522 {
1523 df_live_bb_local_compute (bb_index);
1524 }
1525
1526 bitmap_clear (df_live->out_of_date_transfer_functions);
1527 }
1528
1529
1530 /* Initialize the solution vectors. */
1531
1532 static void
1533 df_live_init (bitmap all_blocks)
1534 {
1535 unsigned int bb_index;
1536 bitmap_iterator bi;
1537
1538 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1539 {
1540 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1541 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1542
1543 /* No register may reach a location where it is not used. Thus
1544 we trim the rr result to the places where it is used. */
1545 bitmap_and (bb_info->out, bb_info->gen, bb_lr_info->out);
1546 bitmap_clear (bb_info->in);
1547 }
1548 }
1549
1550 /* Forward confluence function that ignores fake edges. */
1551
1552 static void
1553 df_live_confluence_n (edge e)
1554 {
1555 bitmap op1 = df_live_get_bb_info (e->dest->index)->in;
1556 bitmap op2 = df_live_get_bb_info (e->src->index)->out;
1557
1558 if (e->flags & EDGE_FAKE)
1559 return;
1560
1561 bitmap_ior_into (op1, op2);
1562 }
1563
1564
1565 /* Transfer function for the forwards must-initialized problem. */
1566
1567 static bool
1568 df_live_transfer_function (int bb_index)
1569 {
1570 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1571 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1572 bitmap in = bb_info->in;
1573 bitmap out = bb_info->out;
1574 bitmap gen = bb_info->gen;
1575 bitmap kill = bb_info->kill;
1576
1577 /* We need to use a scratch set here so that the value returned from this
1578 function invocation properly reflects whether the sets changed in a
1579 significant way; i.e. not just because the lr set was anded in. */
1580 bitmap_and (df_live_scratch, gen, bb_lr_info->out);
1581 /* No register may reach a location where it is not used. Thus
1582 we trim the rr result to the places where it is used. */
1583 bitmap_and_into (in, bb_lr_info->in);
1584
1585 return bitmap_ior_and_compl (out, df_live_scratch, in, kill);
1586 }
1587
1588
1589 /* And the LR info with the must-initialized registers, to produce the LIVE info. */
1590
1591 static void
1592 df_live_finalize (bitmap all_blocks)
1593 {
1594
1595 if (df_live->solutions_dirty)
1596 {
1597 bitmap_iterator bi;
1598 unsigned int bb_index;
1599
1600 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1601 {
1602 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1603 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1604
1605 /* No register may reach a location where it is not used. Thus
1606 we trim the rr result to the places where it is used. */
1607 bitmap_and_into (bb_live_info->in, bb_lr_info->in);
1608 bitmap_and_into (bb_live_info->out, bb_lr_info->out);
1609 }
1610
1611 df_live->solutions_dirty = false;
1612 }
1613 }
1614
1615
1616 /* Free all storage associated with the problem. */
1617
1618 static void
1619 df_live_free (void)
1620 {
1621 if (df_live->block_info)
1622 {
1623 unsigned int i;
1624
1625 for (i = 0; i < df_live->block_info_size; i++)
1626 {
1627 struct df_live_bb_info *bb_info = df_live_get_bb_info (i);
1628 if (bb_info)
1629 {
1630 BITMAP_FREE (bb_info->gen);
1631 BITMAP_FREE (bb_info->kill);
1632 BITMAP_FREE (bb_info->in);
1633 BITMAP_FREE (bb_info->out);
1634 }
1635 }
1636
1637 free_alloc_pool (df_live->block_pool);
1638 df_live->block_info_size = 0;
1639 free (df_live->block_info);
1640
1641 if (df_live_scratch)
1642 BITMAP_FREE (df_live_scratch);
1643 }
1644 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1645 free (df_live);
1646 }
1647
1648
1649 /* Debugging info at top of bb. */
1650
1651 static void
1652 df_live_top_dump (basic_block bb, FILE *file)
1653 {
1654 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1655 struct df_live_problem_data *problem_data;
1656
1657 if (!bb_info || !bb_info->in)
1658 return;
1659
1660 fprintf (file, ";; live in \t");
1661 df_print_regset (file, bb_info->in);
1662 if (df_live->problem_data)
1663 {
1664 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1665 fprintf (file, ";; old in \t");
1666 df_print_regset (file, problem_data->in[bb->index]);
1667 }
1668 fprintf (file, ";; live gen \t");
1669 df_print_regset (file, bb_info->gen);
1670 fprintf (file, ";; live kill\t");
1671 df_print_regset (file, bb_info->kill);
1672 }
1673
1674
1675 /* Debugging info at bottom of bb. */
1676
1677 static void
1678 df_live_bottom_dump (basic_block bb, FILE *file)
1679 {
1680 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1681 struct df_live_problem_data *problem_data;
1682
1683 if (!bb_info || !bb_info->out)
1684 return;
1685
1686 fprintf (file, ";; live out \t");
1687 df_print_regset (file, bb_info->out);
1688 if (df_live->problem_data)
1689 {
1690 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1691 fprintf (file, ";; old out \t");
1692 df_print_regset (file, problem_data->out[bb->index]);
1693 }
1694 }
1695
1696
1697 /* Build the datastructure to verify that the solution to the dataflow
1698 equations is not dirty. */
1699
1700 static void
1701 df_live_verify_solution_start (void)
1702 {
1703 basic_block bb;
1704 struct df_live_problem_data *problem_data;
1705 if (df_live->solutions_dirty)
1706 {
1707 df_live->problem_data = NULL;
1708 return;
1709 }
1710
1711 /* Set it true so that the solution is recomputed. */
1712 df_live->solutions_dirty = true;
1713
1714 problem_data = XNEW (struct df_live_problem_data);
1715 df_live->problem_data = problem_data;
1716 problem_data->in = XNEWVEC (bitmap, last_basic_block);
1717 problem_data->out = XNEWVEC (bitmap, last_basic_block);
1718
1719 FOR_ALL_BB (bb)
1720 {
1721 problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
1722 problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
1723 bitmap_copy (problem_data->in[bb->index], DF_LIVE_IN (bb));
1724 bitmap_copy (problem_data->out[bb->index], DF_LIVE_OUT (bb));
1725 }
1726 }
1727
1728
1729 /* Compare the saved datastructure and the new solution to the dataflow
1730 equations. */
1731
1732 static void
1733 df_live_verify_solution_end (void)
1734 {
1735 struct df_live_problem_data *problem_data;
1736 basic_block bb;
1737
1738 if (df_live->problem_data == NULL)
1739 return;
1740
1741 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1742
1743 FOR_ALL_BB (bb)
1744 {
1745 if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LIVE_IN (bb)))
1746 || (!bitmap_equal_p (problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1747 {
1748 /*df_dump (stderr);*/
1749 gcc_unreachable ();
1750 }
1751 }
1752
1753 /* Cannot delete them immediately because you may want to dump them
1754 if the comparison fails. */
1755 FOR_ALL_BB (bb)
1756 {
1757 BITMAP_FREE (problem_data->in[bb->index]);
1758 BITMAP_FREE (problem_data->out[bb->index]);
1759 }
1760
1761 free (problem_data->in);
1762 free (problem_data->out);
1763 free (problem_data);
1764 df_live->problem_data = NULL;
1765 }
1766
1767
1768 /* All of the information associated with every instance of the problem. */
1769
1770 static struct df_problem problem_LIVE =
1771 {
1772 DF_LIVE, /* Problem id. */
1773 DF_FORWARD, /* Direction. */
1774 df_live_alloc, /* Allocate the problem specific data. */
1775 df_live_reset, /* Reset global information. */
1776 df_live_free_bb_info, /* Free basic block info. */
1777 df_live_local_compute, /* Local compute function. */
1778 df_live_init, /* Init the solution specific data. */
1779 df_worklist_dataflow, /* Worklist solver. */
1780 NULL, /* Confluence operator 0. */
1781 df_live_confluence_n, /* Confluence operator n. */
1782 df_live_transfer_function, /* Transfer function. */
1783 df_live_finalize, /* Finalize function. */
1784 df_live_free, /* Free all of the problem information. */
1785 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1786 NULL, /* Debugging. */
1787 df_live_top_dump, /* Debugging start block. */
1788 df_live_bottom_dump, /* Debugging end block. */
1789 df_live_verify_solution_start,/* Incremental solution verify start. */
1790 df_live_verify_solution_end, /* Incremental solution verify end. */
1791 &problem_LR, /* Dependent problem. */
1792 TV_DF_LIVE, /* Timing variable. */
1793 false /* Reset blocks on dropping out of blocks_to_analyze. */
1794 };
1795
1796
1797 /* Create a new DATAFLOW instance and add it to an existing instance
1798 of DF. The returned structure is what is used to get at the
1799 solution. */
1800
1801 void
1802 df_live_add_problem (void)
1803 {
1804 df_add_problem (&problem_LIVE);
1805 /* These will be initialized when df_scan_blocks processes each
1806 block. */
1807 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
1808 }
1809
1810
1811 /* Set all of the blocks as dirty. This needs to be done if this
1812 problem is added after all of the insns have been scanned. */
1813
1814 void
1815 df_live_set_all_dirty (void)
1816 {
1817 basic_block bb;
1818 FOR_ALL_BB (bb)
1819 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1820 bb->index);
1821 }
1822
1823
1824 /* Verify that all of the live related info is consistent and
1825    correct.  */
1826
1827 void
1828 df_live_verify_transfer_functions (void)
1829 {
1830 basic_block bb;
1831 bitmap saved_gen;
1832 bitmap saved_kill;
1833 bitmap all_blocks;
1834
1835 if (!df)
1836 return;
1837
1838 saved_gen = BITMAP_ALLOC (NULL);
1839 saved_kill = BITMAP_ALLOC (NULL);
1840 all_blocks = BITMAP_ALLOC (NULL);
1841
1842 df_grow_insn_info ();
1843
1844 FOR_ALL_BB (bb)
1845 {
1846 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1847 bitmap_set_bit (all_blocks, bb->index);
1848
1849 if (bb_info)
1850 {
1851 /* Make a copy of the transfer functions and then compute
1852 new ones to see if the transfer functions have
1853 changed. */
1854 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1855 bb->index))
1856 {
1857 bitmap_copy (saved_gen, bb_info->gen);
1858 bitmap_copy (saved_kill, bb_info->kill);
1859 bitmap_clear (bb_info->gen);
1860 bitmap_clear (bb_info->kill);
1861
1862 df_live_bb_local_compute (bb->index);
1863 gcc_assert (bitmap_equal_p (saved_gen, bb_info->gen));
1864 gcc_assert (bitmap_equal_p (saved_kill, bb_info->kill));
1865 }
1866 }
1867 else
1868 {
1869 	  /* If we do not have basic block info, the block must be in
1870 	     the list of dirty blocks or else someone has added a
1871 	     block behind our backs.  */
1872 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1873 bb->index));
1874 }
1875 /* Make sure no one created a block without following
1876 procedures. */
1877 gcc_assert (df_scan_get_bb_info (bb->index));
1878 }
1879
1880 /* Make sure there are no dirty bits in blocks that have been deleted. */
1881 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1882 all_blocks));
1883 BITMAP_FREE (saved_gen);
1884 BITMAP_FREE (saved_kill);
1885 BITMAP_FREE (all_blocks);
1886 }
1887 \f
1888 /*----------------------------------------------------------------------------
1889 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1890
1891 Link either the defs to the uses and / or the uses to the defs.
1892
1893 These problems are set up like the other dataflow problems so that
1894 they nicely fit into the framework. They are much simpler and only
1895 involve a single traversal of instructions and an examination of
1896 the reaching defs information (the dependent problem).
1897 ----------------------------------------------------------------------------*/
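/* A minimal usage sketch (illustrative only, not part of the pass
   stack; it assumes the df.h entry points df_chain_add_problem,
   df_analyze and the DF_REF_CHAIN accessor, and process_use is a
   hypothetical caller-supplied routine):

       df_chain_add_problem (DF_DU_CHAIN);
       df_analyze ();
       ...
       for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
         {
           struct df_link *link;
           for (link = DF_REF_CHAIN (*def_rec); link; link = link->next)
             process_use (link->ref);   /* Each link->ref is a use
                                           reached by this def.  */
         }
*/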
1898
1899 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1900
1901 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1902
1903 struct df_link *
1904 df_chain_create (df_ref src, df_ref dst)
1905 {
1906 struct df_link *head = DF_REF_CHAIN (src);
1907 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1908
1909 DF_REF_CHAIN (src) = link;
1910 link->next = head;
1911 link->ref = dst;
1912 return link;
1913 }
1914
1915
1916 /* Delete any du or ud chains that start at REF and point to
1917 TARGET. */
1918 static void
1919 df_chain_unlink_1 (df_ref ref, df_ref target)
1920 {
1921 struct df_link *chain = DF_REF_CHAIN (ref);
1922 struct df_link *prev = NULL;
1923
1924 while (chain)
1925 {
1926 if (chain->ref == target)
1927 {
1928 if (prev)
1929 prev->next = chain->next;
1930 else
1931 DF_REF_CHAIN (ref) = chain->next;
1932 pool_free (df_chain->block_pool, chain);
1933 return;
1934 }
1935 prev = chain;
1936 chain = chain->next;
1937 }
1938 }
1939
1940
1941 /* Delete all du or ud chains that start at or point to REF. */
1942
1943 void
1944 df_chain_unlink (df_ref ref)
1945 {
1946 struct df_link *chain = DF_REF_CHAIN (ref);
1947 while (chain)
1948 {
1949 struct df_link *next = chain->next;
1950 /* Delete the other side if it exists. */
1951 df_chain_unlink_1 (chain->ref, ref);
1952 pool_free (df_chain->block_pool, chain);
1953 chain = next;
1954 }
1955 DF_REF_CHAIN (ref) = NULL;
1956 }
1957
1958
1959 /* Copy the du or ud chain starting at FROM_REF and attach it to
1960 TO_REF. */
1961
1962 void
1963 df_chain_copy (df_ref to_ref,
1964 struct df_link *from_ref)
1965 {
1966 while (from_ref)
1967 {
1968 df_chain_create (to_ref, from_ref->ref);
1969 from_ref = from_ref->next;
1970 }
1971 }
1972
1973
1974 /* Remove this problem from the stack of dataflow problems. */
1975
1976 static void
1977 df_chain_remove_problem (void)
1978 {
1979 bitmap_iterator bi;
1980 unsigned int bb_index;
1981
1982 /* Wholesale destruction of the old chains. */
1983 if (df_chain->block_pool)
1984 free_alloc_pool (df_chain->block_pool);
1985
1986 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1987 {
1988 rtx insn;
1989 df_ref *def_rec;
1990 df_ref *use_rec;
1991 basic_block bb = BASIC_BLOCK (bb_index);
1992
1993 if (df_chain_problem_p (DF_DU_CHAIN))
1994 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1995 DF_REF_CHAIN (*def_rec) = NULL;
1996 if (df_chain_problem_p (DF_UD_CHAIN))
1997 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1998 DF_REF_CHAIN (*use_rec) = NULL;
1999
2000 FOR_BB_INSNS (bb, insn)
2001 {
2002 unsigned int uid = INSN_UID (insn);
2003
2004 if (INSN_P (insn))
2005 {
2006 if (df_chain_problem_p (DF_DU_CHAIN))
2007 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2008 DF_REF_CHAIN (*def_rec) = NULL;
2009 if (df_chain_problem_p (DF_UD_CHAIN))
2010 {
2011 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2012 DF_REF_CHAIN (*use_rec) = NULL;
2013 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
2014 DF_REF_CHAIN (*use_rec) = NULL;
2015 }
2016 }
2017 }
2018 }
2019
2020 bitmap_clear (df_chain->out_of_date_transfer_functions);
2021 df_chain->block_pool = NULL;
2022 }
2023
2024
2025 /* Remove the chain problem completely. */
2026
2027 static void
2028 df_chain_fully_remove_problem (void)
2029 {
2030 df_chain_remove_problem ();
2031 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2032 free (df_chain);
2033 }
2034
2035
2036 /* Create def-use or use-def chains. */
2037
2038 static void
2039 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2040 {
2041 df_chain_remove_problem ();
2042 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
2043 sizeof (struct df_link), 50);
2044 df_chain->optional_p = true;
2045 }
2046
2047
2048 /* Reset all of the chains when the set of basic blocks changes. */
2049
2050 static void
2051 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2052 {
2053 df_chain_remove_problem ();
2054 }
2055
2056
2057 /* Create the chains for a list of USEs. */
2058
2059 static void
2060 df_chain_create_bb_process_use (bitmap local_rd,
2061 df_ref *use_rec,
2062 int top_flag)
2063 {
2064 bitmap_iterator bi;
2065 unsigned int def_index;
2066
2067 while (*use_rec)
2068 {
2069 df_ref use = *use_rec;
2070 unsigned int uregno = DF_REF_REGNO (use);
2071 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2072 || (uregno >= FIRST_PSEUDO_REGISTER))
2073 {
2074 /* Do not want to go through this for an uninitialized var. */
2075 int count = DF_DEFS_COUNT (uregno);
2076 if (count)
2077 {
2078 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2079 {
2080 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2081 unsigned int last_index = first_index + count - 1;
2082
2083 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2084 {
2085 df_ref def;
2086 if (def_index > last_index)
2087 break;
2088
2089 def = DF_DEFS_GET (def_index);
2090 if (df_chain_problem_p (DF_DU_CHAIN))
2091 df_chain_create (def, use);
2092 if (df_chain_problem_p (DF_UD_CHAIN))
2093 df_chain_create (use, def);
2094 }
2095 }
2096 }
2097 }
2098
2099 use_rec++;
2100 }
2101 }
2102
2103
2104 /* Create chains from reaching defs bitmaps for basic block BB. */
2105
2106 static void
2107 df_chain_create_bb (unsigned int bb_index)
2108 {
2109 basic_block bb = BASIC_BLOCK (bb_index);
2110 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2111 rtx insn;
2112 bitmap cpy = BITMAP_ALLOC (NULL);
2113
2114 bitmap_copy (cpy, bb_info->in);
2115 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2116
2117 /* Since we are going forwards, process the artificial uses first,
2118 then the artificial defs. */
2119
2120 #ifdef EH_USES
2121 /* Create the chains for the artificial uses from the EH_USES at the
2122 beginning of the block. */
2123
2124 /* Artificials are only hard regs. */
2125 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2126 df_chain_create_bb_process_use (cpy,
2127 df_get_artificial_uses (bb->index),
2128 DF_REF_AT_TOP);
2129 #endif
2130
2131 df_rd_simulate_artificial_defs_at_top (bb, cpy);
2132
2133 /* Process the regular instructions next. */
2134 FOR_BB_INSNS (bb, insn)
2135 if (INSN_P (insn))
2136 {
2137 unsigned int uid = INSN_UID (insn);
2138
2139 /* First scan the uses and link them up with the defs that remain
2140 in the cpy vector. */
2141 df_chain_create_bb_process_use (cpy, DF_INSN_UID_USES (uid), 0);
2142 if (df->changeable_flags & DF_EQ_NOTES)
2143 df_chain_create_bb_process_use (cpy, DF_INSN_UID_EQ_USES (uid), 0);
2144
2145 /* Since we are going forwards, process the defs second. */
2146 df_rd_simulate_one_insn (bb, insn, cpy);
2147 }
2148
2149 /* Create the chains for the artificial uses of the hard registers
2150 at the end of the block. */
2151 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2152 df_chain_create_bb_process_use (cpy,
2153 df_get_artificial_uses (bb->index),
2154 0);
2155
2156 BITMAP_FREE (cpy);
2157 }
2158
2159 /* Create def-use and/or use-def chains from the reaching defs
2160 bitmaps for the basic blocks in ALL_BLOCKS. */
2161
2162 static void
2163 df_chain_finalize (bitmap all_blocks)
2164 {
2165 unsigned int bb_index;
2166 bitmap_iterator bi;
2167
2168 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2169 {
2170 df_chain_create_bb (bb_index);
2171 }
2172 }
2173
2174
2175 /* Free all storage associated with the problem. */
2176
2177 static void
2178 df_chain_free (void)
2179 {
2180 free_alloc_pool (df_chain->block_pool);
2181 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2182 free (df_chain);
2183 }
2184
2185
2186 /* Debugging info. */
2187
2188 static void
2189 df_chain_top_dump (basic_block bb, FILE *file)
2190 {
2191 if (df_chain_problem_p (DF_DU_CHAIN))
2192 {
2193 rtx insn;
2194 df_ref *def_rec = df_get_artificial_defs (bb->index);
2195 if (*def_rec)
2196 {
2197
2198 fprintf (file, ";; DU chains for artificial defs\n");
2199 while (*def_rec)
2200 {
2201 df_ref def = *def_rec;
2202 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2203 df_chain_dump (DF_REF_CHAIN (def), file);
2204 fprintf (file, "\n");
2205 def_rec++;
2206 }
2207 }
2208
2209 FOR_BB_INSNS (bb, insn)
2210 {
2211 if (INSN_P (insn))
2212 {
2213 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2214 def_rec = DF_INSN_INFO_DEFS (insn_info);
2215 if (*def_rec)
2216 {
2217 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2218 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2219
2220 while (*def_rec)
2221 {
2222 df_ref def = *def_rec;
2223 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2224 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2225 fprintf (file, "read/write ");
2226 df_chain_dump (DF_REF_CHAIN (def), file);
2227 fprintf (file, "\n");
2228 def_rec++;
2229 }
2230 }
2231 }
2232 }
2233 }
2234 }
2235
2236
2237 static void
2238 df_chain_bottom_dump (basic_block bb, FILE *file)
2239 {
2240 if (df_chain_problem_p (DF_UD_CHAIN))
2241 {
2242 rtx insn;
2243 df_ref *use_rec = df_get_artificial_uses (bb->index);
2244
2245 if (*use_rec)
2246 {
2247 fprintf (file, ";; UD chains for artificial uses\n");
2248 while (*use_rec)
2249 {
2250 df_ref use = *use_rec;
2251 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2252 df_chain_dump (DF_REF_CHAIN (use), file);
2253 fprintf (file, "\n");
2254 use_rec++;
2255 }
2256 }
2257
2258 FOR_BB_INSNS (bb, insn)
2259 {
2260 if (INSN_P (insn))
2261 {
2262 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2263 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2264 use_rec = DF_INSN_INFO_USES (insn_info);
2265 if (*use_rec || *eq_use_rec)
2266 {
2267 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2268 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2269
2270 while (*use_rec)
2271 {
2272 df_ref use = *use_rec;
2273 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2274 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2275 fprintf (file, "read/write ");
2276 df_chain_dump (DF_REF_CHAIN (use), file);
2277 fprintf (file, "\n");
2278 use_rec++;
2279 }
2280 while (*eq_use_rec)
2281 {
2282 df_ref use = *eq_use_rec;
2283 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2284 df_chain_dump (DF_REF_CHAIN (use), file);
2285 fprintf (file, "\n");
2286 eq_use_rec++;
2287 }
2288 }
2289 }
2290 }
2291 }
2292 }
2293
2294
2295 static struct df_problem problem_CHAIN =
2296 {
2297 DF_CHAIN, /* Problem id. */
2298 DF_NONE, /* Direction. */
2299 df_chain_alloc, /* Allocate the problem specific data. */
2300 df_chain_reset, /* Reset global information. */
2301 NULL, /* Free basic block info. */
2302 NULL, /* Local compute function. */
2303 NULL, /* Init the solution specific data. */
2304 NULL, /* Iterative solver. */
2305 NULL, /* Confluence operator 0. */
2306 NULL, /* Confluence operator n. */
2307 NULL, /* Transfer function. */
2308 df_chain_finalize, /* Finalize function. */
2309 df_chain_free, /* Free all of the problem information. */
2310 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2311 NULL, /* Debugging. */
2312 df_chain_top_dump, /* Debugging start block. */
2313 df_chain_bottom_dump, /* Debugging end block. */
2314 NULL, /* Incremental solution verify start. */
2315 NULL, /* Incremental solution verify end. */
2316 &problem_RD, /* Dependent problem. */
2317 TV_DF_CHAIN, /* Timing variable. */
2318 false /* Reset blocks on dropping out of blocks_to_analyze. */
2319 };
2320
2321
2322 /* Create a new DATAFLOW instance and add it to an existing instance
2323 of DF. The returned structure is what is used to get at the
2324 solution. */
2325
2326 void
2327 df_chain_add_problem (unsigned int chain_flags)
2328 {
2329 df_add_problem (&problem_CHAIN);
2330 df_chain->local_flags = chain_flags;
2331 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2332 }
2333
2334 #undef df_chain_problem_p
2335
2336 \f
2337 /*----------------------------------------------------------------------------
2338 BYTE LEVEL LIVE REGISTERS
2339
2340 Find the locations in the function where any use of a pseudo can
2341 reach in the backwards direction. In and out bitvectors are built
2342 for each basic block. There are two mapping functions,
2343 df_byte_lr_get_regno_start and df_byte_lr_get_regno_len that are
2344 used to map regnos into bit vector positions.
2345
2346 This problem differs from the regular df_lr function in the way
2347 that subregs, *_extracts and strict_low_parts are handled. In lr
2348 these are considered partial kills; here, the exact set of bytes is
2349 modeled. Note that any reg that has none of these operations is
2350 only modeled with a single bit since all operations access the
2351 entire register.
2352
2353 This problem is more brittle than the regular lr. It currently can
2354 be used in dce incrementally, but cannot be used in an environment
2355 where insns are created or modified. The problem is that the
2356 mapping of regnos to bitmap positions is relatively compact, in
2357 that if a pseudo does not do any of the byte wise operations, only
2358 one slot is allocated, rather than a slot for each byte. If insns
2359 are created where a subreg is used for a reg that had no subregs,
2360 the mapping would be wrong. Likewise, there are no checks for
2361 newly added pseudos. These issues could be addressed
2362 by adding a problem specific flag to not use the compact mapping,
2363 if there was a need to do so.
2364
2365 ----------------------------------------------------------------------------*/
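/* A sketch of how a consumer might query the solution (illustrative
   only; it assumes the DF_BYTE_LR_OUT accessor from df.h, and REGNO,
   BYTE and BB are supplied by the caller):

       df_byte_lr_add_problem ();
       df_analyze ();
       ...
       unsigned int start = df_byte_lr_get_regno_start (regno);
       unsigned int len = df_byte_lr_get_regno_len (regno);
       /* With the compact mapping, LEN is 1 and the single bit stands
          for the whole register; otherwise there is one bit per byte.  */
       bool live_p = bitmap_bit_p (DF_BYTE_LR_OUT (bb),
                                   len == 1 ? start : start + byte);
*/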
2366
2367 /* Private data used to verify the solution for this problem. */
2368 struct df_byte_lr_problem_data
2369 {
2370 /* Expanded versions of bitvectors used in lr. */
2371 bitmap invalidated_by_call;
2372 bitmap hardware_regs_used;
2373
2374 /* Indexed by regno, this is true if there are subregs, extracts or
2375 strict_low_parts for this regno. */
2376 bitmap needs_expansion;
2377
2378 /* The start position and len for each regno in the various bit
2379 vectors. */
2380 unsigned int* regno_start;
2381 unsigned int* regno_len;
2382 /* An obstack for the bitmaps we need for this problem. */
2383 bitmap_obstack byte_lr_bitmaps;
2384 };
2385
2386
2387 /* Get the starting location for REGNO in the df_byte_lr bitmaps. */
2388
2389 int
2390 df_byte_lr_get_regno_start (unsigned int regno)
2391 {
2392 struct df_byte_lr_problem_data *problem_data
2393 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2394 return problem_data->regno_start[regno];
2395 }
2396
2397
2398 /* Get the len for REGNO in the df_byte_lr bitmaps. */
2399
2400 int
2401 df_byte_lr_get_regno_len (unsigned int regno)
2402 {
2403 struct df_byte_lr_problem_data *problem_data
2404 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2405 return problem_data->regno_len[regno];
2406 }
2407
2408
2409 /* Set basic block info. */
2410
2411 static void
2412 df_byte_lr_set_bb_info (unsigned int index,
2413 struct df_byte_lr_bb_info *bb_info)
2414 {
2415 gcc_assert (df_byte_lr);
2416 gcc_assert (index < df_byte_lr->block_info_size);
2417 df_byte_lr->block_info[index] = bb_info;
2418 }
2419
2420
2421 /* Free basic block info. */
2422
2423 static void
2424 df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2425 void *vbb_info)
2426 {
2427 struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info;
2428 if (bb_info)
2429 {
2430 BITMAP_FREE (bb_info->use);
2431 BITMAP_FREE (bb_info->def);
2432 BITMAP_FREE (bb_info->in);
2433 BITMAP_FREE (bb_info->out);
2434 pool_free (df_byte_lr->block_pool, bb_info);
2435 }
2436 }
2437
2438
2439 /* Check all of the refs in REF_REC to see if any of them are
2440 extracts, subregs or strict_low_parts. */
2441
2442 static void
2443 df_byte_lr_check_regs (df_ref *ref_rec)
2444 {
2445 struct df_byte_lr_problem_data *problem_data
2446 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2447
2448 for (; *ref_rec; ref_rec++)
2449 {
2450 df_ref ref = *ref_rec;
2451 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT
2452 | DF_REF_ZERO_EXTRACT
2453 | DF_REF_STRICT_LOW_PART)
2454 || GET_CODE (DF_REF_REG (ref)) == SUBREG)
2455 bitmap_set_bit (problem_data->needs_expansion, DF_REF_REGNO (ref));
2456 }
2457 }
2458
2459
2460 /* Expand bitmap SRC which is indexed by regno to DEST which is indexed by
2461 regno_start and regno_len. */
2462
2463 static void
2464 df_byte_lr_expand_bitmap (bitmap dest, bitmap src)
2465 {
2466 struct df_byte_lr_problem_data *problem_data
2467 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2468 bitmap_iterator bi;
2469 unsigned int i;
2470
2471 bitmap_clear (dest);
2472 EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi)
2473 {
2474 bitmap_set_range (dest, problem_data->regno_start[i],
2475 problem_data->regno_len[i]);
2476 }
2477 }
2478
2479
2480 /* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are
2481 not touched unless the block is new. */
2482
2483 static void
2484 df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2485 {
2486 unsigned int bb_index;
2487 bitmap_iterator bi;
2488 basic_block bb;
2489 unsigned int regno;
2490 unsigned int index = 0;
2491 unsigned int max_reg = max_reg_num();
2492 struct df_byte_lr_problem_data *problem_data
2493 = XNEW (struct df_byte_lr_problem_data);
2494
2495 df_byte_lr->problem_data = problem_data;
2496
2497 if (!df_byte_lr->block_pool)
2498 df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool",
2499 sizeof (struct df_byte_lr_bb_info), 50);
2500
2501 df_grow_bb_info (df_byte_lr);
2502
2503 /* Create the mapping from regnos to slots. This does not change
2504 unless the problem is destroyed and recreated. In particular, if
2505 we end up deleting the only insn that used a subreg, we do not
2506 want to redo the mapping because this would invalidate everything
2507 else. */
2508
2509 bitmap_obstack_initialize (&problem_data->byte_lr_bitmaps);
2510 problem_data->regno_start = XNEWVEC (unsigned int, max_reg);
2511 problem_data->regno_len = XNEWVEC (unsigned int, max_reg);
2512 problem_data->hardware_regs_used = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2513 problem_data->invalidated_by_call = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2514 problem_data->needs_expansion = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2515
2516 /* Discover which regnos use subregs, extracts or
2517 strict_low_parts. */
2518 FOR_EACH_BB (bb)
2519 {
2520 rtx insn;
2521 FOR_BB_INSNS (bb, insn)
2522 {
2523 if (INSN_P (insn))
2524 {
2525 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2526 df_byte_lr_check_regs (DF_INSN_INFO_DEFS (insn_info));
2527 df_byte_lr_check_regs (DF_INSN_INFO_USES (insn_info));
2528 }
2529 }
2530 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, bb->index);
2531 }
2532
2533 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2534 bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2535
2536 /* Allocate the slots for each regno. */
2537 for (regno = 0; regno < max_reg; regno++)
2538 {
2539 int len;
2540 problem_data->regno_start[regno] = index;
2541 if (bitmap_bit_p (problem_data->needs_expansion, regno))
2542 len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno]));
2543 else
2544 len = 1;
2545
2546 problem_data->regno_len[regno] = len;
2547 index += len;
2548 }
2549
2550 df_byte_lr_expand_bitmap (problem_data->hardware_regs_used,
2551 df->hardware_regs_used);
2552 df_byte_lr_expand_bitmap (problem_data->invalidated_by_call,
2553 regs_invalidated_by_call_regset);
2554
2555 EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2556 {
2557 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
2558 if (bb_info)
2559 {
2560 bitmap_clear (bb_info->def);
2561 bitmap_clear (bb_info->use);
2562 }
2563 else
2564 {
2565 bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool);
2566 df_byte_lr_set_bb_info (bb_index, bb_info);
2567 bb_info->use = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2568 bb_info->def = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2569 bb_info->in = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2570 bb_info->out = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps);
2571 }
2572 }
2573
2574 df_byte_lr->optional_p = true;
2575 }
2576
2577
2578 /* Reset the global solution for recalculation. */
2579
2580 static void
2581 df_byte_lr_reset (bitmap all_blocks)
2582 {
2583 unsigned int bb_index;
2584 bitmap_iterator bi;
2585
2586 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2587 {
2588 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
2589 gcc_assert (bb_info);
2590 bitmap_clear (bb_info->in);
2591 bitmap_clear (bb_info->out);
2592 }
2593 }
2594
2595
2596 /* Compute local live register info for basic block BB. */
2597
2598 static void
2599 df_byte_lr_bb_local_compute (unsigned int bb_index)
2600 {
2601 struct df_byte_lr_problem_data *problem_data
2602 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2603 basic_block bb = BASIC_BLOCK (bb_index);
2604 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
2605 rtx insn;
2606 df_ref *def_rec;
2607 df_ref *use_rec;
2608
2609 /* Process the registers set in an exception handler. */
2610 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2611 {
2612 df_ref def = *def_rec;
2613 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
2614 {
2615 unsigned int dregno = DF_REF_REGNO (def);
2616 unsigned int start = problem_data->regno_start[dregno];
2617 unsigned int len = problem_data->regno_len[dregno];
2618 bitmap_set_range (bb_info->def, start, len);
2619 bitmap_clear_range (bb_info->use, start, len);
2620 }
2621 }
2622
2623 /* Process the hardware registers that are always live. */
2624 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2625 {
2626 df_ref use = *use_rec;
2627 /* Add use to set of uses in this BB. */
2628 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
2629 {
2630 unsigned int uregno = DF_REF_REGNO (use);
2631 unsigned int start = problem_data->regno_start[uregno];
2632 unsigned int len = problem_data->regno_len[uregno];
2633 bitmap_set_range (bb_info->use, start, len);
2634 }
2635 }
2636
2637 FOR_BB_INSNS_REVERSE (bb, insn)
2638 {
2639 unsigned int uid = INSN_UID (insn);
2640
2641 if (!INSN_P (insn))
2642 continue;
2643
2644 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2645 {
2646 df_ref def = *def_rec;
2647 /* If the def is to only part of the reg, it does
2648 not kill the other defs that reach here. */
2649 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2650 {
2651 unsigned int dregno = DF_REF_REGNO (def);
2652 unsigned int start = problem_data->regno_start[dregno];
2653 unsigned int len = problem_data->regno_len[dregno];
2654 unsigned int sb;
2655 unsigned int lb;
2656 if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb))
2657 {
2658 start += sb;
2659 len = lb - sb;
2660 }
2661 if (len)
2662 {
2663 bitmap_set_range (bb_info->def, start, len);
2664 bitmap_clear_range (bb_info->use, start, len);
2665 }
2666 }
2667 }
2668
2669 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2670 {
2671 df_ref use = *use_rec;
2672 unsigned int uregno = DF_REF_REGNO (use);
2673 unsigned int start = problem_data->regno_start[uregno];
2674 unsigned int len = problem_data->regno_len[uregno];
2675 unsigned int sb;
2676 unsigned int lb;
2677 if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb))
2678 {
2679 start += sb;
2680 len = lb - sb;
2681 }
2682 /* Add use to set of uses in this BB. */
2683 if (len)
2684 bitmap_set_range (bb_info->use, start, len);
2685 }
2686 }
2687
2688 /* Process the registers set in an exception handler or the hard
2689 frame pointer if this block is the target of a non local
2690 goto. */
2691 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2692 {
2693 df_ref def = *def_rec;
2694 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2695 {
2696 unsigned int dregno = DF_REF_REGNO (def);
2697 unsigned int start = problem_data->regno_start[dregno];
2698 unsigned int len = problem_data->regno_len[dregno];
2699 bitmap_set_range (bb_info->def, start, len);
2700 bitmap_clear_range (bb_info->use, start, len);
2701 }
2702 }
2703
2704 #ifdef EH_USES
2705 /* Process the uses that are live into an exception handler. */
2706 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2707 {
2708 df_ref use = *use_rec;
2709 /* Add use to set of uses in this BB. */
2710 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
2711 {
2712 unsigned int uregno = DF_REF_REGNO (use);
2713 unsigned int start = problem_data->regno_start[uregno];
2714 unsigned int len = problem_data->regno_len[uregno];
2715 bitmap_set_range (bb_info->use, start, len);
2716 }
2717 }
2718 #endif
2719 }
2720
2721
2722 /* Compute local live register info for each basic block within BLOCKS. */
2723
2724 static void
2725 df_byte_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2726 {
2727 unsigned int bb_index;
2728 bitmap_iterator bi;
2729
2730 EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2731 {
2732 if (bb_index == EXIT_BLOCK)
2733 {
2734 /* The exit block is special for this problem and its bits are
2735 computed from thin air. */
2736 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (EXIT_BLOCK);
2737 df_byte_lr_expand_bitmap (bb_info->use, df->exit_block_uses);
2738 }
2739 else
2740 df_byte_lr_bb_local_compute (bb_index);
2741 }
2742
2743 bitmap_clear (df_byte_lr->out_of_date_transfer_functions);
2744 }
2745
2746
2747 /* Initialize the solution vectors. */
2748
2749 static void
2750 df_byte_lr_init (bitmap all_blocks)
2751 {
2752 unsigned int bb_index;
2753 bitmap_iterator bi;
2754
2755 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2756 {
2757 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
2758 bitmap_copy (bb_info->in, bb_info->use);
2759 bitmap_clear (bb_info->out);
2760 }
2761 }
2762
2763
2764 /* Confluence function that processes infinite loops. This might be a
2765 noreturn function that throws. And even if it isn't, getting the
2766 unwind info right helps debugging. */
2767 static void
2768 df_byte_lr_confluence_0 (basic_block bb)
2769 {
2770 struct df_byte_lr_problem_data *problem_data
2771 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2772 bitmap op1 = df_byte_lr_get_bb_info (bb->index)->out;
2773 if (bb != EXIT_BLOCK_PTR)
2774 bitmap_copy (op1, problem_data->hardware_regs_used);
2775 }
2776
2777
2778 /* Confluence function that ignores fake edges. */
2779
2780 static void
2781 df_byte_lr_confluence_n (edge e)
2782 {
2783 struct df_byte_lr_problem_data *problem_data
2784 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2785 bitmap op1 = df_byte_lr_get_bb_info (e->src->index)->out;
2786 bitmap op2 = df_byte_lr_get_bb_info (e->dest->index)->in;
2787
2788 /* Call-clobbered registers die across exception and call edges. */
2789 /* ??? Abnormal call edges ignored for the moment, as this gets
2790 confused by sibling call edges, which crashes reg-stack. */
2791 if (e->flags & EDGE_EH)
2792 bitmap_ior_and_compl_into (op1, op2, problem_data->invalidated_by_call);
2793 else
2794 bitmap_ior_into (op1, op2);
2795
2796 bitmap_ior_into (op1, problem_data->hardware_regs_used);
2797 }
2798
2799
2800 /* Transfer function: IN = USE | (OUT & ~DEF). Return true if IN changed. */
2801
2802 static bool
2803 df_byte_lr_transfer_function (int bb_index)
2804 {
2805 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index);
2806 bitmap in = bb_info->in;
2807 bitmap out = bb_info->out;
2808 bitmap use = bb_info->use;
2809 bitmap def = bb_info->def;
2810
2811 return bitmap_ior_and_compl (in, use, out, def);
2812 }
2813
2814
2815 /* Free all storage associated with the problem. */
2816
2817 static void
2818 df_byte_lr_free (void)
2819 {
2820 struct df_byte_lr_problem_data *problem_data
2821 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2822
2823
2824 if (df_byte_lr->block_info)
2825 {
2826 free_alloc_pool (df_byte_lr->block_pool);
2827 df_byte_lr->block_info_size = 0;
2828 free (df_byte_lr->block_info);
2829 }
2830
2831 BITMAP_FREE (df_byte_lr->out_of_date_transfer_functions);
2832 bitmap_obstack_release (&problem_data->byte_lr_bitmaps);
2833 free (problem_data->regno_start);
2834 free (problem_data->regno_len);
2835 free (problem_data);
2836 free (df_byte_lr);
2837 }
2838
2839
2840 /* Debugging info at top of bb. */
2841
2842 static void
2843 df_byte_lr_top_dump (basic_block bb, FILE *file)
2844 {
2845 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
2846 if (!bb_info || !bb_info->in)
2847 return;
2848
2849 fprintf (file, ";; blr in \t");
2850 df_print_byte_regset (file, bb_info->in);
2851 fprintf (file, ";; blr use \t");
2852 df_print_byte_regset (file, bb_info->use);
2853 fprintf (file, ";; blr def \t");
2854 df_print_byte_regset (file, bb_info->def);
2855 }
2856
2857
2858 /* Debugging info at bottom of bb. */
2859
2860 static void
2861 df_byte_lr_bottom_dump (basic_block bb, FILE *file)
2862 {
2863 struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index);
2864 if (!bb_info || !bb_info->out)
2865 return;
2866
2867 fprintf (file, ";; blr out \t");
2868 df_print_byte_regset (file, bb_info->out);
2869 }
2870
2871
2872 /* All of the information associated with every instance of the problem. */
2873
2874 static struct df_problem problem_BYTE_LR =
2875 {
2876 DF_BYTE_LR, /* Problem id. */
2877 DF_BACKWARD, /* Direction. */
2878 df_byte_lr_alloc, /* Allocate the problem specific data. */
2879 df_byte_lr_reset, /* Reset global information. */
2880 df_byte_lr_free_bb_info, /* Free basic block info. */
2881 df_byte_lr_local_compute, /* Local compute function. */
2882 df_byte_lr_init, /* Init the solution specific data. */
2883 df_worklist_dataflow, /* Worklist solver. */
2884 df_byte_lr_confluence_0, /* Confluence operator 0. */
2885 df_byte_lr_confluence_n, /* Confluence operator n. */
2886 df_byte_lr_transfer_function, /* Transfer function. */
2887 NULL, /* Finalize function. */
2888 df_byte_lr_free, /* Free all of the problem information. */
2889 df_byte_lr_free, /* Remove this problem from the stack of dataflow problems. */
2890 NULL, /* Debugging. */
2891 df_byte_lr_top_dump, /* Debugging start block. */
2892 df_byte_lr_bottom_dump, /* Debugging end block. */
2893 NULL, /* Incremental solution verify start. */
2894 NULL, /* Incremental solution verify end. */
2895 NULL, /* Dependent problem. */
2896 TV_DF_BYTE_LR, /* Timing variable. */
2897 false /* Reset blocks on dropping out of blocks_to_analyze. */
2898 };
2899
2900
2901 /* Create a new DATAFLOW instance and add it to an existing instance
2902 of DF. The returned structure is what is used to get at the
2903 solution. */
2904
2905 void
2906 df_byte_lr_add_problem (void)
2907 {
2908 df_add_problem (&problem_BYTE_LR);
2909 /* These will be initialized when df_scan_blocks processes each
2910 block. */
2911 df_byte_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
2912 }
2913
2914
2915 /* Simulate the effects of the defs of INSN on LIVE. */
2916
2917 void
2918 df_byte_lr_simulate_defs (rtx insn, bitmap live)
2919 {
2920 struct df_byte_lr_problem_data *problem_data
2921 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2922 df_ref *def_rec;
2923 unsigned int uid = INSN_UID (insn);
2924
2925 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2926 {
2927 df_ref def = *def_rec;
2928
2929 /* If the def is to only part of the reg, it does
2930 not kill the other defs that reach here. */
2931 if (!(DF_REF_FLAGS (def) & DF_REF_CONDITIONAL))
2932 {
2933 unsigned int dregno = DF_REF_REGNO (def);
2934 unsigned int start = problem_data->regno_start[dregno];
2935 unsigned int len = problem_data->regno_len[dregno];
2936 unsigned int sb;
2937 unsigned int lb;
2938 if (!df_compute_accessed_bytes (def, DF_MM_MUST, &sb, &lb))
2939 {
2940 start += sb;
2941 len = lb - sb;
2942 }
2943
2944 if (len)
2945 bitmap_clear_range (live, start, len);
2946 }
2947 }
2948 }
2949
2950
2951 /* Simulate the effects of the uses of INSN on LIVE. */
2952
2953 void
2954 df_byte_lr_simulate_uses (rtx insn, bitmap live)
2955 {
2956 struct df_byte_lr_problem_data *problem_data
2957 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2958 df_ref *use_rec;
2959 unsigned int uid = INSN_UID (insn);
2960
2961 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2962 {
2963 df_ref use = *use_rec;
2964 unsigned int uregno = DF_REF_REGNO (use);
2965 unsigned int start = problem_data->regno_start[uregno];
2966 unsigned int len = problem_data->regno_len[uregno];
2967 unsigned int sb;
2968 unsigned int lb;
2969
2970 if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb))
2971 {
2972 start += sb;
2973 len = lb - sb;
2974 }
2975
2976 /* Add use to set of uses in this BB. */
2977 if (len)
2978 bitmap_set_range (live, start, len);
2979 }
2980 }
2981
2982
2983 /* Apply the artificial uses and defs at the top of BB in a forwards
2984 direction. */
2985
2986 void
2987 df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
2988 {
2989 struct df_byte_lr_problem_data *problem_data
2990 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
2991 df_ref *def_rec;
2992 #ifdef EH_USES
2993 df_ref *use_rec;
2994 #endif
2995 int bb_index = bb->index;
2996
2997 #ifdef EH_USES
2998 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2999 {
3000 df_ref use = *use_rec;
3001 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3002 {
3003 unsigned int uregno = DF_REF_REGNO (use);
3004 unsigned int start = problem_data->regno_start[uregno];
3005 unsigned int len = problem_data->regno_len[uregno];
3006 bitmap_set_range (live, start, len);
3007 }
3008 }
3009 #endif
3010
3011 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3012 {
3013 df_ref def = *def_rec;
3014 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3015 {
3016 unsigned int dregno = DF_REF_REGNO (def);
3017 unsigned int start = problem_data->regno_start[dregno];
3018 unsigned int len = problem_data->regno_len[dregno];
3019 bitmap_clear_range (live, start, len);
3020 }
3021 }
3022 }
3023
3024
3025 /* Apply the artificial uses and defs at the end of BB in a backwards
3026 direction. */
3027
3028 void
3029 df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
3030 {
3031 struct df_byte_lr_problem_data *problem_data
3032 = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;
3033 df_ref *def_rec;
3034 df_ref *use_rec;
3035 int bb_index = bb->index;
3036
3037 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3038 {
3039 df_ref def = *def_rec;
3040 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3041 {
3042 unsigned int dregno = DF_REF_REGNO (def);
3043 unsigned int start = problem_data->regno_start[dregno];
3044 unsigned int len = problem_data->regno_len[dregno];
3045 bitmap_clear_range (live, start, len);
3046 }
3047 }
3048
3049 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3050 {
3051 df_ref use = *use_rec;
3052 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3053 {
3054 unsigned int uregno = DF_REF_REGNO (use);
3055 unsigned int start = problem_data->regno_start[uregno];
3056 unsigned int len = problem_data->regno_len[uregno];
3057 bitmap_set_range (live, start, len);
3058 }
3059 }
3060 }
3061
3062
3063 \f
3064 /*----------------------------------------------------------------------------
3065 This problem computes REG_DEAD and REG_UNUSED notes.
3066 ----------------------------------------------------------------------------*/
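/* The usual idiom for a pass that needs fresh notes (a sketch,
   illustrative only):

       df_note_add_problem ();
       df_analyze ();

   after which every insn carries up-to-date REG_DEAD and REG_UNUSED
   notes for the pass to consume.  */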
3067
3068 static void
3069 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
3070 {
3071 df_note->optional_p = true;
3072 }
3073
3074 #ifdef REG_DEAD_DEBUGGING
3075 static void
3076 df_print_note (const char *prefix, rtx insn, rtx note)
3077 {
3078 if (dump_file)
3079 {
3080 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
3081 print_rtl (dump_file, note);
3082 fprintf (dump_file, "\n");
3083 }
3084 }
3085 #endif
3086
3087
3088 /* After reg-stack, the x86 floating point stack regs are difficult to
3089 analyze because of all of the pushes, pops and rotations. Thus, we
3090 just leave the notes alone. */
3091
3092 #ifdef STACK_REGS
3093 static inline bool
3094 df_ignore_stack_reg (int regno)
3095 {
3096 return regstack_completed
3097 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
3098 }
3099 #else
3100 static inline bool
3101 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
3102 {
3103 return false;
3104 }
3105 #endif
3106
3107
3108 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN and add
3109 them to OLD_DEAD_NOTES and OLD_UNUSED_NOTES. */
3110
3111 static void
3112 df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes)
3113 {
3114 rtx *pprev = &REG_NOTES (insn);
3115 rtx link = *pprev;
3116 rtx dead = NULL;
3117 rtx unused = NULL;
3118
3119 while (link)
3120 {
3121 switch (REG_NOTE_KIND (link))
3122 {
3123 case REG_DEAD:
3124 /* After reg-stack, we need to ignore any REG_DEAD notes
3125 for the stack registers. */
3126 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3127 {
3128 pprev = &XEXP (link, 1);
3129 link = *pprev;
3130 }
3131 else
3132 {
3133 rtx next = XEXP (link, 1);
3134 #ifdef REG_DEAD_DEBUGGING
3135 df_print_note ("deleting: ", insn, link);
3136 #endif
3137 XEXP (link, 1) = dead;
3138 dead = link;
3139 *pprev = link = next;
3140 }
3141 break;
3142
3143 case REG_UNUSED:
3144 /* After reg-stack, we need to ignore any unused notes
3145 for the stack registers. */
3146 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3147 {
3148 pprev = &XEXP (link, 1);
3149 link = *pprev;
3150 }
3151 else
3152 {
3153 rtx next = XEXP (link, 1);
3154 #ifdef REG_DEAD_DEBUGGING
3155 df_print_note ("deleting: ", insn, link);
3156 #endif
3157 XEXP (link, 1) = unused;
3158 unused = link;
3159 *pprev = link = next;
3160 }
3161 break;
3162
3163 default:
3164 pprev = &XEXP (link, 1);
3165 link = *pprev;
3166 break;
3167 }
3168 }
3169
3170 *old_dead_notes = dead;
3171 *old_unused_notes = unused;
3172 }
3173
3174
3175 /* Set a NOTE_TYPE note for REG in INSN. Try to pull it from the OLD
3176 list, otherwise create a new one. */
3177
3178 static inline rtx
3179 df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg)
3180 {
3181 rtx curr = old;
3182 rtx prev = NULL;
3183
3184 gcc_assert (!DEBUG_INSN_P (insn));
3185
3186 while (curr)
3187 if (XEXP (curr, 0) == reg)
3188 {
3189 if (prev)
3190 XEXP (prev, 1) = XEXP (curr, 1);
3191 else
3192 old = XEXP (curr, 1);
3193 XEXP (curr, 1) = REG_NOTES (insn);
3194 REG_NOTES (insn) = curr;
3195 return old;
3196 }
3197 else
3198 {
3199 prev = curr;
3200 curr = XEXP (curr, 1);
3201 }
3202
3203 /* Did not find the note. */
3204 add_reg_note (insn, note_type, reg);
3205 return old;
3206 }
3207
3208 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
3209 arguments. Return true if the register value described by MWS's
3210 mw_reg is known to be completely unused, and if mw_reg can therefore
3211 be used in a REG_UNUSED note. */
3212
3213 static bool
3214 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
3215 bitmap live, bitmap artificial_uses)
3216 {
3217 unsigned int r;
3218
3219 /* If MWS describes a partial reference, create REG_UNUSED notes for
3220 individual hard registers. */
3221 if (mws->flags & DF_REF_PARTIAL)
3222 return false;
3223
3224 /* Likewise if some part of the register is used. */
3225 for (r = mws->start_regno; r <= mws->end_regno; r++)
3226 if (bitmap_bit_p (live, r)
3227 || bitmap_bit_p (artificial_uses, r))
3228 return false;
3229
3230 gcc_assert (REG_P (mws->mw_reg));
3231 return true;
3232 }
3233
3234 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
3235 based on the bits in LIVE. Do not generate notes for registers in
3236 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
3237 not generated if the reg is both read and written by the
3238 instruction.
3239 */
3240
3241 static rtx
3242 df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
3243 bitmap live, bitmap do_not_gen,
3244 bitmap artificial_uses)
3245 {
3246 unsigned int r;
3247
3248 #ifdef REG_DEAD_DEBUGGING
3249 if (dump_file)
3250 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
3251 mws->start_regno, mws->end_regno);
3252 #endif
3253
3254 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
3255 {
3256 unsigned int regno = mws->start_regno;
3257 old = df_set_note (REG_UNUSED, insn, old, mws->mw_reg);
3258
3259 #ifdef REG_DEAD_DEBUGGING
3260 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3261 #endif
3262 bitmap_set_bit (do_not_gen, regno);
3263 /* Only do this if the value is totally dead. */
3264 }
3265 else
3266 for (r = mws->start_regno; r <= mws->end_regno; r++)
3267 {
3268 if (!bitmap_bit_p (live, r)
3269 && !bitmap_bit_p (artificial_uses, r))
3270 {
3271 old = df_set_note (REG_UNUSED, insn, old, regno_reg_rtx[r]);
3272 #ifdef REG_DEAD_DEBUGGING
3273 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3274 #endif
3275 }
3276 bitmap_set_bit (do_not_gen, r);
3277 }
3278 return old;
3279 }
3280
3281
3282 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
3283 arguments. Return true if the register value described by MWS's
3284 mw_reg is known to be completely dead, and if mw_reg can therefore
3285 be used in a REG_DEAD note. */
3286
3287 static bool
3288 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
3289 bitmap live, bitmap artificial_uses,
3290 bitmap do_not_gen)
3291 {
3292 unsigned int r;
3293
3294 /* If MWS describes a partial reference, create REG_DEAD notes for
3295 individual hard registers. */
3296 if (mws->flags & DF_REF_PARTIAL)
3297 return false;
3298
3299 /* Likewise if some part of the register is not dead. */
3300 for (r = mws->start_regno; r <= mws->end_regno; r++)
3301 if (bitmap_bit_p (live, r)
3302 || bitmap_bit_p (artificial_uses, r)
3303 || bitmap_bit_p (do_not_gen, r))
3304 return false;
3305
3306 gcc_assert (REG_P (mws->mw_reg));
3307 return true;
3308 }
3309
3310 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3311 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
3312 from being set if the instruction both reads and writes the
3313 register. */
3314
3315 static rtx
3316 df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
3317 bitmap live, bitmap do_not_gen,
3318 bitmap artificial_uses, bool *added_notes_p)
3319 {
3320 unsigned int r;
3321 bool is_debug = *added_notes_p;
3322
3323 *added_notes_p = false;
3324
3325 #ifdef REG_DEAD_DEBUGGING
3326 if (dump_file)
3327 {
3328 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
3329 mws->start_regno, mws->end_regno);
3330 df_print_regset (dump_file, do_not_gen);
3331 fprintf (dump_file, " live =");
3332 df_print_regset (dump_file, live);
3333 fprintf (dump_file, " artificial uses =");
3334 df_print_regset (dump_file, artificial_uses);
3335 }
3336 #endif
3337
3338 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
3339 {
3340 /* Add a dead note for the entire multi word register. */
3341 if (is_debug)
3342 {
3343 *added_notes_p = true;
3344 return old;
3345 }
3346 old = df_set_note (REG_DEAD, insn, old, mws->mw_reg);
3347 #ifdef REG_DEAD_DEBUGGING
3348 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3349 #endif
3350 }
3351 else
3352 {
3353 for (r = mws->start_regno; r <= mws->end_regno; r++)
3354 if (!bitmap_bit_p (live, r)
3355 && !bitmap_bit_p (artificial_uses, r)
3356 && !bitmap_bit_p (do_not_gen, r))
3357 {
3358 if (is_debug)
3359 {
3360 *added_notes_p = true;
3361 return old;
3362 }
3363 old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]);
3364 #ifdef REG_DEAD_DEBUGGING
3365 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3366 #endif
3367 }
3368 }
3369 return old;
3370 }
3371
3372
3373 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3374 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3375
3376 static rtx
3377 df_create_unused_note (rtx insn, rtx old, df_ref def,
3378 bitmap live, bitmap artificial_uses)
3379 {
3380 unsigned int dregno = DF_REF_REGNO (def);
3381
3382 #ifdef REG_DEAD_DEBUGGING
3383 if (dump_file)
3384 {
3385 fprintf (dump_file, " regular looking at def ");
3386 df_ref_debug (def, dump_file);
3387 }
3388 #endif
3389
3390 if (!(bitmap_bit_p (live, dregno)
3391 || (DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3392 || bitmap_bit_p (artificial_uses, dregno)
3393 || df_ignore_stack_reg (dregno)))
3394 {
3395 rtx reg = (DF_REF_LOC (def))
3396 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3397 old = df_set_note (REG_UNUSED, insn, old, reg);
3398 #ifdef REG_DEAD_DEBUGGING
3399 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3400 #endif
3401 }
3402
3403 return old;
3404 }
3405
3406
3407 /* Recompute the REG_DEAD and REG_UNUSED notes for the insns in basic
3408 block BB_INDEX. LIVE, DO_NOT_GEN and ARTIFICIAL_USES are scratch
3409 bitmaps used here. */
3410
3411 static void
3412 df_note_bb_compute (unsigned int bb_index,
3413 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3414 {
3415 basic_block bb = BASIC_BLOCK (bb_index);
3416 rtx insn;
3417 df_ref *def_rec;
3418 df_ref *use_rec;
3419
3420 bitmap_copy (live, df_get_live_out (bb));
3421 bitmap_clear (artificial_uses);
3422
3423 #ifdef REG_DEAD_DEBUGGING
3424 if (dump_file)
3425 {
3426 fprintf (dump_file, "live at bottom ");
3427 df_print_regset (dump_file, live);
3428 }
3429 #endif
3430
3431 /* Process the artificial defs and uses at the bottom of the block
3432 to begin processing. */
3433 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3434 {
3435 df_ref def = *def_rec;
3436 #ifdef REG_DEAD_DEBUGGING
3437 if (dump_file)
3438 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3439 #endif
3440
3441 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3442 bitmap_clear_bit (live, DF_REF_REGNO (def));
3443 }
3444
3445 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3446 {
3447 df_ref use = *use_rec;
3448 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3449 {
3450 unsigned int regno = DF_REF_REGNO (use);
3451 bitmap_set_bit (live, regno);
3452
3453 /* Notes are not generated for any of the artificial registers
3454 at the bottom of the block. */
3455 bitmap_set_bit (artificial_uses, regno);
3456 }
3457 }
3458
3459 #ifdef REG_DEAD_DEBUGGING
3460 if (dump_file)
3461 {
3462 fprintf (dump_file, "live before artificials out ");
3463 df_print_regset (dump_file, live);
3464 }
3465 #endif
3466
3467 FOR_BB_INSNS_REVERSE (bb, insn)
3468 {
3469 unsigned int uid = INSN_UID (insn);
3470 struct df_mw_hardreg **mws_rec;
3471 rtx old_dead_notes;
3472 rtx old_unused_notes;
3473 int debug_insn;
3474
3475 if (!INSN_P (insn))
3476 continue;
3477
3478 debug_insn = DEBUG_INSN_P (insn);
3479
3480 bitmap_clear (do_not_gen);
3481 df_kill_notes (insn, &old_dead_notes, &old_unused_notes);
3482
3483 /* Process the defs. */
3484 if (CALL_P (insn))
3485 {
3486 #ifdef REG_DEAD_DEBUGGING
3487 if (dump_file)
3488 {
3489 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3490 df_print_regset (dump_file, live);
3491 }
3492 #endif
3493 /* We only care about real sets for calls. Clobbers cannot
3494 be depended on to really die. */
3495 mws_rec = DF_INSN_UID_MWS (uid);
3496 while (*mws_rec)
3497 {
3498 struct df_mw_hardreg *mws = *mws_rec;
3499 if ((DF_MWS_REG_DEF_P (mws))
3500 && !df_ignore_stack_reg (mws->start_regno))
3501 old_unused_notes
3502 = df_set_unused_notes_for_mw (insn, old_unused_notes,
3503 mws, live, do_not_gen,
3504 artificial_uses);
3505 mws_rec++;
3506 }
3507
3508 /* All of the defs except the return value are some sort of
3509 clobber. This code is for the return. */
3510 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3511 {
3512 df_ref def = *def_rec;
3513 unsigned int dregno = DF_REF_REGNO (def);
3514 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3515 {
3516 old_unused_notes
3517 = df_create_unused_note (insn, old_unused_notes,
3518 def, live, artificial_uses);
3519 bitmap_set_bit (do_not_gen, dregno);
3520 }
3521
3522 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3523 bitmap_clear_bit (live, dregno);
3524 }
3525 }
3526 else
3527 {
3528 /* Regular insn. */
3529 mws_rec = DF_INSN_UID_MWS (uid);
3530 while (*mws_rec)
3531 {
3532 struct df_mw_hardreg *mws = *mws_rec;
3533 if (DF_MWS_REG_DEF_P (mws))
3534 old_unused_notes
3535 = df_set_unused_notes_for_mw (insn, old_unused_notes,
3536 mws, live, do_not_gen,
3537 artificial_uses);
3538 mws_rec++;
3539 }
3540
3541 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3542 {
3543 df_ref def = *def_rec;
3544 unsigned int dregno = DF_REF_REGNO (def);
3545 old_unused_notes
3546 = df_create_unused_note (insn, old_unused_notes,
3547 def, live, artificial_uses);
3548
3549 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3550 bitmap_set_bit (do_not_gen, dregno);
3551
3552 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3553 bitmap_clear_bit (live, dregno);
3554 }
3555 }
3556
3557 /* Process the uses. */
3558 mws_rec = DF_INSN_UID_MWS (uid);
3559 while (*mws_rec)
3560 {
3561 struct df_mw_hardreg *mws = *mws_rec;
3562 if ((DF_MWS_REG_DEF_P (mws))
3563 && !df_ignore_stack_reg (mws->start_regno))
3564 {
3565 bool really_add_notes = debug_insn != 0;
3566
3567 old_dead_notes
3568 = df_set_dead_notes_for_mw (insn, old_dead_notes,
3569 mws, live, do_not_gen,
3570 artificial_uses,
3571 &really_add_notes);
3572
3573 if (really_add_notes)
3574 debug_insn = -1;
3575 }
3576 mws_rec++;
3577 }
3578
3579 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3580 {
3581 df_ref use = *use_rec;
3582 unsigned int uregno = DF_REF_REGNO (use);
3583
3584 #ifdef REG_DEAD_DEBUGGING
3585 if (dump_file && !debug_insn)
3586 {
3587 fprintf (dump_file, " regular looking at use ");
3588 df_ref_debug (use, dump_file);
3589 }
3590 #endif
3591 if (!bitmap_bit_p (live, uregno))
3592 {
3593 if (debug_insn)
3594 {
3595 debug_insn = -1;
3596 break;
3597 }
3598
3599 if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
3600 && (!bitmap_bit_p (do_not_gen, uregno))
3601 && (!bitmap_bit_p (artificial_uses, uregno))
3602 && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
3603 && (!df_ignore_stack_reg (uregno)))
3604 {
3605 rtx reg = (DF_REF_LOC (use))
3606 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3607 old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg);
3608
3609 #ifdef REG_DEAD_DEBUGGING
3610 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3611 #endif
3612 }
3613 /* This register is now live. */
3614 bitmap_set_bit (live, uregno);
3615 }
3616 }
3617
3618 while (old_unused_notes)
3619 {
3620 rtx next = XEXP (old_unused_notes, 1);
3621 free_EXPR_LIST_node (old_unused_notes);
3622 old_unused_notes = next;
3623 }
3624 while (old_dead_notes)
3625 {
3626 rtx next = XEXP (old_dead_notes, 1);
3627 free_EXPR_LIST_node (old_dead_notes);
3628 old_dead_notes = next;
3629 }
3630
3631 if (debug_insn == -1)
3632 {
3633 /* ??? We could probably do better here, replacing dead
3634 registers with their definitions. */
3635 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3636 df_insn_rescan_debug_internal (insn);
3637 }
3638 }
3639 }
3640
3641
3642 /* Recompute the REG_DEAD and REG_UNUSED notes for the blocks in ALL_BLOCKS. */
3643 static void
3644 df_note_compute (bitmap all_blocks)
3645 {
3646 unsigned int bb_index;
3647 bitmap_iterator bi;
3648 bitmap live = BITMAP_ALLOC (&df_bitmap_obstack);
3649 bitmap do_not_gen = BITMAP_ALLOC (&df_bitmap_obstack);
3650 bitmap artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
3651
3652 #ifdef REG_DEAD_DEBUGGING
3653 if (dump_file)
3654 print_rtl_with_bb (dump_file, get_insns());
3655 #endif
3656
3657 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3658 {
3659 df_note_bb_compute (bb_index, live, do_not_gen, artificial_uses);
3660 }
3661
3662 BITMAP_FREE (live);
3663 BITMAP_FREE (do_not_gen);
3664 BITMAP_FREE (artificial_uses);
3665 }
3666
3667
3668 /* Free all storage associated with the problem. */
3669
3670 static void
3671 df_note_free (void)
3672 {
3673 free (df_note);
3674 }
3675
3676
3677 /* All of the information associated with every instance of the problem. */
3678
3679 static struct df_problem problem_NOTE =
3680 {
3681 DF_NOTE, /* Problem id. */
3682 DF_NONE, /* Direction. */
3683 df_note_alloc, /* Allocate the problem specific data. */
3684 NULL, /* Reset global information. */
3685 NULL, /* Free basic block info. */
3686 df_note_compute, /* Local compute function. */
3687 NULL, /* Init the solution specific data. */
3688 NULL, /* Iterative solver. */
3689 NULL, /* Confluence operator 0. */
3690 NULL, /* Confluence operator n. */
3691 NULL, /* Transfer function. */
3692 NULL, /* Finalize function. */
3693 df_note_free, /* Free all of the problem information. */
3694 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3695 NULL, /* Debugging. */
3696 NULL, /* Debugging start block. */
3697 NULL, /* Debugging end block. */
3698 NULL, /* Incremental solution verify start. */
3699 NULL, /* Incremental solution verify end. */
3700 &problem_LR, /* Dependent problem. */
3701 TV_DF_NOTE, /* Timing variable. */
3702 false /* Reset blocks on dropping out of blocks_to_analyze. */
3703 };
3704
3705
3706 /* Create a new DATAFLOW instance and add it to an existing instance
3707 of DF. The returned structure is what is used to get at the
3708 solution. */
3709
3710 void
3711 df_note_add_problem (void)
3712 {
3713 df_add_problem (&problem_NOTE);
3714 }
3715
3716
3717
3718 \f
3719 /*----------------------------------------------------------------------------
3720 Functions for simulating the effects of single insns.
3721
3722 You can simulate either in the forwards direction, starting from the
3723 top of a block, or in the backwards direction, starting from the end
3724 of the block. If you go backwards, defs are examined first to clear bits,
3725 then uses are examined to set bits. If you go forwards, defs are
3726 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3727 are examined to clear bits. In either case, the result of examining
3728 a def can be undone (respectively by a use or a REG_UNUSED note).
3729
3730 If you start at the top of the block, use one of DF_LIVE_IN or
3731 DF_LR_IN. If you start at the bottom of the block use one of
3732 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3733 THEY WILL BE DESTROYED.
3734 ----------------------------------------------------------------------------*/
3735
3736
3737 /* Find the set of DEFs for INSN. */
3738
3739 void
3740 df_simulate_find_defs (rtx insn, bitmap defs)
3741 {
3742 df_ref *def_rec;
3743 unsigned int uid = INSN_UID (insn);
3744
3745 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3746 {
3747 df_ref def = *def_rec;
3748 bitmap_set_bit (defs, DF_REF_REGNO (def));
3749 }
3750 }
3751
3752 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3753
3754 void
3755 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3756 {
3757 df_ref *def_rec;
3758 unsigned int uid = INSN_UID (insn);
3759
3760 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3761 {
3762 df_ref def = *def_rec;
3763 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3764 bitmap_set_bit (defs, DF_REF_REGNO (def));
3765 }
3766 }
3767
3768
3769 /* Simulate the effects of the defs of INSN on LIVE. */
3770
3771 void
3772 df_simulate_defs (rtx insn, bitmap live)
3773 {
3774 df_ref *def_rec;
3775 unsigned int uid = INSN_UID (insn);
3776
3777 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3778 {
3779 df_ref def = *def_rec;
3780 unsigned int dregno = DF_REF_REGNO (def);
3781
3782 /* If the def is to only part of the reg, it does
3783 not kill the other defs that reach here. */
3784 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3785 bitmap_clear_bit (live, dregno);
3786 }
3787 }
3788
3789
3790 /* Simulate the effects of the uses of INSN on LIVE. */
3791
3792 void
3793 df_simulate_uses (rtx insn, bitmap live)
3794 {
3795 df_ref *use_rec;
3796 unsigned int uid = INSN_UID (insn);
3797
3798 if (DEBUG_INSN_P (insn))
3799 return;
3800
3801 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3802 {
3803 df_ref use = *use_rec;
3804 /* Add the use to the set of live registers. */
3805 bitmap_set_bit (live, DF_REF_REGNO (use));
3806 }
3807 }
3808
3809
3810 /* Add back the always live regs in BB to LIVE. */
3811
3812 static inline void
3813 df_simulate_fixup_sets (basic_block bb, bitmap live)
3814 {
3815 /* These regs are considered always live so if they end up dying
3816 because of some def, we need to bring them back again. */
3817 if (bb_has_eh_pred (bb))
3818 bitmap_ior_into (live, df->eh_block_artificial_uses);
3819 else
3820 bitmap_ior_into (live, df->regular_block_artificial_uses);
3821 }
3822
3823
3824 /*----------------------------------------------------------------------------
3825 The following three functions are used only for BACKWARDS scanning:
3826 i.e. they process the defs before the uses.
3827
3828 df_simulate_initialize_backwards should be called first with a
3829 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3830 df_simulate_one_insn_backwards should be called for each insn in
3831 the block, starting with the last one. Finally,
3832 df_simulate_finalize_backwards can be called to get a new value
3833 of the sets at the top of the block (this is rarely used).
3834 ----------------------------------------------------------------------------*/
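
/* A minimal sketch (not part of the df interface) of the backwards walk
   described above.  The live set is a private copy of the LR solution,
   because the simulation routines destroy their argument.  The function
   name is purely illustrative.  */
#if 0
static void
example_simulate_block_backwards (basic_block bb)
{
  bitmap live = BITMAP_ALLOC (NULL);
  rtx insn;

  /* Start from a copy of the solution at the bottom of the block.  */
  bitmap_copy (live, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live);

  /* Walk the insns from last to first; after each call LIVE holds the
     registers live just before INSN.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);

  /* Optionally recover the set at the top of the block.  */
  df_simulate_finalize_backwards (bb, live);

  BITMAP_FREE (live);
}
#endif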
3835
3836 /* Apply the artificial uses and defs at the end of BB in a backwards
3837 direction. */
3838
3839 void
3840 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3841 {
3842 df_ref *def_rec;
3843 df_ref *use_rec;
3844 int bb_index = bb->index;
3845
3846 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3847 {
3848 df_ref def = *def_rec;
3849 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3850 bitmap_clear_bit (live, DF_REF_REGNO (def));
3851 }
3852
3853 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3854 {
3855 df_ref use = *use_rec;
3856 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3857 bitmap_set_bit (live, DF_REF_REGNO (use));
3858 }
3859 }
3860
3861
3862 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3863
3864 void
3865 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3866 {
3867 if (!NONDEBUG_INSN_P (insn))
3868 return;
3869
3870 df_simulate_defs (insn, live);
3871 df_simulate_uses (insn, live);
3872 df_simulate_fixup_sets (bb, live);
3873 }
3874
3875
3876 /* Apply the artificial uses and defs at the top of BB in a backwards
3877 direction. */
3878
3879 void
3880 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3881 {
3882 df_ref *def_rec;
3883 #ifdef EH_USES
3884 df_ref *use_rec;
3885 #endif
3886 int bb_index = bb->index;
3887
3888 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3889 {
3890 df_ref def = *def_rec;
3891 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3892 bitmap_clear_bit (live, DF_REF_REGNO (def));
3893 }
3894
3895 #ifdef EH_USES
3896 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3897 {
3898 df_ref use = *use_rec;
3899 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3900 bitmap_set_bit (live, DF_REF_REGNO (use));
3901 }
3902 #endif
3903 }
3904 /*----------------------------------------------------------------------------
3905 The following two functions are used only for FORWARDS scanning:
3906 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3907 Thus it is important to add the DF_NOTE problem to the stack of
3908 problems computed before using these functions.
3909
3910 df_simulate_initialize_forwards should be called first with a
3911 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3912 df_simulate_one_insn_forwards should be called for each insn in
3913 the block, starting with the first one.
3914 ----------------------------------------------------------------------------*/
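
/* A minimal sketch (not part of the df interface) of the forwards walk
   described above.  It assumes the DF_NOTE problem has already been
   added and df_analyze has been run, and it inherits the caveat below
   about artificial defs at the top of the block.  The function name is
   purely illustrative.  */
#if 0
static void
example_simulate_block_forwards (basic_block bb)
{
  bitmap live = BITMAP_ALLOC (NULL);
  rtx insn;

  /* Start from a copy of the solution at the top of the block.  */
  bitmap_copy (live, df_get_live_in (bb));
  df_simulate_initialize_forwards (bb, live);

  /* Walk the insns from first to last; after each call LIVE holds the
     registers live just after INSN.  */
  FOR_BB_INSNS (bb, insn)
    df_simulate_one_insn_forwards (bb, insn, live);

  BITMAP_FREE (live);
}
#endif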
3915
3916 /* Apply the artificial uses and defs at the top of BB in a forwards
3917 direction. ??? This is wrong; defs mark the point where a pseudo
3918 becomes live when scanning forwards (unless a def is unused). Since
3919 there are no REG_UNUSED notes for artificial defs, passes that
3920 require artificial defs probably should not call this function
3921 unless (as is the case for fwprop) they are correct when liveness
3922 bitmaps are *under*estimated. */
3923
3924 void
3925 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3926 {
3927 df_ref *def_rec;
3928 int bb_index = bb->index;
3929
3930 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3931 {
3932 df_ref def = *def_rec;
3933 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3934 bitmap_clear_bit (live, DF_REF_REGNO (def));
3935 }
3936 }
3937
3938 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3939
3940 void
3941 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3942 {
3943 rtx link;
3944 if (! INSN_P (insn))
3945 return;
3946
3947 /* Make sure that DF_NOTE really is an active df problem. */
3948 gcc_assert (df_note);
3949
3950 /* Note that this is the opposite of how the problem is defined, because
3951 in the LR problem defs _kill_ liveness. However, they do so backwards,
3952 while here the scan is performed forwards! So, first assume that the
3953 def is live, and if this is not true REG_UNUSED notes will rectify the
3954 situation. */
3955 df_simulate_find_noclobber_defs (insn, live);
3956
3957 /* Clear all of the registers that go dead. */
3958 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3959 {
3960 switch (REG_NOTE_KIND (link))
3961 {
3962 case REG_DEAD:
3963 case REG_UNUSED:
3964 {
3965 rtx reg = XEXP (link, 0);
3966 int regno = REGNO (reg);
3967 if (regno < FIRST_PSEUDO_REGISTER)
3968 {
3969 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3970 while (--n >= 0)
3971 bitmap_clear_bit (live, regno + n);
3972 }
3973 else
3974 bitmap_clear_bit (live, regno);
3975 }
3976 break;
3977 default:
3978 break;
3979 }
3980 }
3981 df_simulate_fixup_sets (bb, live);
3982 }
3983
3984
3985 \f
3986 /*----------------------------------------------------------------------------
3987 MULTIPLE DEFINITIONS
3988
3989 Find the locations in the function reached by multiple definition sites
3990 for a live pseudo. In and out bitvectors are built for each basic
3991 block. They are restricted for efficiency to live registers.
3992
3993 The gen and kill sets for the problem are obvious. Together they
3994 include all defined registers in a basic block; the gen set includes
3995 registers where a partial or conditional or may-clobber definition is
3996 last in the BB, while the kill set includes registers with a complete
3997 definition coming last. However, the computation of the dataflow
3998 itself is interesting.
3999
4000 The idea behind it comes from SSA form's iterated dominance frontier
4001 criterion for inserting PHI functions. Just like in that case, we can use
4002 the dominance frontier to find places where multiple definitions meet;
4003 a register X defined in a basic block BB1 has multiple definitions in
4004 basic blocks in BB1's dominance frontier.
4005
4006 So, the in-set of a basic block BB2 is not just the union of the
4007 out-sets of BB2's predecessors, but includes some more bits that come
4008 from the basic blocks of whose dominance frontier BB2 is part (BB1 in
4009 the previous paragraph). I called this set the init-set of BB2.
4010
4011 (Note: I actually use the kill-set only to build the init-set.
4012 gen bits are anyway propagated from BB1 to BB2 by dataflow).
4013
4014 For example, if you have
4015
4016 BB1 : r10 = 0
4017 r11 = 0
4018 if <...> goto BB2 else goto BB3;
4019
4020 BB2 : r10 = 1
4021 r12 = 1
4022 goto BB3;
4023
4024 BB3 :
4025
4026 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4027 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4028 not need to iterate the dominance frontier, because we do not insert
4029 anything like PHI functions there! Instead, dataflow will take care of
4030 propagating the information to BB3's successors.
4031 ---------------------------------------------------------------------------*/
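
/* Restating the problem as dataflow equations (a summary of the
   confluence and transfer functions defined below; "&", "|" and "~"
   denote set intersection, union and complement):

     in(BB)  = init(BB) | union, over non-fake incoming edges P -> BB,
               of out(P), masked by ~regs_invalidated_by_call on EH edges

     out(BB) = (gen(BB) & live_out(BB))
               | ((in(BB) & live_in(BB)) & ~kill(BB))

   The intersections with live_in/live_out implement the restriction to
   live registers described above.  */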
4032
4033 /* Scratch var used by transfer functions. This is used to do md analysis
4034 only for live registers. */
4035 static bitmap df_md_scratch;
4036
4037 /* Set basic block info. */
4038
4039 static void
4040 df_md_set_bb_info (unsigned int index,
4041 struct df_md_bb_info *bb_info)
4042 {
4043 gcc_assert (df_md);
4044 gcc_assert (index < df_md->block_info_size);
4045 df_md->block_info[index] = bb_info;
4046 }
4047
4048
4049 static void
4050 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4051 void *vbb_info)
4052 {
4053 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4054 if (bb_info)
4055 {
4056 BITMAP_FREE (bb_info->kill);
4057 BITMAP_FREE (bb_info->gen);
4058 BITMAP_FREE (bb_info->init);
4059 BITMAP_FREE (bb_info->in);
4060 BITMAP_FREE (bb_info->out);
4061 pool_free (df_md->block_pool, bb_info);
4062 }
4063 }
4064
4065
4066 /* Allocate or reset bitmaps for DF_MD. For blocks that already have
4067 info, all bitmaps, including the solution bits, are cleared. */
4068
4069 static void
4070 df_md_alloc (bitmap all_blocks)
4071 {
4072 unsigned int bb_index;
4073 bitmap_iterator bi;
4074
4075 if (!df_md->block_pool)
4076 df_md->block_pool = create_alloc_pool ("df_md_block pool",
4077 sizeof (struct df_md_bb_info), 50);
4078
4079 df_grow_bb_info (df_md);
4080 df_md_scratch = BITMAP_ALLOC (NULL);
4081
4082 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4083 {
4084 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4085 if (bb_info)
4086 {
4087 bitmap_clear (bb_info->init);
4088 bitmap_clear (bb_info->gen);
4089 bitmap_clear (bb_info->kill);
4090 bitmap_clear (bb_info->in);
4091 bitmap_clear (bb_info->out);
4092 }
4093 else
4094 {
4095 bb_info = (struct df_md_bb_info *) pool_alloc (df_md->block_pool);
4096 df_md_set_bb_info (bb_index, bb_info);
4097 bb_info->init = BITMAP_ALLOC (NULL);
4098 bb_info->gen = BITMAP_ALLOC (NULL);
4099 bb_info->kill = BITMAP_ALLOC (NULL);
4100 bb_info->in = BITMAP_ALLOC (NULL);
4101 bb_info->out = BITMAP_ALLOC (NULL);
4102 }
4103 }
4104
4105 df_md->optional_p = true;
4106 }
4107
4108 /* Add the effect of the top artificial defs of BB to the multiple definitions
4109 bitmap LOCAL_MD. */
4110
4111 void
4112 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4113 {
4114 int bb_index = bb->index;
4115 df_ref *def_rec;
4116 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4117 {
4118 df_ref def = *def_rec;
4119 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4120 {
4121 unsigned int dregno = DF_REF_REGNO (def);
4122 if (DF_REF_FLAGS (def)
4123 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4124 bitmap_set_bit (local_md, dregno);
4125 else
4126 bitmap_clear_bit (local_md, dregno);
4127 }
4128 }
4129 }
4130
4131
4132 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4133 LOCAL_MD. */
4134
4135 void
4136 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4137 bitmap local_md)
4138 {
4139 unsigned uid = INSN_UID (insn);
4140 df_ref *def_rec;
4141
4142 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4143 {
4144 df_ref def = *def_rec;
4145 unsigned int dregno = DF_REF_REGNO (def);
4146 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4147 || (dregno >= FIRST_PSEUDO_REGISTER))
4148 {
4149 if (DF_REF_FLAGS (def)
4150 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4151 bitmap_set_bit (local_md, dregno);
4152 else
4153 bitmap_clear_bit (local_md, dregno);
4154 }
4155 }
4156 }
4157
4158 static void
4159 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4160 df_ref *def_rec,
4161 int top_flag)
4162 {
4163 df_ref def;
4164 bitmap_clear (seen_in_insn);
4165
4166 while ((def = *def_rec++) != NULL)
4167 {
4168 unsigned int dregno = DF_REF_REGNO (def);
4169 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4170 || (dregno >= FIRST_PSEUDO_REGISTER))
4171 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4172 {
4173 if (!bitmap_bit_p (seen_in_insn, dregno))
4174 {
4175 if (DF_REF_FLAGS (def)
4176 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4177 {
4178 bitmap_set_bit (bb_info->gen, dregno);
4179 bitmap_clear_bit (bb_info->kill, dregno);
4180 }
4181 else
4182 {
4183 /* When we find a clobber and a regular def,
4184 make sure the regular def wins. */
4185 bitmap_set_bit (seen_in_insn, dregno);
4186 bitmap_set_bit (bb_info->kill, dregno);
4187 bitmap_clear_bit (bb_info->gen, dregno);
4188 }
4189 }
4190 }
4191 }
4192 }
4193
4194
4195 /* Compute local multiple def info for basic block BB. */
4196
4197 static void
4198 df_md_bb_local_compute (unsigned int bb_index)
4199 {
4200 basic_block bb = BASIC_BLOCK (bb_index);
4201 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4202 rtx insn;
4203
4204 /* Artificials are only hard regs. */
4205 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4206 df_md_bb_local_compute_process_def (bb_info,
4207 df_get_artificial_defs (bb_index),
4208 DF_REF_AT_TOP);
4209
4210 FOR_BB_INSNS (bb, insn)
4211 {
4212 unsigned int uid = INSN_UID (insn);
4213 if (!INSN_P (insn))
4214 continue;
4215
4216 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4217 }
4218
4219 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4220 df_md_bb_local_compute_process_def (bb_info,
4221 df_get_artificial_defs (bb_index),
4222 0);
4223 }
4224
4225 /* Compute local reaching def info for each basic block within BLOCKS. */
4226
4227 static void
4228 df_md_local_compute (bitmap all_blocks)
4229 {
4230 unsigned int bb_index, df_bb_index;
4231 bitmap_iterator bi1, bi2;
4232 basic_block bb;
4233 bitmap *frontiers;
4234
4235 seen_in_insn = BITMAP_ALLOC (NULL);
4236
4237 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4238 {
4239 df_md_bb_local_compute (bb_index);
4240 }
4241
4242 BITMAP_FREE (seen_in_insn);
4243
4244 frontiers = XNEWVEC (bitmap, last_basic_block);
4245 FOR_ALL_BB (bb)
4246 frontiers[bb->index] = BITMAP_ALLOC (NULL);
4247
4248 compute_dominance_frontiers (frontiers);
4249
4250 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4251 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4252 {
4253 bitmap kill = df_md_get_bb_info (bb_index)->kill;
4254 EXECUTE_IF_SET_IN_BITMAP (frontiers[bb_index], 0, df_bb_index, bi2)
4255 {
4256 basic_block bb = BASIC_BLOCK (df_bb_index);
4257 if (bitmap_bit_p (all_blocks, df_bb_index))
4258 bitmap_ior_and_into (df_md_get_bb_info (df_bb_index)->init, kill,
4259 df_get_live_in (bb));
4260 }
4261 }
4262
4263 FOR_ALL_BB (bb)
4264 BITMAP_FREE (frontiers[bb->index]);
4265 free (frontiers);
4266 }
4267
4268
4269 /* Reset the global solution for recalculation. */
4270
4271 static void
4272 df_md_reset (bitmap all_blocks)
4273 {
4274 unsigned int bb_index;
4275 bitmap_iterator bi;
4276
4277 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4278 {
4279 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4280 gcc_assert (bb_info);
4281 bitmap_clear (bb_info->in);
4282 bitmap_clear (bb_info->out);
4283 }
4284 }
4285
4286 static bool
4287 df_md_transfer_function (int bb_index)
4288 {
4289 basic_block bb = BASIC_BLOCK (bb_index);
4290 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4291 bitmap in = bb_info->in;
4292 bitmap out = bb_info->out;
4293 bitmap gen = bb_info->gen;
4294 bitmap kill = bb_info->kill;
4295
4296 /* We need to use a scratch set here so that the value returned from this
4297 function invocation properly reflects whether the sets changed in a
4298 significant way; i.e. not just because the live set was anded in. */
4299 bitmap_and (df_md_scratch, gen, df_get_live_out (bb));
4300
4301 /* Multiple definitions of a register are not relevant if it is not
4302 live. Thus we trim the result to the places where it is live. */
4303 bitmap_and_into (in, df_get_live_in (bb));
4304
4305 return bitmap_ior_and_compl (out, df_md_scratch, in, kill);
4306 }
4307
4308 /* Initialize the solution bit vectors for problem. */
4309
4310 static void
4311 df_md_init (bitmap all_blocks)
4312 {
4313 unsigned int bb_index;
4314 bitmap_iterator bi;
4315
4316 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4317 {
4318 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4319
4320 bitmap_copy (bb_info->in, bb_info->init);
4321 df_md_transfer_function (bb_index);
4322 }
4323 }
4324
4325 static void
4326 df_md_confluence_0 (basic_block bb)
4327 {
4328 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4329 bitmap_copy (bb_info->in, bb_info->init);
4330 }
4331
4332 /* OR the out set of the source into the in set of the target. */
4333
4334 static void
4335 df_md_confluence_n (edge e)
4336 {
4337 bitmap op1 = df_md_get_bb_info (e->dest->index)->in;
4338 bitmap op2 = df_md_get_bb_info (e->src->index)->out;
4339
4340 if (e->flags & EDGE_FAKE)
4341 return;
4342
4343 if (e->flags & EDGE_EH)
4344 bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
4345 else
4346 bitmap_ior_into (op1, op2);
4347 }
4348
4349 /* Free all storage associated with the problem. */
4350
4351 static void
4352 df_md_free (void)
4353 {
4354 unsigned int i;
4355 for (i = 0; i < df_md->block_info_size; i++)
4356 {
4357 struct df_md_bb_info *bb_info = df_md_get_bb_info (i);
4358 if (bb_info)
4359 {
4360 BITMAP_FREE (bb_info->kill);
4361 BITMAP_FREE (bb_info->gen);
4362 BITMAP_FREE (bb_info->init);
4363 BITMAP_FREE (bb_info->in);
4364 BITMAP_FREE (bb_info->out);
4365 }
4366 }
4367
4368 BITMAP_FREE (df_md_scratch);
4369 free_alloc_pool (df_md->block_pool);
4370
4371 df_md->block_info_size = 0;
4372 free (df_md->block_info);
4373 free (df_md);
4374 }
4375
4376
4377 /* Debugging info at top of bb. */
4378
4379 static void
4380 df_md_top_dump (basic_block bb, FILE *file)
4381 {
4382 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4383 if (!bb_info || !bb_info->in)
4384 return;
4385
4386 fprintf (file, ";; md in \t");
4387 df_print_regset (file, bb_info->in);
4388 fprintf (file, ";; md init \t");
4389 df_print_regset (file, bb_info->init);
4390 fprintf (file, ";; md gen \t");
4391 df_print_regset (file, bb_info->gen);
4392 fprintf (file, ";; md kill \t");
4393 df_print_regset (file, bb_info->kill);
4394 }
4395
4396 /* Debugging info at bottom of bb. */
4397
4398 static void
4399 df_md_bottom_dump (basic_block bb, FILE *file)
4400 {
4401 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4402 if (!bb_info || !bb_info->out)
4403 return;
4404
4405 fprintf (file, ";; md out \t");
4406 df_print_regset (file, bb_info->out);
4407 }
4408
4409 static struct df_problem problem_MD =
4410 {
4411 DF_MD, /* Problem id. */
4412 DF_FORWARD, /* Direction. */
4413 df_md_alloc, /* Allocate the problem specific data. */
4414 df_md_reset, /* Reset global information. */
4415 df_md_free_bb_info, /* Free basic block info. */
4416 df_md_local_compute, /* Local compute function. */
4417 df_md_init, /* Init the solution specific data. */
4418 df_worklist_dataflow, /* Worklist solver. */
4419 df_md_confluence_0, /* Confluence operator 0. */
4420 df_md_confluence_n, /* Confluence operator n. */
4421 df_md_transfer_function, /* Transfer function. */
4422 NULL, /* Finalize function. */
4423 df_md_free, /* Free all of the problem information. */
4424 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4425 NULL, /* Debugging. */
4426 df_md_top_dump, /* Debugging start block. */
4427 df_md_bottom_dump, /* Debugging end block. */
4428 NULL, /* Incremental solution verify start. */
4429 NULL, /* Incremental solution verify end. */
4430 NULL, /* Dependent problem. */
4431 TV_DF_MD, /* Timing variable. */
4432 false /* Reset blocks on dropping out of blocks_to_analyze. */
4433 };
4434
4435 /* Create a new MD instance and add it to the existing instance
4436 of DF. */
4437
4438 void
4439 df_md_add_problem (void)
4440 {
4441 df_add_problem (&problem_MD);
4442 }
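
/* A minimal sketch of how a consumer might use this problem; the helper
   name is hypothetical.  It assumes the pass has already called
   df_md_add_problem () followed by df_analyze (), and that starting
   from the block's in set is appropriate for the consumer.  */
#if 0
static void
example_scan_md_block (basic_block bb)
{
  bitmap local_md = BITMAP_ALLOC (NULL);
  rtx insn;

  /* Multiple-definitions state at the top of BB, then simulate the
     artificial defs and each insn in turn.  */
  bitmap_copy (local_md, df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, local_md);

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_md_simulate_one_insn (bb, insn, local_md);

  BITMAP_FREE (local_md);
}
#endif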
4443
4444
4445