gcc/df-problems.c
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "alloc-pool.h"
36 #include "flags.h"
37 #include "hard-reg-set.h"
38 #include "basic-block.h"
39 #include "sbitmap.h"
40 #include "bitmap.h"
41 #include "target.h"
42 #include "timevar.h"
43 #include "df.h"
44 #include "except.h"
45 #include "dce.h"
46 #include "valtrack.h"
47 #include "dumpfile.h"
48
49 /* Note that turning REG_DEAD_DEBUGGING on will cause
50 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
51 addresses in the dumps. */
52 #define REG_DEAD_DEBUGGING 0
53
54 #define DF_SPARSE_THRESHOLD 32
55
56 static bitmap_head seen_in_block;
57 static bitmap_head seen_in_insn;
58
59 /*----------------------------------------------------------------------------
60 Utility functions.
61 ----------------------------------------------------------------------------*/
62
63 /* Generic versions to get the void* version of the block info. Only
64 used inside the problem instance vectors. */
65
66 /* Dump a def-use or use-def chain for REF to FILE. */
67
68 void
69 df_chain_dump (struct df_link *link, FILE *file)
70 {
71 fprintf (file, "{ ");
72 for (; link; link = link->next)
73 {
74 fprintf (file, "%c%d(bb %d insn %d) ",
75 DF_REF_REG_DEF_P (link->ref)
76 ? 'd'
77 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
78 DF_REF_ID (link->ref),
79 DF_REF_BBNO (link->ref),
80 DF_REF_IS_ARTIFICIAL (link->ref)
81 ? -1 : DF_REF_INSN_UID (link->ref));
82 }
83 fprintf (file, "}");
84 }
85
86
87 /* Print some basic block info as part of df_dump. */
88
89 void
90 df_print_bb_index (basic_block bb, FILE *file)
91 {
92 edge e;
93 edge_iterator ei;
94
95 fprintf (file, "\n( ");
96 FOR_EACH_EDGE (e, ei, bb->preds)
97 {
98 basic_block pred = e->src;
99 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
100 }
101 fprintf (file, ")->[%d]->( ", bb->index);
102 FOR_EACH_EDGE (e, ei, bb->succs)
103 {
104 basic_block succ = e->dest;
105 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
106 }
107 fprintf (file, ")\n");
108 }
109
110 \f
111 /*----------------------------------------------------------------------------
112 REACHING DEFINITIONS
113
114 Find the locations in the function where each definition site for a
115 pseudo reaches. In and out bitvectors are built for each basic
116 block. The id field in the ref is used to index into these sets.
117 See df.h for details.
118
119 If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching
120 existing uses are included in the global reaching DEFs set, or in other
121 words only DEFs that are still live. This is a kind of pruned version
122 of the traditional reaching definitions problem that is much less
123 complex to compute and produces enough information to compute UD-chains.
124 In this context, live must be interpreted in the DF_LR sense: Uses that
125 are upward exposed but may not be initialized on all paths through the
126 CFG. For a USE that is not reached by a DEF on all paths, we still want
127 to make those DEFs that do reach the USE visible, and pruning based on
128 DF_LIVE would make that impossible.
129 ----------------------------------------------------------------------------*/
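/* For intuition, a sketch of the equations this problem solves (the
   EH-edge pruning and the DF_RD_PRUNE_DEAD_DEFS masking mentioned above
   are refinements handled in the confluence and transfer functions
   below):

       IN(bb)  = union over predecessors P of OUT(P)
       OUT(bb) = GEN(bb) | (IN(bb) & ~KILL(bb))

   Each bit position is the DF_REF_ID of one def, and all defs of a
   given regno occupy a contiguous range of bit positions; see the notes
   on bit ordering that follow.  */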
130
131 /* This problem plays a large number of games for the sake of
132 efficiency.
133
134 1) The order of the bits in the bitvectors. After the scanning
135 phase, all of the defs are sorted. All of the defs for the reg 0
136 are first, followed by all defs for reg 1 and so on.
137
138 2) There are two kill sets, one if the number of defs is less than or
139 equal to DF_SPARSE_THRESHOLD and another if the number of defs is
140 greater.
141
142 <= : Data is built directly in the kill set.
143
144 > : One level of indirection is used to keep from generating long
145 strings of 1 bits in the kill sets. Bitvectors that are indexed
146 by the regnum are used to represent that there is a killing def
147 for the register. The confluence and transfer functions use
148 these along with the bitmap_clear_range call to remove ranges of
149 bits without actually generating a knockout vector.
150
151 The kill/sparse_kill pair and the dense_invalidated_by_call/
152 sparse_invalidated_by_call pair both play this game. */
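/* A small worked example of the two kill-set representations (the
   regnos and def counts below are made up for illustration):

     - Suppose reg 300 has 5 defs, occupying bit positions
       DF_DEFS_BEGIN (300) .. DF_DEFS_BEGIN (300) + 4.  Since
       5 <= DF_SPARSE_THRESHOLD, a block that kills reg 300 sets those
       5 bits directly in bb_info->kill.

     - Suppose reg 301 has 500 defs.  Since 500 > DF_SPARSE_THRESHOLD,
       the block just sets bit 301 in bb_info->sparse_kill; the
       confluence and transfer functions later apply
       bitmap_clear_range (..., DF_DEFS_BEGIN (301), DF_DEFS_COUNT (301))
       to the incoming set instead of materializing 500 one-bits.  */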
153
154 /* Private data used to compute the solution for this problem. These
155 data structures are not accessible outside of this module. */
156 struct df_rd_problem_data
157 {
158 /* Regs (by regno) with more than DF_SPARSE_THRESHOLD defs invalidated by call. */
159 bitmap_head sparse_invalidated_by_call;
160 /* Defs (by DF_REF_ID) to regs invalidated by call, for rd. */
161 bitmap_head dense_invalidated_by_call;
162 /* An obstack for the bitmaps we need for this problem. */
163 bitmap_obstack rd_bitmaps;
164 };
165
166
167 /* Free basic block info. */
168
169 static void
170 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
171 void *vbb_info)
172 {
173 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
174 if (bb_info)
175 {
176 bitmap_clear (&bb_info->kill);
177 bitmap_clear (&bb_info->sparse_kill);
178 bitmap_clear (&bb_info->gen);
179 bitmap_clear (&bb_info->in);
180 bitmap_clear (&bb_info->out);
181 }
182 }
183
184
185 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
186 not touched unless the block is new. */
187
188 static void
189 df_rd_alloc (bitmap all_blocks)
190 {
191 unsigned int bb_index;
192 bitmap_iterator bi;
193 struct df_rd_problem_data *problem_data;
194
195 if (df_rd->problem_data)
196 {
197 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
198 bitmap_clear (&problem_data->sparse_invalidated_by_call);
199 bitmap_clear (&problem_data->dense_invalidated_by_call);
200 }
201 else
202 {
203 problem_data = XNEW (struct df_rd_problem_data);
204 df_rd->problem_data = problem_data;
205
206 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
207 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
208 &problem_data->rd_bitmaps);
209 bitmap_initialize (&problem_data->dense_invalidated_by_call,
210 &problem_data->rd_bitmaps);
211 }
212
213 df_grow_bb_info (df_rd);
214
215 /* Because of the clustering of all use sites for the same pseudo,
216 we have to process all of the blocks before doing the analysis. */
217
218 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
219 {
220 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
221
222 /* When bitmaps are already initialized, just clear them. */
223 if (bb_info->kill.obstack)
224 {
225 bitmap_clear (&bb_info->kill);
226 bitmap_clear (&bb_info->sparse_kill);
227 bitmap_clear (&bb_info->gen);
228 }
229 else
230 {
231 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
232 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
233 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
234 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
235 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
236 }
237 }
238 df_rd->optional_p = true;
239 }
240
241
242 /* Add the effect of the top artificial defs of BB to the reaching definitions
243 bitmap LOCAL_RD. */
244
245 void
246 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
247 {
248 int bb_index = bb->index;
249 df_ref *def_rec;
250 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
251 {
252 df_ref def = *def_rec;
253 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
254 {
255 unsigned int dregno = DF_REF_REGNO (def);
256 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
257 bitmap_clear_range (local_rd,
258 DF_DEFS_BEGIN (dregno),
259 DF_DEFS_COUNT (dregno));
260 bitmap_set_bit (local_rd, DF_REF_ID (def));
261 }
262 }
263 }
264
265 /* Add the effect of the defs of INSN to the reaching definitions bitmap
266 LOCAL_RD. */
267
268 void
269 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
270 bitmap local_rd)
271 {
272 unsigned uid = INSN_UID (insn);
273 df_ref *def_rec;
274
275 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
276 {
277 df_ref def = *def_rec;
278 unsigned int dregno = DF_REF_REGNO (def);
279 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
280 || (dregno >= FIRST_PSEUDO_REGISTER))
281 {
282 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
283 bitmap_clear_range (local_rd,
284 DF_DEFS_BEGIN (dregno),
285 DF_DEFS_COUNT (dregno));
286 if (!(DF_REF_FLAGS (def)
287 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
288 bitmap_set_bit (local_rd, DF_REF_ID (def));
289 }
290 }
291 }
292
293 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
294 more complicated than just simulating, because we must produce the
295 gen and kill sets and hence deal with the two possible representations
296 of kill sets. */
297
298 static void
299 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
300 df_ref *def_rec,
301 int top_flag)
302 {
303 while (*def_rec)
304 {
305 df_ref def = *def_rec;
306 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
307 {
308 unsigned int regno = DF_REF_REGNO (def);
309 unsigned int begin = DF_DEFS_BEGIN (regno);
310 unsigned int n_defs = DF_DEFS_COUNT (regno);
311
312 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
313 || (regno >= FIRST_PSEUDO_REGISTER))
314 {
315 /* Only the last def(s) for a regno in the block have any
316 effect. */
317 if (!bitmap_bit_p (&seen_in_block, regno))
318 {
319 /* The first def for regno in insn gets to knock out the
320 defs from other instructions. */
321 if ((!bitmap_bit_p (&seen_in_insn, regno))
322 /* If the def is to only part of the reg, it does
323 not kill the other defs that reach here. */
324 && (!(DF_REF_FLAGS (def) &
325 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
326 {
327 if (n_defs > DF_SPARSE_THRESHOLD)
328 {
329 bitmap_set_bit (&bb_info->sparse_kill, regno);
330 bitmap_clear_range (&bb_info->gen, begin, n_defs);
331 }
332 else
333 {
334 bitmap_set_range (&bb_info->kill, begin, n_defs);
335 bitmap_clear_range (&bb_info->gen, begin, n_defs);
336 }
337 }
338
339 bitmap_set_bit (&seen_in_insn, regno);
340 /* All defs for regno in the instruction may be put into
341 the gen set. */
342 if (!(DF_REF_FLAGS (def)
343 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
344 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
345 }
346 }
347 }
348 def_rec++;
349 }
350 }
351
352 /* Compute local reaching def info for basic block BB. */
353
354 static void
355 df_rd_bb_local_compute (unsigned int bb_index)
356 {
357 basic_block bb = BASIC_BLOCK (bb_index);
358 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
359 rtx insn;
360
361 bitmap_clear (&seen_in_block);
362 bitmap_clear (&seen_in_insn);
363
364 /* Artificials are only hard regs. */
365 if (!(df->changeable_flags & DF_NO_HARD_REGS))
366 df_rd_bb_local_compute_process_def (bb_info,
367 df_get_artificial_defs (bb_index),
368 0);
369
370 FOR_BB_INSNS_REVERSE (bb, insn)
371 {
372 unsigned int uid = INSN_UID (insn);
373
374 if (!INSN_P (insn))
375 continue;
376
377 df_rd_bb_local_compute_process_def (bb_info,
378 DF_INSN_UID_DEFS (uid), 0);
379
380 /* This complex dance with the two bitmaps is required because
381 instructions can assign twice to the same pseudo. This
382 generally happens with calls that will have one def for the
383 result and another def for the clobber. If only one vector
384 is used and the clobber goes first, the result will be
385 lost. */
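/* Purely illustrative (the regno is made up): a call that returns a
   value in reg 100 produces both a real def of reg 100 (the set of the
   result) and a clobber def of reg 100 (it is call-clobbered), i.e.
   two defs of the same regno in one insn.  If the clobber were recorded
   straight into seen_in_block, processing the set afterwards would be
   skipped and its bit would never reach the gen set; keeping a separate
   seen_in_insn set avoids that.  */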
386 bitmap_ior_into (&seen_in_block, &seen_in_insn);
387 bitmap_clear (&seen_in_insn);
388 }
389
390 /* Process the artificial defs at the top of the block last since we
391 are going backwards through the block and these are logically at
392 the start. */
393 if (!(df->changeable_flags & DF_NO_HARD_REGS))
394 df_rd_bb_local_compute_process_def (bb_info,
395 df_get_artificial_defs (bb_index),
396 DF_REF_AT_TOP);
397 }
398
399
400 /* Compute local reaching def info for each basic block within BLOCKS. */
401
402 static void
403 df_rd_local_compute (bitmap all_blocks)
404 {
405 unsigned int bb_index;
406 bitmap_iterator bi;
407 unsigned int regno;
408 struct df_rd_problem_data *problem_data
409 = (struct df_rd_problem_data *) df_rd->problem_data;
410 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
411 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
412
413 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
414 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
415
416 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
417
418 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
419 {
420 df_rd_bb_local_compute (bb_index);
421 }
422
423 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
424 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
425 {
426 if (! HARD_REGISTER_NUM_P (regno)
427 || !(df->changeable_flags & DF_NO_HARD_REGS))
428 {
429 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
430 bitmap_set_bit (sparse_invalidated, regno);
431 else
432 bitmap_set_range (dense_invalidated,
433 DF_DEFS_BEGIN (regno),
434 DF_DEFS_COUNT (regno));
435 }
436 }
437
438 bitmap_clear (&seen_in_block);
439 bitmap_clear (&seen_in_insn);
440 }
441
442
443 /* Initialize the solution bit vectors for the problem. */
444
445 static void
446 df_rd_init_solution (bitmap all_blocks)
447 {
448 unsigned int bb_index;
449 bitmap_iterator bi;
450
451 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
452 {
453 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
454
455 bitmap_copy (&bb_info->out, &bb_info->gen);
456 bitmap_clear (&bb_info->in);
457 }
458 }
459
460 /* The IN set of the target gets ORed with the OUT set of the source. */
461
462 static bool
463 df_rd_confluence_n (edge e)
464 {
465 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
466 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
467 bool changed = false;
468
469 if (e->flags & EDGE_FAKE)
470 return false;
471
472 if (e->flags & EDGE_EH)
473 {
474 struct df_rd_problem_data *problem_data
475 = (struct df_rd_problem_data *) df_rd->problem_data;
476 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
477 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
478 bitmap_iterator bi;
479 unsigned int regno;
480 bitmap_head tmp;
481
482 bitmap_initialize (&tmp, &df_bitmap_obstack);
483 bitmap_copy (&tmp, op2);
484 bitmap_and_compl_into (&tmp, dense_invalidated);
485
486 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
487 {
488 bitmap_clear_range (&tmp,
489 DF_DEFS_BEGIN (regno),
490 DF_DEFS_COUNT (regno));
491 }
492 changed |= bitmap_ior_into (op1, &tmp);
493 bitmap_clear (&tmp);
494 return changed;
495 }
496 else
497 return bitmap_ior_into (op1, op2);
498 }
499
500
501 /* Transfer function. */
502
503 static bool
504 df_rd_transfer_function (int bb_index)
505 {
506 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
507 unsigned int regno;
508 bitmap_iterator bi;
509 bitmap in = &bb_info->in;
510 bitmap out = &bb_info->out;
511 bitmap gen = &bb_info->gen;
512 bitmap kill = &bb_info->kill;
513 bitmap sparse_kill = &bb_info->sparse_kill;
514 bool changed = false;
515
516 if (bitmap_empty_p (sparse_kill))
517 changed = bitmap_ior_and_compl (out, gen, in, kill);
518 else
519 {
520 struct df_rd_problem_data *problem_data;
521 bitmap_head tmp;
522
523 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
524 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
525 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
526 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
527
528 bitmap_copy (&tmp, in);
529 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
530 {
531 bitmap_clear_range (&tmp,
532 DF_DEFS_BEGIN (regno),
533 DF_DEFS_COUNT (regno));
534 }
535 bitmap_and_compl_into (&tmp, kill);
536 bitmap_ior_into (&tmp, gen);
537 changed = !bitmap_equal_p (&tmp, out);
538 if (changed)
539 {
540 bitmap_clear (out);
541 bb_info->out = tmp;
542 }
543 else
544 bitmap_clear (&tmp);
545 }
546
547 if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
548 {
549 /* Create a mask of DEFs for all registers live at the end of this
550 basic block, and mask out DEFs of registers that are not live.
551 Computing the mask looks costly, but the benefit of the pruning
552 outweighs the cost. */
553 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
554 bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
555 bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
556 unsigned int regno;
557 bitmap_iterator bi;
558
559 EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi)
560 bitmap_set_range (live_defs,
561 DF_DEFS_BEGIN (regno),
562 DF_DEFS_COUNT (regno));
563 changed |= bitmap_and_into (&bb_info->out, live_defs);
564 BITMAP_FREE (live_defs);
565 }
566
567 return changed;
568 }
569
570 /* Free all storage associated with the problem. */
571
572 static void
573 df_rd_free (void)
574 {
575 struct df_rd_problem_data *problem_data
576 = (struct df_rd_problem_data *) df_rd->problem_data;
577
578 if (problem_data)
579 {
580 bitmap_obstack_release (&problem_data->rd_bitmaps);
581
582 df_rd->block_info_size = 0;
583 free (df_rd->block_info);
584 df_rd->block_info = NULL;
585 free (df_rd->problem_data);
586 }
587 free (df_rd);
588 }
589
590
591 /* Debugging info. */
592
593 static void
594 df_rd_start_dump (FILE *file)
595 {
596 struct df_rd_problem_data *problem_data
597 = (struct df_rd_problem_data *) df_rd->problem_data;
598 unsigned int m = DF_REG_SIZE (df);
599 unsigned int regno;
600
601 if (!df_rd->block_info)
602 return;
603
604 fprintf (file, ";; Reaching defs:\n");
605
606 fprintf (file, ";; sparse invalidated \t");
607 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
608 fprintf (file, ";; dense invalidated \t");
609 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
610
611 fprintf (file, ";; reg->defs[] map:\t");
612 for (regno = 0; regno < m; regno++)
613 if (DF_DEFS_COUNT (regno))
614 fprintf (file, "%d[%d,%d] ", regno,
615 DF_DEFS_BEGIN (regno),
616 DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1);
617 fprintf (file, "\n");
618 }
619
620
621 static void
622 df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
623 {
624 bitmap_head tmp;
625 unsigned int regno;
626 unsigned int m = DF_REG_SIZE (df);
627 bool first_reg = true;
628
629 fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
630
631 bitmap_initialize (&tmp, &df_bitmap_obstack);
632 for (regno = 0; regno < m; regno++)
633 {
634 if (HARD_REGISTER_NUM_P (regno)
635 && (df->changeable_flags & DF_NO_HARD_REGS))
636 continue;
637 bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
638 bitmap_and_into (&tmp, defs_set);
639 if (! bitmap_empty_p (&tmp))
640 {
641 bitmap_iterator bi;
642 unsigned int ix;
643 bool first_def = true;
644
645 if (! first_reg)
646 fprintf (file, ",");
647 first_reg = false;
648
649 fprintf (file, "%u[", regno);
650 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi)
651 {
652 fprintf (file, "%s%u", first_def ? "" : ",", ix);
653 first_def = false;
654 }
655 fprintf (file, "]");
656 }
657 bitmap_clear (&tmp);
658 }
659
660 fprintf (file, "\n");
661 bitmap_clear (&tmp);
662 }
663
664 /* Debugging info at top of bb. */
665
666 static void
667 df_rd_top_dump (basic_block bb, FILE *file)
668 {
669 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
670 if (!bb_info)
671 return;
672
673 df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file);
674 df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file);
675 df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file);
676 }
677
678
679 /* Debugging info at bottom of bb. */
680
681 static void
682 df_rd_bottom_dump (basic_block bb, FILE *file)
683 {
684 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
685 if (!bb_info)
686 return;
687
688 df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file);
689 }
690
691 /* All of the information associated with every instance of the problem. */
692
693 static struct df_problem problem_RD =
694 {
695 DF_RD, /* Problem id. */
696 DF_FORWARD, /* Direction. */
697 df_rd_alloc, /* Allocate the problem specific data. */
698 NULL, /* Reset global information. */
699 df_rd_free_bb_info, /* Free basic block info. */
700 df_rd_local_compute, /* Local compute function. */
701 df_rd_init_solution, /* Init the solution specific data. */
702 df_worklist_dataflow, /* Worklist solver. */
703 NULL, /* Confluence operator 0. */
704 df_rd_confluence_n, /* Confluence operator n. */
705 df_rd_transfer_function, /* Transfer function. */
706 NULL, /* Finalize function. */
707 df_rd_free, /* Free all of the problem information. */
708 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
709 df_rd_start_dump, /* Debugging. */
710 df_rd_top_dump, /* Debugging start block. */
711 df_rd_bottom_dump, /* Debugging end block. */
712 NULL, /* Debugging start insn. */
713 NULL, /* Debugging end insn. */
714 NULL, /* Incremental solution verify start. */
715 NULL, /* Incremental solution verify end. */
716 NULL, /* Dependent problem. */
717 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
718 TV_DF_RD, /* Timing variable. */
719 true /* Reset blocks on dropping out of blocks_to_analyze. */
720 };
721
722
723
724 /* Create a new RD instance and add it to the existing instance
725 of DF. */
726
727 void
728 df_rd_add_problem (void)
729 {
730 df_add_problem (&problem_RD);
731 }
732
733
734 \f
735 /*----------------------------------------------------------------------------
736 LIVE REGISTERS
737
738 Find the locations in the function where any use of a pseudo can
739 reach in the backwards direction. In and out bitvectors are built
740 for each basic block. The regno is used to index into these sets.
741 See df.h for details.
742 ----------------------------------------------------------------------------*/
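/* Sketch of the equations solved here (backward direction; the pruning
   of call-clobbered registers across EH edges and the always-live hard
   registers in df->hardware_regs_used are refinements handled by the
   confluence functions below):

       OUT(bb) = union over successors S of IN(S)
       IN(bb)  = USE(bb) | (OUT(bb) & ~DEF(bb))

   Bits are register numbers; USE holds registers with an upward-exposed
   use in the block, DEF holds registers fully defined in it.  */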
743
744 /* Private data used to verify the solution for this problem. */
745 struct df_lr_problem_data
746 {
747 bitmap_head *in;
748 bitmap_head *out;
749 /* An obstack for the bitmaps we need for this problem. */
750 bitmap_obstack lr_bitmaps;
751 };
752
753 /* Free basic block info. */
754
755 static void
756 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
757 void *vbb_info)
758 {
759 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
760 if (bb_info)
761 {
762 bitmap_clear (&bb_info->use);
763 bitmap_clear (&bb_info->def);
764 bitmap_clear (&bb_info->in);
765 bitmap_clear (&bb_info->out);
766 }
767 }
768
769
770 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
771 not touched unless the block is new. */
772
773 static void
774 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
775 {
776 unsigned int bb_index;
777 bitmap_iterator bi;
778 struct df_lr_problem_data *problem_data;
779
780 df_grow_bb_info (df_lr);
781 if (df_lr->problem_data)
782 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
783 else
784 {
785 problem_data = XNEW (struct df_lr_problem_data);
786 df_lr->problem_data = problem_data;
787
788 problem_data->out = NULL;
789 problem_data->in = NULL;
790 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
791 }
792
793 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
794 {
795 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
796
797 /* When bitmaps are already initialized, just clear them. */
798 if (bb_info->use.obstack)
799 {
800 bitmap_clear (&bb_info->def);
801 bitmap_clear (&bb_info->use);
802 }
803 else
804 {
805 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
806 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
807 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
808 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
809 }
810 }
811
812 df_lr->optional_p = false;
813 }
814
815
816 /* Reset the global solution for recalculation. */
817
818 static void
819 df_lr_reset (bitmap all_blocks)
820 {
821 unsigned int bb_index;
822 bitmap_iterator bi;
823
824 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
825 {
826 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
827 gcc_assert (bb_info);
828 bitmap_clear (&bb_info->in);
829 bitmap_clear (&bb_info->out);
830 }
831 }
832
833
834 /* Compute local live register info for basic block BB. */
835
836 static void
837 df_lr_bb_local_compute (unsigned int bb_index)
838 {
839 basic_block bb = BASIC_BLOCK (bb_index);
840 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
841 rtx insn;
842 df_ref *def_rec;
843 df_ref *use_rec;
844
845 /* Process the registers set in an exception handler. */
846 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
847 {
848 df_ref def = *def_rec;
849 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
850 {
851 unsigned int dregno = DF_REF_REGNO (def);
852 bitmap_set_bit (&bb_info->def, dregno);
853 bitmap_clear_bit (&bb_info->use, dregno);
854 }
855 }
856
857 /* Process the hardware registers that are always live. */
858 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
859 {
860 df_ref use = *use_rec;
861 /* Add use to set of uses in this BB. */
862 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
863 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
864 }
865
866 FOR_BB_INSNS_REVERSE (bb, insn)
867 {
868 unsigned int uid = INSN_UID (insn);
869
870 if (!NONDEBUG_INSN_P (insn))
871 continue;
872
873 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
874 {
875 df_ref def = *def_rec;
876 /* If the def is to only part of the reg, it does
877 not kill the other defs that reach here. */
878 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
879 {
880 unsigned int dregno = DF_REF_REGNO (def);
881 bitmap_set_bit (&bb_info->def, dregno);
882 bitmap_clear_bit (&bb_info->use, dregno);
883 }
884 }
885
886 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
887 {
888 df_ref use = *use_rec;
889 /* Add use to set of uses in this BB. */
890 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
891 }
892 }
893
894 /* Process the registers set in an exception handler or the hard
895 frame pointer if this block is the target of a non local
896 goto. */
897 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
898 {
899 df_ref def = *def_rec;
900 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
901 {
902 unsigned int dregno = DF_REF_REGNO (def);
903 bitmap_set_bit (&bb_info->def, dregno);
904 bitmap_clear_bit (&bb_info->use, dregno);
905 }
906 }
907
908 #ifdef EH_USES
909 /* Process the uses that are live into an exception handler. */
910 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
911 {
912 df_ref use = *use_rec;
913 /* Add use to set of uses in this BB. */
914 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
915 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
916 }
917 #endif
918
919 /* If the df_live problem is not defined, such as at -O0 and -O1, we
920 still need to keep the luids up to date. This is normally done
921 in the df_live problem since this problem has a forwards
922 scan. */
923 if (!df_live)
924 df_recompute_luids (bb);
925 }
926
927
928 /* Compute local live register info for each basic block within BLOCKS. */
929
930 static void
931 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
932 {
933 unsigned int bb_index, i;
934 bitmap_iterator bi;
935
936 bitmap_clear (&df->hardware_regs_used);
937
938 /* The all-important stack pointer must always be live. */
939 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
940
941 /* Global regs are always live, too. */
942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
943 if (global_regs[i])
944 bitmap_set_bit (&df->hardware_regs_used, i);
945
946 /* Before reload, there are a few registers that must be forced
947 live everywhere -- which might not already be the case for
948 blocks within infinite loops. */
949 if (!reload_completed)
950 {
951 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
952 /* Any reference to any pseudo before reload is a potential
953 reference of the frame pointer. */
954 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
955
956 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
957 /* Pseudos with argument area equivalences may require
958 reloading via the argument pointer. */
959 if (fixed_regs[ARG_POINTER_REGNUM])
960 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
961 #endif
962
963 /* Any constant, or pseudo with constant equivalences, may
964 require reloading from memory using the pic register. */
965 if (pic_offset_table_regnum != INVALID_REGNUM
966 && fixed_regs[pic_offset_table_regnum])
967 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
968 }
969
970 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
971 {
972 if (bb_index == EXIT_BLOCK)
973 {
974 /* The exit block is special for this problem and its bits are
975 computed from thin air. */
976 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
977 bitmap_copy (&bb_info->use, df->exit_block_uses);
978 }
979 else
980 df_lr_bb_local_compute (bb_index);
981 }
982
983 bitmap_clear (df_lr->out_of_date_transfer_functions);
984 }
985
986
987 /* Initialize the solution vectors. */
988
989 static void
990 df_lr_init (bitmap all_blocks)
991 {
992 unsigned int bb_index;
993 bitmap_iterator bi;
994
995 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
996 {
997 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
998 bitmap_copy (&bb_info->in, &bb_info->use);
999 bitmap_clear (&bb_info->out);
1000 }
1001 }
1002
1003
1004 /* Confluence function that processes infinite loops. This might be a
1005 noreturn function that throws. And even if it isn't, getting the
1006 unwind info right helps debugging. */
1007 static void
1008 df_lr_confluence_0 (basic_block bb)
1009 {
1010 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
1011 if (bb != EXIT_BLOCK_PTR)
1012 bitmap_copy (op1, &df->hardware_regs_used);
1013 }
1014
1015
1016 /* Confluence function that handles EH edges specially. */
1017
1018 static bool
1019 df_lr_confluence_n (edge e)
1020 {
1021 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
1022 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
1023 bool changed = false;
1024
1025 /* Call-clobbered registers die across exception and call edges. */
1026 /* ??? Abnormal call edges ignored for the moment, as this gets
1027 confused by sibling call edges, which crashes reg-stack. */
1028 if (e->flags & EDGE_EH)
1029 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
1030 else
1031 changed = bitmap_ior_into (op1, op2);
1032
1033 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
1034 return changed;
1035 }
1036
1037
1038 /* Transfer function. */
1039
1040 static bool
1041 df_lr_transfer_function (int bb_index)
1042 {
1043 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1044 bitmap in = &bb_info->in;
1045 bitmap out = &bb_info->out;
1046 bitmap use = &bb_info->use;
1047 bitmap def = &bb_info->def;
1048
1049 return bitmap_ior_and_compl (in, use, out, def);
1050 }
1051
1052
1053 /* Run the fast dce as a side effect of building LR. */
1054
1055 static void
1056 df_lr_finalize (bitmap all_blocks)
1057 {
1058 df_lr->solutions_dirty = false;
1059 if (df->changeable_flags & DF_LR_RUN_DCE)
1060 {
1061 run_fast_df_dce ();
1062
1063 /* If dce deletes some instructions, we need to recompute the lr
1064 solution before proceeding further. The problem is that fast
1065 dce is a pessimistic dataflow algorithm. In the case where
1066 it deletes a statement S inside of a loop, the uses inside of
1067 S may not be deleted from the dataflow solution because they
1068 were carried around the loop. While it is conservatively
1069 correct to leave these extra bits, the standards of df
1070 require that we maintain the best possible (least fixed
1071 point) solution. The only way to do that is to redo the
1072 iteration from the beginning. See PR35805 for an
1073 example. */
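/* Concretely (a made-up illustration): if S sets reg 200 inside a
   loop and S is the only consumer of reg 201, fast DCE can delete S,
   yet reg 201 may remain in the IN/OUT sets of the loop blocks because
   its liveness bits were already propagated around the back edge
   before the deletion.  Rerunning the iteration removes them.  */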
1074 if (df_lr->solutions_dirty)
1075 {
1076 df_clear_flags (DF_LR_RUN_DCE);
1077 df_lr_alloc (all_blocks);
1078 df_lr_local_compute (all_blocks);
1079 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1080 df_lr_finalize (all_blocks);
1081 df_set_flags (DF_LR_RUN_DCE);
1082 }
1083 }
1084 }
1085
1086
1087 /* Free all storage associated with the problem. */
1088
1089 static void
1090 df_lr_free (void)
1091 {
1092 struct df_lr_problem_data *problem_data
1093 = (struct df_lr_problem_data *) df_lr->problem_data;
1094 if (df_lr->block_info)
1095 {
1096
1097 df_lr->block_info_size = 0;
1098 free (df_lr->block_info);
1099 df_lr->block_info = NULL;
1100 bitmap_obstack_release (&problem_data->lr_bitmaps);
1101 free (df_lr->problem_data);
1102 df_lr->problem_data = NULL;
1103 }
1104
1105 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1106 free (df_lr);
1107 }
1108
1109
1110 /* Debugging info at top of bb. */
1111
1112 static void
1113 df_lr_top_dump (basic_block bb, FILE *file)
1114 {
1115 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1116 struct df_lr_problem_data *problem_data;
1117 if (!bb_info)
1118 return;
1119
1120 fprintf (file, ";; lr in \t");
1121 df_print_regset (file, &bb_info->in);
1122 if (df_lr->problem_data)
1123 {
1124 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1125 if (problem_data->in)
1126 {
1127 fprintf (file, ";; old in \t");
1128 df_print_regset (file, &problem_data->in[bb->index]);
1129 }
1130 }
1131 fprintf (file, ";; lr use \t");
1132 df_print_regset (file, &bb_info->use);
1133 fprintf (file, ";; lr def \t");
1134 df_print_regset (file, &bb_info->def);
1135 }
1136
1137
1138 /* Debugging info at bottom of bb. */
1139
1140 static void
1141 df_lr_bottom_dump (basic_block bb, FILE *file)
1142 {
1143 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1144 struct df_lr_problem_data *problem_data;
1145 if (!bb_info)
1146 return;
1147
1148 fprintf (file, ";; lr out \t");
1149 df_print_regset (file, &bb_info->out);
1150 if (df_lr->problem_data)
1151 {
1152 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1153 if (problem_data->out)
1154 {
1155 fprintf (file, ";; old out \t");
1156 df_print_regset (file, &problem_data->out[bb->index]);
1157 }
1158 }
1159 }
1160
1161
1162 /* Build the datastructure to verify that the solution to the dataflow
1163 equations is not dirty. */
1164
1165 static void
1166 df_lr_verify_solution_start (void)
1167 {
1168 basic_block bb;
1169 struct df_lr_problem_data *problem_data;
1170 if (df_lr->solutions_dirty)
1171 return;
1172
1173 /* Set it true so that the solution is recomputed. */
1174 df_lr->solutions_dirty = true;
1175
1176 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1177 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1178 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1179
1180 FOR_ALL_BB (bb)
1181 {
1182 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1183 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1184 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1185 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1186 }
1187 }
1188
1189
1190 /* Compare the saved datastructure and the new solution to the dataflow
1191 equations. */
1192
1193 static void
1194 df_lr_verify_solution_end (void)
1195 {
1196 struct df_lr_problem_data *problem_data;
1197 basic_block bb;
1198
1199 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1200
1201 if (!problem_data->out)
1202 return;
1203
1204 if (df_lr->solutions_dirty)
1205 /* Do not check if the solution is still dirty. See the comment
1206 in df_lr_finalize for details. */
1207 df_lr->solutions_dirty = false;
1208 else
1209 FOR_ALL_BB (bb)
1210 {
1211 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1212 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1213 {
1214 /*df_dump (stderr);*/
1215 gcc_unreachable ();
1216 }
1217 }
1218
1219 /* Cannot delete them immediately because you may want to dump them
1220 if the comparison fails. */
1221 FOR_ALL_BB (bb)
1222 {
1223 bitmap_clear (&problem_data->in[bb->index]);
1224 bitmap_clear (&problem_data->out[bb->index]);
1225 }
1226
1227 free (problem_data->in);
1228 free (problem_data->out);
1229 problem_data->in = NULL;
1230 problem_data->out = NULL;
1231 }
1232
1233
1234 /* All of the information associated with every instance of the problem. */
1235
1236 static struct df_problem problem_LR =
1237 {
1238 DF_LR, /* Problem id. */
1239 DF_BACKWARD, /* Direction. */
1240 df_lr_alloc, /* Allocate the problem specific data. */
1241 df_lr_reset, /* Reset global information. */
1242 df_lr_free_bb_info, /* Free basic block info. */
1243 df_lr_local_compute, /* Local compute function. */
1244 df_lr_init, /* Init the solution specific data. */
1245 df_worklist_dataflow, /* Worklist solver. */
1246 df_lr_confluence_0, /* Confluence operator 0. */
1247 df_lr_confluence_n, /* Confluence operator n. */
1248 df_lr_transfer_function, /* Transfer function. */
1249 df_lr_finalize, /* Finalize function. */
1250 df_lr_free, /* Free all of the problem information. */
1251 NULL, /* Remove this problem from the stack of dataflow problems. */
1252 NULL, /* Debugging. */
1253 df_lr_top_dump, /* Debugging start block. */
1254 df_lr_bottom_dump, /* Debugging end block. */
1255 NULL, /* Debugging start insn. */
1256 NULL, /* Debugging end insn. */
1257 df_lr_verify_solution_start,/* Incremental solution verify start. */
1258 df_lr_verify_solution_end, /* Incremental solution verify end. */
1259 NULL, /* Dependent problem. */
1260 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1261 TV_DF_LR, /* Timing variable. */
1262 false /* Reset blocks on dropping out of blocks_to_analyze. */
1263 };
1264
1265
1266 /* Create a new DATAFLOW instance and add it to an existing instance
1267 of DF. The returned structure is what is used to get at the
1268 solution. */
1269
1270 void
1271 df_lr_add_problem (void)
1272 {
1273 df_add_problem (&problem_LR);
1274 /* These will be initialized when df_scan_blocks processes each
1275 block. */
1276 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1277 }
1278
1279
1280 /* Verify that all of the lr related info is consistent and
1281 correct. */
1282
1283 void
1284 df_lr_verify_transfer_functions (void)
1285 {
1286 basic_block bb;
1287 bitmap_head saved_def;
1288 bitmap_head saved_use;
1289 bitmap_head all_blocks;
1290
1291 if (!df)
1292 return;
1293
1294 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1295 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1296 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1297
1298 FOR_ALL_BB (bb)
1299 {
1300 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1301 bitmap_set_bit (&all_blocks, bb->index);
1302
1303 if (bb_info)
1304 {
1305 /* Make a copy of the transfer functions and then compute
1306 new ones to see if the transfer functions have
1307 changed. */
1308 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1309 bb->index))
1310 {
1311 bitmap_copy (&saved_def, &bb_info->def);
1312 bitmap_copy (&saved_use, &bb_info->use);
1313 bitmap_clear (&bb_info->def);
1314 bitmap_clear (&bb_info->use);
1315
1316 df_lr_bb_local_compute (bb->index);
1317 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1318 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1319 }
1320 }
1321 else
1322 {
1323 /* If we do not have basic block info, the block must be in
1324 the list of dirty blocks or else someone has added a
1325 block behind our backs. */
1326 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1327 bb->index));
1328 }
1329 /* Make sure no one created a block without following
1330 procedures. */
1331 gcc_assert (df_scan_get_bb_info (bb->index));
1332 }
1333
1334 /* Make sure there are no dirty bits in blocks that have been deleted. */
1335 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1336 &all_blocks));
1337
1338 bitmap_clear (&saved_def);
1339 bitmap_clear (&saved_use);
1340 bitmap_clear (&all_blocks);
1341 }
1342
1343
1344 \f
1345 /*----------------------------------------------------------------------------
1346 LIVE AND MUST-INITIALIZED REGISTERS.
1347
1348 This problem first computes the IN and OUT bitvectors for the
1349 must-initialized registers problems, which is a forward problem.
1350 It gives the set of registers for which we MUST have an available
1351 definition on any path from the entry block to the entry/exit of
1352 a basic block. Sets generate a definition, while clobbers kill
1353 a definition.
1354
1355 In and out bitvectors are built for each basic block and are indexed by
1356 regnum (see df.h for details). In and out bitvectors in struct
1357 df_live_bb_info actually refer to the must-initialized problem.
1358
1359 Then, the in and out sets for the LIVE problem itself are computed.
1360 These are the logical AND of the IN and OUT sets from the LR problem
1361 and the must-initialized problem.
1362 ----------------------------------------------------------------------------*/
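/* Sketch of the forward must-initialized equations, and how the final
   LIVE sets fall out (an illustrative summary of the functions below):

       IN(bb)  = union over predecessors P of OUT(P)
       OUT(bb) = (GEN(bb) & LR_OUT(bb))
                 | ((IN(bb) & LR_IN(bb)) & ~KILL(bb))

   GEN holds registers defined (even partially or conditionally) in the
   block, KILL holds registers that are must-clobbered in it.
   df_live_finalize then ANDs IN/OUT with the corresponding DF_LR sets,
   so DF_LIVE_IN/DF_LIVE_OUT hold registers that are both live and
   must-initialized.  */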
1363
1364 /* Private data used to verify the solution for this problem. */
1365 struct df_live_problem_data
1366 {
1367 bitmap_head *in;
1368 bitmap_head *out;
1369 /* An obstack for the bitmaps we need for this problem. */
1370 bitmap_obstack live_bitmaps;
1371 };
1372
1373 /* Scratch var used by transfer functions. This is used to implement
1374 an optimization to reduce the amount of space used to compute the
1375 combined lr and live analysis. */
1376 static bitmap_head df_live_scratch;
1377
1378
1379 /* Free basic block info. */
1380
1381 static void
1382 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1383 void *vbb_info)
1384 {
1385 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1386 if (bb_info)
1387 {
1388 bitmap_clear (&bb_info->gen);
1389 bitmap_clear (&bb_info->kill);
1390 bitmap_clear (&bb_info->in);
1391 bitmap_clear (&bb_info->out);
1392 }
1393 }
1394
1395
1396 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1397 not touched unless the block is new. */
1398
1399 static void
1400 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1401 {
1402 unsigned int bb_index;
1403 bitmap_iterator bi;
1404 struct df_live_problem_data *problem_data;
1405
1406 if (df_live->problem_data)
1407 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1408 else
1409 {
1410 problem_data = XNEW (struct df_live_problem_data);
1411 df_live->problem_data = problem_data;
1412
1413 problem_data->out = NULL;
1414 problem_data->in = NULL;
1415 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1416 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1417 }
1418
1419 df_grow_bb_info (df_live);
1420
1421 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1422 {
1423 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1424
1425 /* When bitmaps are already initialized, just clear them. */
1426 if (bb_info->kill.obstack)
1427 {
1428 bitmap_clear (&bb_info->kill);
1429 bitmap_clear (&bb_info->gen);
1430 }
1431 else
1432 {
1433 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1434 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1435 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1436 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1437 }
1438 }
1439 df_live->optional_p = (optimize <= 1);
1440 }
1441
1442
1443 /* Reset the global solution for recalculation. */
1444
1445 static void
1446 df_live_reset (bitmap all_blocks)
1447 {
1448 unsigned int bb_index;
1449 bitmap_iterator bi;
1450
1451 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1452 {
1453 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1454 gcc_assert (bb_info);
1455 bitmap_clear (&bb_info->in);
1456 bitmap_clear (&bb_info->out);
1457 }
1458 }
1459
1460
1461 /* Compute local uninitialized register info for basic block BB. */
1462
1463 static void
1464 df_live_bb_local_compute (unsigned int bb_index)
1465 {
1466 basic_block bb = BASIC_BLOCK (bb_index);
1467 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1468 rtx insn;
1469 df_ref *def_rec;
1470 int luid = 0;
1471
1472 FOR_BB_INSNS (bb, insn)
1473 {
1474 unsigned int uid = INSN_UID (insn);
1475 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1476
1477 /* Inserting labels does not always trigger the incremental
1478 rescanning. */
1479 if (!insn_info)
1480 {
1481 gcc_assert (!INSN_P (insn));
1482 insn_info = df_insn_create_insn_record (insn);
1483 }
1484
1485 DF_INSN_INFO_LUID (insn_info) = luid;
1486 if (!INSN_P (insn))
1487 continue;
1488
1489 luid++;
1490 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1491 {
1492 df_ref def = *def_rec;
1493 unsigned int regno = DF_REF_REGNO (def);
1494
1495 if (DF_REF_FLAGS_IS_SET (def,
1496 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1497 /* All partial or conditional defs
1498 seen are included in the gen set. */
1499 bitmap_set_bit (&bb_info->gen, regno);
1500 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1501 /* Only must clobbers for the entire reg destroy the
1502 value. */
1503 bitmap_set_bit (&bb_info->kill, regno);
1504 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1505 bitmap_set_bit (&bb_info->gen, regno);
1506 }
1507 }
1508
1509 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1510 {
1511 df_ref def = *def_rec;
1512 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1513 }
1514 }
1515
1516
1517 /* Compute local uninitialized register info. */
1518
1519 static void
1520 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1521 {
1522 unsigned int bb_index;
1523 bitmap_iterator bi;
1524
1525 df_grow_insn_info ();
1526
1527 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1528 0, bb_index, bi)
1529 {
1530 df_live_bb_local_compute (bb_index);
1531 }
1532
1533 bitmap_clear (df_live->out_of_date_transfer_functions);
1534 }
1535
1536
1537 /* Initialize the solution vectors. */
1538
1539 static void
1540 df_live_init (bitmap all_blocks)
1541 {
1542 unsigned int bb_index;
1543 bitmap_iterator bi;
1544
1545 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1546 {
1547 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1548 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1549
1550 /* No register may reach a location where it is not used. Thus
1551 we trim the must-initialized result to the places where it is used. */
1552 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1553 bitmap_clear (&bb_info->in);
1554 }
1555 }
1556
1557 /* Forward confluence function that ignores fake edges. */
1558
1559 static bool
1560 df_live_confluence_n (edge e)
1561 {
1562 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1563 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1564
1565 if (e->flags & EDGE_FAKE)
1566 return false;
1567
1568 return bitmap_ior_into (op1, op2);
1569 }
1570
1571
1572 /* Transfer function for the forwards must-initialized problem. */
1573
1574 static bool
1575 df_live_transfer_function (int bb_index)
1576 {
1577 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1578 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1579 bitmap in = &bb_info->in;
1580 bitmap out = &bb_info->out;
1581 bitmap gen = &bb_info->gen;
1582 bitmap kill = &bb_info->kill;
1583
1584 /* We need to use a scratch set here so that the value returned from this
1585 function invocation properly reflects whether the sets changed in a
1586 significant way; i.e. not just because the lr set was anded in. */
1587 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1588 /* No register may reach a location where it is not used. Thus
1589 we trim the must-initialized result to the places where it is used. */
1590 bitmap_and_into (in, &bb_lr_info->in);
1591
1592 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1593 }
1594
1595
1596 /* And the LR info with the must-initialized registers, to produce the LIVE info. */
1597
1598 static void
1599 df_live_finalize (bitmap all_blocks)
1600 {
1601
1602 if (df_live->solutions_dirty)
1603 {
1604 bitmap_iterator bi;
1605 unsigned int bb_index;
1606
1607 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1608 {
1609 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1610 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1611
1612 /* No register may reach a location where it is not used. Thus
1613 we trim the must-initialized result to the places where it is used. */
1614 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1615 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1616 }
1617
1618 df_live->solutions_dirty = false;
1619 }
1620 }
1621
1622
1623 /* Free all storage associated with the problem. */
1624
1625 static void
1626 df_live_free (void)
1627 {
1628 struct df_live_problem_data *problem_data
1629 = (struct df_live_problem_data *) df_live->problem_data;
1630 if (df_live->block_info)
1631 {
1632 df_live->block_info_size = 0;
1633 free (df_live->block_info);
1634 df_live->block_info = NULL;
1635 bitmap_clear (&df_live_scratch);
1636 bitmap_obstack_release (&problem_data->live_bitmaps);
1637 free (problem_data);
1638 df_live->problem_data = NULL;
1639 }
1640 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1641 free (df_live);
1642 }
1643
1644
1645 /* Debugging info at top of bb. */
1646
1647 static void
1648 df_live_top_dump (basic_block bb, FILE *file)
1649 {
1650 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1651 struct df_live_problem_data *problem_data;
1652
1653 if (!bb_info)
1654 return;
1655
1656 fprintf (file, ";; live in \t");
1657 df_print_regset (file, &bb_info->in);
1658 if (df_live->problem_data)
1659 {
1660 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1661 if (problem_data->in)
1662 {
1663 fprintf (file, ";; old in \t");
1664 df_print_regset (file, &problem_data->in[bb->index]);
1665 }
1666 }
1667 fprintf (file, ";; live gen \t");
1668 df_print_regset (file, &bb_info->gen);
1669 fprintf (file, ";; live kill\t");
1670 df_print_regset (file, &bb_info->kill);
1671 }
1672
1673
1674 /* Debugging info at bottom of bb. */
1675
1676 static void
1677 df_live_bottom_dump (basic_block bb, FILE *file)
1678 {
1679 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1680 struct df_live_problem_data *problem_data;
1681
1682 if (!bb_info)
1683 return;
1684
1685 fprintf (file, ";; live out \t");
1686 df_print_regset (file, &bb_info->out);
1687 if (df_live->problem_data)
1688 {
1689 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1690 if (problem_data->out)
1691 {
1692 fprintf (file, ";; old out \t");
1693 df_print_regset (file, &problem_data->out[bb->index]);
1694 }
1695 }
1696 }
1697
1698
1699 /* Build the datastructure to verify that the solution to the dataflow
1700 equations is not dirty. */
1701
1702 static void
1703 df_live_verify_solution_start (void)
1704 {
1705 basic_block bb;
1706 struct df_live_problem_data *problem_data;
1707 if (df_live->solutions_dirty)
1708 return;
1709
1710 /* Set it true so that the solution is recomputed. */
1711 df_live->solutions_dirty = true;
1712
1713 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1714 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1715 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1716
1717 FOR_ALL_BB (bb)
1718 {
1719 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1720 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1721 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1722 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1723 }
1724 }
1725
1726
1727 /* Compare the saved datastructure and the new solution to the dataflow
1728 equations. */
1729
1730 static void
1731 df_live_verify_solution_end (void)
1732 {
1733 struct df_live_problem_data *problem_data;
1734 basic_block bb;
1735
1736 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1737 if (!problem_data->out)
1738 return;
1739
1740 FOR_ALL_BB (bb)
1741 {
1742 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1743 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1744 {
1745 /*df_dump (stderr);*/
1746 gcc_unreachable ();
1747 }
1748 }
1749
1750 /* Cannot delete them immediately because you may want to dump them
1751 if the comparison fails. */
1752 FOR_ALL_BB (bb)
1753 {
1754 bitmap_clear (&problem_data->in[bb->index]);
1755 bitmap_clear (&problem_data->out[bb->index]);
1756 }
1757
1758 free (problem_data->in);
1759 free (problem_data->out);
1760 free (problem_data);
1761 df_live->problem_data = NULL;
1762 }
1763
1764
1765 /* All of the information associated with every instance of the problem. */
1766
1767 static struct df_problem problem_LIVE =
1768 {
1769 DF_LIVE, /* Problem id. */
1770 DF_FORWARD, /* Direction. */
1771 df_live_alloc, /* Allocate the problem specific data. */
1772 df_live_reset, /* Reset global information. */
1773 df_live_free_bb_info, /* Free basic block info. */
1774 df_live_local_compute, /* Local compute function. */
1775 df_live_init, /* Init the solution specific data. */
1776 df_worklist_dataflow, /* Worklist solver. */
1777 NULL, /* Confluence operator 0. */
1778 df_live_confluence_n, /* Confluence operator n. */
1779 df_live_transfer_function, /* Transfer function. */
1780 df_live_finalize, /* Finalize function. */
1781 df_live_free, /* Free all of the problem information. */
1782 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1783 NULL, /* Debugging. */
1784 df_live_top_dump, /* Debugging start block. */
1785 df_live_bottom_dump, /* Debugging end block. */
1786 NULL, /* Debugging start insn. */
1787 NULL, /* Debugging end insn. */
1788 df_live_verify_solution_start,/* Incremental solution verify start. */
1789 df_live_verify_solution_end, /* Incremental solution verify end. */
1790 &problem_LR, /* Dependent problem. */
1791 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1792 TV_DF_LIVE, /* Timing variable. */
1793 false /* Reset blocks on dropping out of blocks_to_analyze. */
1794 };
1795
1796
1797 /* Create a new DATAFLOW instance and add it to an existing instance
1798 of DF. The returned structure is what is used to get at the
1799 solution. */
1800
1801 void
1802 df_live_add_problem (void)
1803 {
1804 df_add_problem (&problem_LIVE);
1805 /* These will be initialized when df_scan_blocks processes each
1806 block. */
1807 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1808 }
1809
1810
1811 /* Set all of the blocks as dirty. This needs to be done if this
1812 problem is added after all of the insns have been scanned. */
1813
1814 void
1815 df_live_set_all_dirty (void)
1816 {
1817 basic_block bb;
1818 FOR_ALL_BB (bb)
1819 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1820 bb->index);
1821 }
1822
1823
1824 /* Verify that all of the live related info is consistent and
1825 correct. */
1826
1827 void
1828 df_live_verify_transfer_functions (void)
1829 {
1830 basic_block bb;
1831 bitmap_head saved_gen;
1832 bitmap_head saved_kill;
1833 bitmap_head all_blocks;
1834
1835 if (!df)
1836 return;
1837
1838 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1839 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1840 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1841
1842 df_grow_insn_info ();
1843
1844 FOR_ALL_BB (bb)
1845 {
1846 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1847 bitmap_set_bit (&all_blocks, bb->index);
1848
1849 if (bb_info)
1850 {
1851 /* Make a copy of the transfer functions and then compute
1852 new ones to see if the transfer functions have
1853 changed. */
1854 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1855 bb->index))
1856 {
1857 bitmap_copy (&saved_gen, &bb_info->gen);
1858 bitmap_copy (&saved_kill, &bb_info->kill);
1859 bitmap_clear (&bb_info->gen);
1860 bitmap_clear (&bb_info->kill);
1861
1862 df_live_bb_local_compute (bb->index);
1863 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1864 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1865 }
1866 }
1867 else
1868 {
1869 /* If we do not have basic block info, the block must be in
1870 the list of dirty blocks or else someone has added a
1871 block behind our backs. */
1872 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1873 bb->index));
1874 }
1875 /* Make sure no one created a block without following
1876 procedures. */
1877 gcc_assert (df_scan_get_bb_info (bb->index));
1878 }
1879
1880 /* Make sure there are no dirty bits in blocks that have been deleted. */
1881 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1882 &all_blocks));
1883 bitmap_clear (&saved_gen);
1884 bitmap_clear (&saved_kill);
1885 bitmap_clear (&all_blocks);
1886 }
1887 \f
1888 /*----------------------------------------------------------------------------
1889 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1890
1891 Link either the defs to the uses and / or the uses to the defs.
1892
1893 These problems are set up like the other dataflow problems so that
1894 they nicely fit into the framework. They are much simpler and only
1895 involve a single traversal of instructions and an examination of
1896 the reaching defs information (the dependent problem).
1897 ----------------------------------------------------------------------------*/
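/* Illustrative sketch, not part of the df framework itself: how a
   client pass might request use-def chains and walk the reaching defs
   of every use.  Assumes the usual GCC internal headers (df.h and
   friends) are in scope, as they are anywhere this framework is used;
   example_walk_ud_chains is a hypothetical name.  */
#if 0
static void
example_walk_ud_chains (void)
{
  basic_block bb;
  rtx insn;

  df_chain_add_problem (DF_UD_CHAIN);
  df_analyze ();

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (NONDEBUG_INSN_P (insn))
	{
	  df_ref *use_rec;
	  for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
	    {
	      struct df_link *link;
	      /* Each link->ref is a def whose value may reach this use.  */
	      for (link = DF_REF_CHAIN (*use_rec); link; link = link->next)
		gcc_assert (DF_REF_REG_DEF_P (link->ref));
	    }
	}
}
#endif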
1898
1899 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1900
1901 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1902
1903 struct df_link *
1904 df_chain_create (df_ref src, df_ref dst)
1905 {
1906 struct df_link *head = DF_REF_CHAIN (src);
1907 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1908
1909 DF_REF_CHAIN (src) = link;
1910 link->next = head;
1911 link->ref = dst;
1912 return link;
1913 }
1914
1915
1916 /* Delete any du or ud chains that start at REF and point to
1917 TARGET. */
1918 static void
1919 df_chain_unlink_1 (df_ref ref, df_ref target)
1920 {
1921 struct df_link *chain = DF_REF_CHAIN (ref);
1922 struct df_link *prev = NULL;
1923
1924 while (chain)
1925 {
1926 if (chain->ref == target)
1927 {
1928 if (prev)
1929 prev->next = chain->next;
1930 else
1931 DF_REF_CHAIN (ref) = chain->next;
1932 pool_free (df_chain->block_pool, chain);
1933 return;
1934 }
1935 prev = chain;
1936 chain = chain->next;
1937 }
1938 }
1939
1940
1941 /* Delete any du or ud chains that start at or point to REF. */
1942
1943 void
1944 df_chain_unlink (df_ref ref)
1945 {
1946 struct df_link *chain = DF_REF_CHAIN (ref);
1947 while (chain)
1948 {
1949 struct df_link *next = chain->next;
1950 /* Delete the other side if it exists. */
1951 df_chain_unlink_1 (chain->ref, ref);
1952 pool_free (df_chain->block_pool, chain);
1953 chain = next;
1954 }
1955 DF_REF_CHAIN (ref) = NULL;
1956 }
1957
1958
1959 /* Copy the du or ud chain starting at FROM_REF and attach it to
1960 TO_REF. */
1961
1962 void
1963 df_chain_copy (df_ref to_ref,
1964 struct df_link *from_ref)
1965 {
1966 while (from_ref)
1967 {
1968 df_chain_create (to_ref, from_ref->ref);
1969 from_ref = from_ref->next;
1970 }
1971 }
1972
1973
1974 /* Remove this problem from the stack of dataflow problems. */
1975
1976 static void
1977 df_chain_remove_problem (void)
1978 {
1979 bitmap_iterator bi;
1980 unsigned int bb_index;
1981
1982 /* Wholesale destruction of the old chains. */
1983 if (df_chain->block_pool)
1984 free_alloc_pool (df_chain->block_pool);
1985
1986 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1987 {
1988 rtx insn;
1989 df_ref *def_rec;
1990 df_ref *use_rec;
1991 basic_block bb = BASIC_BLOCK (bb_index);
1992
1993 if (df_chain_problem_p (DF_DU_CHAIN))
1994 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1995 DF_REF_CHAIN (*def_rec) = NULL;
1996 if (df_chain_problem_p (DF_UD_CHAIN))
1997 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1998 DF_REF_CHAIN (*use_rec) = NULL;
1999
2000 FOR_BB_INSNS (bb, insn)
2001 {
2002 unsigned int uid = INSN_UID (insn);
2003
2004 if (INSN_P (insn))
2005 {
2006 if (df_chain_problem_p (DF_DU_CHAIN))
2007 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2008 DF_REF_CHAIN (*def_rec) = NULL;
2009 if (df_chain_problem_p (DF_UD_CHAIN))
2010 {
2011 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2012 DF_REF_CHAIN (*use_rec) = NULL;
2013 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
2014 DF_REF_CHAIN (*use_rec) = NULL;
2015 }
2016 }
2017 }
2018 }
2019
2020 bitmap_clear (df_chain->out_of_date_transfer_functions);
2021 df_chain->block_pool = NULL;
2022 }
2023
2024
2025 /* Remove the chain problem completely. */
2026
2027 static void
2028 df_chain_fully_remove_problem (void)
2029 {
2030 df_chain_remove_problem ();
2031 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2032 free (df_chain);
2033 }
2034
2035
2036 /* Allocate the block pool used to hold def-use or use-def chains. */
2037
2038 static void
2039 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2040 {
2041 df_chain_remove_problem ();
2042 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
2043 sizeof (struct df_link), 50);
2044 df_chain->optional_p = true;
2045 }
2046
2047
2048 /* Reset all of the chains when the set of basic blocks changes. */
2049
2050 static void
2051 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2052 {
2053 df_chain_remove_problem ();
2054 }
2055
2056
2057 /* Create the chains for a list of USEs. */
2058
2059 static void
2060 df_chain_create_bb_process_use (bitmap local_rd,
2061 df_ref *use_rec,
2062 int top_flag)
2063 {
2064 bitmap_iterator bi;
2065 unsigned int def_index;
2066
2067 while (*use_rec)
2068 {
2069 df_ref use = *use_rec;
2070 unsigned int uregno = DF_REF_REGNO (use);
2071 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2072 || (uregno >= FIRST_PSEUDO_REGISTER))
2073 {
2074 /* Do not want to go through this for an uninitialized var. */
2075 int count = DF_DEFS_COUNT (uregno);
2076 if (count)
2077 {
2078 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2079 {
2080 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2081 unsigned int last_index = first_index + count - 1;
2082
2083 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2084 {
2085 df_ref def;
2086 if (def_index > last_index)
2087 break;
2088
2089 def = DF_DEFS_GET (def_index);
2090 if (df_chain_problem_p (DF_DU_CHAIN))
2091 df_chain_create (def, use);
2092 if (df_chain_problem_p (DF_UD_CHAIN))
2093 df_chain_create (use, def);
2094 }
2095 }
2096 }
2097 }
2098
2099 use_rec++;
2100 }
2101 }
2102
2103
2104 /* Create chains from reaching defs bitmaps for basic block BB. */
2105
2106 static void
2107 df_chain_create_bb (unsigned int bb_index)
2108 {
2109 basic_block bb = BASIC_BLOCK (bb_index);
2110 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2111 rtx insn;
2112 bitmap_head cpy;
2113
2114 bitmap_initialize (&cpy, &bitmap_default_obstack);
2115 bitmap_copy (&cpy, &bb_info->in);
2116 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2117
2118 /* Since we are going forwards, process the artificial uses first
2119 then the artificial defs second. */
2120
2121 #ifdef EH_USES
2122 /* Create the chains for the artificial uses from the EH_USES at the
2123 beginning of the block. */
2124
2125 /* Artificials are only hard regs. */
2126 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2127 df_chain_create_bb_process_use (&cpy,
2128 df_get_artificial_uses (bb->index),
2129 DF_REF_AT_TOP);
2130 #endif
2131
2132 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2133
2134 /* Process the regular instructions next. */
2135 FOR_BB_INSNS (bb, insn)
2136 if (INSN_P (insn))
2137 {
2138 unsigned int uid = INSN_UID (insn);
2139
2140 /* First scan the uses and link them up with the defs that remain
2141 in the cpy vector. */
2142 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2143 if (df->changeable_flags & DF_EQ_NOTES)
2144 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2145
2146 /* Since we are going forwards, process the defs second. */
2147 df_rd_simulate_one_insn (bb, insn, &cpy);
2148 }
2149
2150 /* Create the chains for the artificial uses of the hard registers
2151 at the end of the block. */
2152 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2153 df_chain_create_bb_process_use (&cpy,
2154 df_get_artificial_uses (bb->index),
2155 0);
2156
2157 bitmap_clear (&cpy);
2158 }
2159
2160 /* Create du and/or ud chains from the reaching defs bitmaps for the
2161 basic blocks in ALL_BLOCKS. */
2162
2163 static void
2164 df_chain_finalize (bitmap all_blocks)
2165 {
2166 unsigned int bb_index;
2167 bitmap_iterator bi;
2168
2169 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2170 {
2171 df_chain_create_bb (bb_index);
2172 }
2173 }
2174
2175
2176 /* Free all storage associated with the problem. */
2177
2178 static void
2179 df_chain_free (void)
2180 {
2181 free_alloc_pool (df_chain->block_pool);
2182 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2183 free (df_chain);
2184 }
2185
2186
2187 /* Debugging info. */
2188
2189 static void
2190 df_chain_bb_dump (basic_block bb, FILE *file, bool top)
2191 {
2192 /* Artificials are only hard regs. */
2193 if (df->changeable_flags & DF_NO_HARD_REGS)
2194 return;
2195 if (df_chain_problem_p (DF_UD_CHAIN))
2196 {
2197 fprintf (file,
2198 ";; UD chains for artificial uses at %s\n",
2199 top ? "top" : "bottom");
2200 df_ref *use_rec = df_get_artificial_uses (bb->index);
2201 if (*use_rec)
2202 {
2203 while (*use_rec)
2204 {
2205 df_ref use = *use_rec;
2206 if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2207 || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
2208 {
2209 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2210 df_chain_dump (DF_REF_CHAIN (use), file);
2211 fprintf (file, "\n");
2212 }
2213 use_rec++;
2214 }
2215 }
2216 }
2217 if (df_chain_problem_p (DF_DU_CHAIN))
2218 {
2219 fprintf (file,
2220 ";; DU chains for artificial defs at %s\n",
2221 top ? "top" : "bottom");
2222 df_ref *def_rec = df_get_artificial_defs (bb->index);
2223 if (*def_rec)
2224 {
2225 while (*def_rec)
2226 {
2227 df_ref def = *def_rec;
2228
2229 if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
2230 || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
2231 {
2232 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2233 df_chain_dump (DF_REF_CHAIN (def), file);
2234 fprintf (file, "\n");
2235 }
2236 def_rec++;
2237 }
2238 }
2239 }
2240 }
2241
2242 static void
2243 df_chain_top_dump (basic_block bb, FILE *file)
2244 {
2245 df_chain_bb_dump (bb, file, /*top=*/true);
2246 }
2247
2248 static void
2249 df_chain_bottom_dump (basic_block bb, FILE *file)
2250 {
2251 df_chain_bb_dump (bb, file, /*top=*/false);
2252 }
2253
2254 static void
2255 df_chain_insn_top_dump (const_rtx insn, FILE *file)
2256 {
2257 if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
2258 {
2259 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2260 df_ref *use_rec = DF_INSN_INFO_USES (insn_info);
2261 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2262 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2263 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2264 if (*use_rec || *eq_use_rec)
2265 {
2266 while (*use_rec)
2267 {
2268 df_ref use = *use_rec;
2269 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2270 || !(df->changeable_flags & DF_NO_HARD_REGS))
2271 {
2272 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2273 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2274 fprintf (file, "read/write ");
2275 df_chain_dump (DF_REF_CHAIN (use), file);
2276 fprintf (file, "\n");
2277 }
2278 use_rec++;
2279 }
2280 while (*eq_use_rec)
2281 {
2282 df_ref use = *eq_use_rec;
2283 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2284 || !(df->changeable_flags & DF_NO_HARD_REGS))
2285 {
2286 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2287 df_chain_dump (DF_REF_CHAIN (use), file);
2288 fprintf (file, "\n");
2289 }
2290 eq_use_rec++;
2291 }
2292 }
2293 }
2294 }
2295
2296 static void
2297 df_chain_insn_bottom_dump (const_rtx insn, FILE *file)
2298 {
2299 if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
2300 {
2301 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2302 df_ref *def_rec = DF_INSN_INFO_DEFS (insn_info);
2303 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2304 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2305 if (*def_rec)
2306 {
2307 while (*def_rec)
2308 {
2309 df_ref def = *def_rec;
2310 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
2311 || !(df->changeable_flags & DF_NO_HARD_REGS))
2312 {
2313 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2314 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2315 fprintf (file, "read/write ");
2316 df_chain_dump (DF_REF_CHAIN (def), file);
2317 fprintf (file, "\n");
2318 }
2319 def_rec++;
2320 }
2321 }
2322 fprintf (file, "\n");
2323 }
2324 }
2325
2326 static struct df_problem problem_CHAIN =
2327 {
2328 DF_CHAIN, /* Problem id. */
2329 DF_NONE, /* Direction. */
2330 df_chain_alloc, /* Allocate the problem specific data. */
2331 df_chain_reset, /* Reset global information. */
2332 NULL, /* Free basic block info. */
2333 NULL, /* Local compute function. */
2334 NULL, /* Init the solution specific data. */
2335 NULL, /* Iterative solver. */
2336 NULL, /* Confluence operator 0. */
2337 NULL, /* Confluence operator n. */
2338 NULL, /* Transfer function. */
2339 df_chain_finalize, /* Finalize function. */
2340 df_chain_free, /* Free all of the problem information. */
2341 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2342 NULL, /* Debugging. */
2343 df_chain_top_dump, /* Debugging start block. */
2344 df_chain_bottom_dump, /* Debugging end block. */
2345 df_chain_insn_top_dump, /* Debugging start insn. */
2346 df_chain_insn_bottom_dump, /* Debugging end insn. */
2347 NULL, /* Incremental solution verify start. */
2348 NULL, /* Incremental solution verify end. */
2349 &problem_RD, /* Dependent problem. */
2350 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2351 TV_DF_CHAIN, /* Timing variable. */
2352 false /* Reset blocks on dropping out of blocks_to_analyze. */
2353 };
2354
2355
2356 /* Create a new DATAFLOW instance and add it to an existing instance
2357 of DF. The returned structure is what is used to get at the
2358 solution. */
2359
2360 void
2361 df_chain_add_problem (unsigned int chain_flags)
2362 {
2363 df_add_problem (&problem_CHAIN);
2364 df_chain->local_flags = chain_flags;
2365 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2366 }
2367
2368 #undef df_chain_problem_p
2369
2370 \f
2371 /*----------------------------------------------------------------------------
2372 WORD LEVEL LIVE REGISTERS
2373
2374 Find the locations in the function where any use of a pseudo can
2375 reach in the backwards direction. In and out bitvectors are built
2376 for each basic block. We only track pseudo registers that have a
2377 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2378 contain two bits corresponding to each of the subwords.
2379
2380 ----------------------------------------------------------------------------*/
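/* Illustrative sketch, not part of the problem implementation: once
   df_word_lr_add_problem () and df_analyze () have run, bit 2*REGNO of
   a block's solution tracks the low word of pseudo REGNO and bit
   2*REGNO + 1 tracks the high word.  example_high_word_live_p is a
   hypothetical helper name.  */
#if 0
static bool
example_high_word_live_p (basic_block bb, unsigned int regno)
{
  struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);

  /* Only pseudos whose mode is 2 * UNITS_PER_WORD wide are tracked.  */
  gcc_assert (regno >= FIRST_PSEUDO_REGISTER);
  return bb_info && bitmap_bit_p (&bb_info->out, 2 * regno + 1);
}
#endif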
2381
2382 /* Private data used to verify the solution for this problem. */
2383 struct df_word_lr_problem_data
2384 {
2385 /* An obstack for the bitmaps we need for this problem. */
2386 bitmap_obstack word_lr_bitmaps;
2387 };
2388
2389
2390 /* Free basic block info. */
2391
2392 static void
2393 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2394 void *vbb_info)
2395 {
2396 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2397 if (bb_info)
2398 {
2399 bitmap_clear (&bb_info->use);
2400 bitmap_clear (&bb_info->def);
2401 bitmap_clear (&bb_info->in);
2402 bitmap_clear (&bb_info->out);
2403 }
2404 }
2405
2406
2407 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2408 not touched unless the block is new. */
2409
2410 static void
2411 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2412 {
2413 unsigned int bb_index;
2414 bitmap_iterator bi;
2415 basic_block bb;
2416 struct df_word_lr_problem_data *problem_data
2417 = XNEW (struct df_word_lr_problem_data);
2418
2419 df_word_lr->problem_data = problem_data;
2420
2421 df_grow_bb_info (df_word_lr);
2422
2423 /* Create the mapping from regnos to slots. This does not change
2424 unless the problem is destroyed and recreated. In particular, if
2425 we end up deleting the only insn that used a subreg, we do not
2426 want to redo the mapping because this would invalidate everything
2427 else. */
2428
2429 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2430
2431 FOR_EACH_BB (bb)
2432 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2433
2434 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2435 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2436
2437 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2438 {
2439 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2440
2441 /* When bitmaps are already initialized, just clear them. */
2442 if (bb_info->use.obstack)
2443 {
2444 bitmap_clear (&bb_info->def);
2445 bitmap_clear (&bb_info->use);
2446 }
2447 else
2448 {
2449 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2450 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2451 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2452 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2453 }
2454 }
2455
2456 df_word_lr->optional_p = true;
2457 }
2458
2459
2460 /* Reset the global solution for recalculation. */
2461
2462 static void
2463 df_word_lr_reset (bitmap all_blocks)
2464 {
2465 unsigned int bb_index;
2466 bitmap_iterator bi;
2467
2468 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2469 {
2470 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2471 gcc_assert (bb_info);
2472 bitmap_clear (&bb_info->in);
2473 bitmap_clear (&bb_info->out);
2474 }
2475 }
2476
2477 /* Examine REF, and if it is for a reg we're interested in, set or
2478 clear the bits corresponding to its subwords from the bitmap
2479 according to IS_SET. LIVE is the bitmap we should update. We do
2480 not track hard regs or pseudos of any size other than 2 *
2481 UNITS_PER_WORD.
2482 We return true if we changed the bitmap, or if we encountered a register
2483 we're not tracking. */
2484
2485 bool
2486 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2487 {
2488 rtx orig_reg = DF_REF_REG (ref);
2489 rtx reg = orig_reg;
2490 enum machine_mode reg_mode;
2491 unsigned regno;
2492 /* Left at -1 for whole accesses. */
2493 int which_subword = -1;
2494 bool changed = false;
2495
2496 if (GET_CODE (reg) == SUBREG)
2497 reg = SUBREG_REG (orig_reg);
2498 regno = REGNO (reg);
2499 reg_mode = GET_MODE (reg);
2500 if (regno < FIRST_PSEUDO_REGISTER
2501 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2502 return true;
2503
2504 if (GET_CODE (orig_reg) == SUBREG
2505 && df_read_modify_subreg_p (orig_reg))
2506 {
2507 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2508 if (subreg_lowpart_p (orig_reg))
2509 which_subword = 0;
2510 else
2511 which_subword = 1;
2512 }
2513 if (is_set)
2514 {
2515 if (which_subword != 1)
2516 changed |= bitmap_set_bit (live, regno * 2);
2517 if (which_subword != 0)
2518 changed |= bitmap_set_bit (live, regno * 2 + 1);
2519 }
2520 else
2521 {
2522 if (which_subword != 1)
2523 changed |= bitmap_clear_bit (live, regno * 2);
2524 if (which_subword != 0)
2525 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2526 }
2527 return changed;
2528 }
2529
2530 /* Compute local live register info for basic block BB. */
2531
2532 static void
2533 df_word_lr_bb_local_compute (unsigned int bb_index)
2534 {
2535 basic_block bb = BASIC_BLOCK (bb_index);
2536 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2537 rtx insn;
2538 df_ref *def_rec;
2539 df_ref *use_rec;
2540
2541 /* Ensure that artificial refs don't contain references to pseudos. */
2542 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2543 {
2544 df_ref def = *def_rec;
2545 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2546 }
2547
2548 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2549 {
2550 df_ref use = *use_rec;
2551 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2552 }
2553
2554 FOR_BB_INSNS_REVERSE (bb, insn)
2555 {
2556 unsigned int uid = INSN_UID (insn);
2557
2558 if (!NONDEBUG_INSN_P (insn))
2559 continue;
2560 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2561 {
2562 df_ref def = *def_rec;
2563 /* If the def is to only part of the reg, it does
2564 not kill the other defs that reach here. */
2565 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2566 {
2567 df_word_lr_mark_ref (def, true, &bb_info->def);
2568 df_word_lr_mark_ref (def, false, &bb_info->use);
2569 }
2570 }
2571 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2572 {
2573 df_ref use = *use_rec;
2574 df_word_lr_mark_ref (use, true, &bb_info->use);
2575 }
2576 }
2577 }
2578
2579
2580 /* Compute local live register info for each basic block within BLOCKS. */
2581
2582 static void
2583 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2584 {
2585 unsigned int bb_index;
2586 bitmap_iterator bi;
2587
2588 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2589 {
2590 if (bb_index == EXIT_BLOCK)
2591 {
2592 unsigned regno;
2593 bitmap_iterator bi;
2594 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2595 regno, bi)
2596 gcc_unreachable ();
2597 }
2598 else
2599 df_word_lr_bb_local_compute (bb_index);
2600 }
2601
2602 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2603 }
2604
2605
2606 /* Initialize the solution vectors. */
2607
2608 static void
2609 df_word_lr_init (bitmap all_blocks)
2610 {
2611 unsigned int bb_index;
2612 bitmap_iterator bi;
2613
2614 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2615 {
2616 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2617 bitmap_copy (&bb_info->in, &bb_info->use);
2618 bitmap_clear (&bb_info->out);
2619 }
2620 }
2621
2622
2623 /* Confluence function that ignores fake edges. */
2624
2625 static bool
2626 df_word_lr_confluence_n (edge e)
2627 {
2628 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2629 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2630
2631 return bitmap_ior_into (op1, op2);
2632 }
2633
2634
2635 /* Transfer function. */
2636
2637 static bool
2638 df_word_lr_transfer_function (int bb_index)
2639 {
2640 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2641 bitmap in = &bb_info->in;
2642 bitmap out = &bb_info->out;
2643 bitmap use = &bb_info->use;
2644 bitmap def = &bb_info->def;
2645
2646 return bitmap_ior_and_compl (in, use, out, def);
2647 }
2648
2649
2650 /* Free all storage associated with the problem. */
2651
2652 static void
2653 df_word_lr_free (void)
2654 {
2655 struct df_word_lr_problem_data *problem_data
2656 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2657
2658 if (df_word_lr->block_info)
2659 {
2660 df_word_lr->block_info_size = 0;
2661 free (df_word_lr->block_info);
2662 df_word_lr->block_info = NULL;
2663 }
2664
2665 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2666 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2667 free (problem_data);
2668 free (df_word_lr);
2669 }
2670
2671
2672 /* Debugging info at top of bb. */
2673
2674 static void
2675 df_word_lr_top_dump (basic_block bb, FILE *file)
2676 {
2677 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2678 if (!bb_info)
2679 return;
2680
2681 fprintf (file, ";; blr in \t");
2682 df_print_word_regset (file, &bb_info->in);
2683 fprintf (file, ";; blr use \t");
2684 df_print_word_regset (file, &bb_info->use);
2685 fprintf (file, ";; blr def \t");
2686 df_print_word_regset (file, &bb_info->def);
2687 }
2688
2689
2690 /* Debugging info at bottom of bb. */
2691
2692 static void
2693 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2694 {
2695 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2696 if (!bb_info)
2697 return;
2698
2699 fprintf (file, ";; blr out \t");
2700 df_print_word_regset (file, &bb_info->out);
2701 }
2702
2703
2704 /* All of the information associated with every instance of the problem. */
2705
2706 static struct df_problem problem_WORD_LR =
2707 {
2708 DF_WORD_LR, /* Problem id. */
2709 DF_BACKWARD, /* Direction. */
2710 df_word_lr_alloc, /* Allocate the problem specific data. */
2711 df_word_lr_reset, /* Reset global information. */
2712 df_word_lr_free_bb_info, /* Free basic block info. */
2713 df_word_lr_local_compute, /* Local compute function. */
2714 df_word_lr_init, /* Init the solution specific data. */
2715 df_worklist_dataflow, /* Worklist solver. */
2716 NULL, /* Confluence operator 0. */
2717 df_word_lr_confluence_n, /* Confluence operator n. */
2718 df_word_lr_transfer_function, /* Transfer function. */
2719 NULL, /* Finalize function. */
2720 df_word_lr_free, /* Free all of the problem information. */
2721 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2722 NULL, /* Debugging. */
2723 df_word_lr_top_dump, /* Debugging start block. */
2724 df_word_lr_bottom_dump, /* Debugging end block. */
2725 NULL, /* Debugging start insn. */
2726 NULL, /* Debugging end insn. */
2727 NULL, /* Incremental solution verify start. */
2728 NULL, /* Incremental solution verify end. */
2729 NULL, /* Dependent problem. */
2730 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2731 TV_DF_WORD_LR, /* Timing variable. */
2732 false /* Reset blocks on dropping out of blocks_to_analyze. */
2733 };
2734
2735
2736 /* Create a new DATAFLOW instance and add it to an existing instance
2737 of DF. The returned structure is what is used to get at the
2738 solution. */
2739
2740 void
2741 df_word_lr_add_problem (void)
2742 {
2743 df_add_problem (&problem_WORD_LR);
2744 /* These will be initialized when df_scan_blocks processes each
2745 block. */
2746 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2747 }
2748
2749
2750 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2751 any bits, which is used by the caller to determine whether a set is
2752 necessary. We also return true if there are other reasons not to delete
2753 an insn. */
2754
2755 bool
2756 df_word_lr_simulate_defs (rtx insn, bitmap live)
2757 {
2758 bool changed = false;
2759 df_ref *def_rec;
2760 unsigned int uid = INSN_UID (insn);
2761
2762 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2763 {
2764 df_ref def = *def_rec;
2765 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2766 changed = true;
2767 else
2768 changed |= df_word_lr_mark_ref (*def_rec, false, live);
2769 }
2770 return changed;
2771 }
2772
2773
2774 /* Simulate the effects of the uses of INSN on LIVE. */
2775
2776 void
2777 df_word_lr_simulate_uses (rtx insn, bitmap live)
2778 {
2779 df_ref *use_rec;
2780 unsigned int uid = INSN_UID (insn);
2781
2782 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2783 df_word_lr_mark_ref (*use_rec, true, live);
2784 }
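/* Illustrative sketch, not part of this file's interface: how a
   word-level dead-code style scan might drive the two simulate
   functions above, working backwards from a copy of the block's
   live-out set (dce.c is the real consumer of this interface;
   example_word_level_scan is a hypothetical name).  */
#if 0
static void
example_word_level_scan (basic_block bb)
{
  bitmap_head live;
  rtx insn;

  bitmap_initialize (&live, &bitmap_default_obstack);
  bitmap_copy (&live, &df_word_lr_get_bb_info (bb->index)->out);

  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
	/* If simulating the defs changes no bits, no word written by
	   this insn is live below it, so the insn is a deletion
	   candidate; otherwise its uses become live.  */
	if (df_word_lr_simulate_defs (insn, &live))
	  df_word_lr_simulate_uses (insn, &live);
      }

  bitmap_clear (&live);
}
#endif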
2785 \f
2786 /*----------------------------------------------------------------------------
2787 This problem computes REG_DEAD and REG_UNUSED notes.
2788 ----------------------------------------------------------------------------*/
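/* Illustrative sketch, not part of the implementation: a pass that
   needs trustworthy REG_DEAD / REG_UNUSED notes adds this problem and
   reanalyzes, after which the ordinary rtl note queries reflect the
   recomputed information.  example_reg_dead_note_p is a hypothetical
   name; in a real pass the add/analyze calls happen once, up front.  */
#if 0
static bool
example_reg_dead_note_p (rtx insn, rtx reg)
{
  df_note_add_problem ();
  df_analyze ();

  return find_reg_note (insn, REG_DEAD, reg) != NULL_RTX;
}
#endif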
2789
2790 static void
2791 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2792 {
2793 df_note->optional_p = true;
2794 }
2795
2796 /* This is only used if REG_DEAD_DEBUGGING is in effect. */
2797 static void
2798 df_print_note (const char *prefix, rtx insn, rtx note)
2799 {
2800 if (dump_file)
2801 {
2802 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2803 print_rtl (dump_file, note);
2804 fprintf (dump_file, "\n");
2805 }
2806 }
2807
2808
2809 /* After reg-stack, the x86 floating point stack regs are difficult to
2810 analyze because of all of the pushes, pops and rotations. Thus, we
2811 just leave the notes alone. */
2812
2813 #ifdef STACK_REGS
2814 static inline bool
2815 df_ignore_stack_reg (int regno)
2816 {
2817 return regstack_completed
2818 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2819 }
2820 #else
2821 static inline bool
2822 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2823 {
2824 return false;
2825 }
2826 #endif
2827
2828
2829 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
2830
2831 static void
2832 df_remove_dead_and_unused_notes (rtx insn)
2833 {
2834 rtx *pprev = &REG_NOTES (insn);
2835 rtx link = *pprev;
2836
2837 while (link)
2838 {
2839 switch (REG_NOTE_KIND (link))
2840 {
2841 case REG_DEAD:
2842 /* After reg-stack, we need to ignore any REG_DEAD notes
2843 for the stack registers. */
2844 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2845 {
2846 pprev = &XEXP (link, 1);
2847 link = *pprev;
2848 }
2849 else
2850 {
2851 rtx next = XEXP (link, 1);
2852 if (REG_DEAD_DEBUGGING)
2853 df_print_note ("deleting: ", insn, link);
2854 free_EXPR_LIST_node (link);
2855 *pprev = link = next;
2856 }
2857 break;
2858
2859 case REG_UNUSED:
2860 /* After reg-stack, we need to ignore any unused notes
2861 for the stack registers. */
2862 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2863 {
2864 pprev = &XEXP (link, 1);
2865 link = *pprev;
2866 }
2867 else
2868 {
2869 rtx next = XEXP (link, 1);
2870 if (REG_DEAD_DEBUGGING)
2871 df_print_note ("deleting: ", insn, link);
2872 free_EXPR_LIST_node (link);
2873 *pprev = link = next;
2874 }
2875 break;
2876
2877 default:
2878 pprev = &XEXP (link, 1);
2879 link = *pprev;
2880 break;
2881 }
2882 }
2883 }
2884
2885 /* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE
2886 as the bitmap of currently live registers. */
2887
2888 static void
2889 df_remove_dead_eq_notes (rtx insn, bitmap live)
2890 {
2891 rtx *pprev = &REG_NOTES (insn);
2892 rtx link = *pprev;
2893
2894 while (link)
2895 {
2896 switch (REG_NOTE_KIND (link))
2897 {
2898 case REG_EQUAL:
2899 case REG_EQUIV:
2900 {
2901 /* Remove the notes that refer to dead registers. As we have at most
2902 one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
2903 so we need to purge the complete EQ_USES vector when removing
2904 the note using df_notes_rescan. */
2905 df_ref *use_rec;
2906 bool deleted = false;
2907
2908 for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
2909 {
2910 df_ref use = *use_rec;
2911 if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
2912 && DF_REF_LOC (use)
2913 && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
2914 && ! bitmap_bit_p (live, DF_REF_REGNO (use))
2915 && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
2916 {
2917 deleted = true;
2918 break;
2919 }
2920 }
2921 if (deleted)
2922 {
2923 rtx next;
2924 if (REG_DEAD_DEBUGGING)
2925 df_print_note ("deleting: ", insn, link);
2926 next = XEXP (link, 1);
2927 free_EXPR_LIST_node (link);
2928 *pprev = link = next;
2929 df_notes_rescan (insn);
2930 }
2931 else
2932 {
2933 pprev = &XEXP (link, 1);
2934 link = *pprev;
2935 }
2936 break;
2937 }
2938
2939 default:
2940 pprev = &XEXP (link, 1);
2941 link = *pprev;
2942 break;
2943 }
2944 }
2945 }
2946
2947 /* Set a NOTE_TYPE note for REG in INSN. */
2948
2949 static inline void
2950 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2951 {
2952 gcc_checking_assert (!DEBUG_INSN_P (insn));
2953 add_reg_note (insn, note_type, reg);
2954 }
2955
2956 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2957 arguments. Return true if the register value described by MWS's
2958 mw_reg is known to be completely unused, and if mw_reg can therefore
2959 be used in a REG_UNUSED note. */
2960
2961 static bool
2962 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2963 bitmap live, bitmap artificial_uses)
2964 {
2965 unsigned int r;
2966
2967 /* If MWS describes a partial reference, create REG_UNUSED notes for
2968 individual hard registers. */
2969 if (mws->flags & DF_REF_PARTIAL)
2970 return false;
2971
2972 /* Likewise if some part of the register is used. */
2973 for (r = mws->start_regno; r <= mws->end_regno; r++)
2974 if (bitmap_bit_p (live, r)
2975 || bitmap_bit_p (artificial_uses, r))
2976 return false;
2977
2978 gcc_assert (REG_P (mws->mw_reg));
2979 return true;
2980 }
2981
2982
2983 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2984 based on the bits in LIVE. Do not generate notes for registers in
2985 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2986 not generated if the reg is both read and written by the
2987 instruction.
2988 */
2989
2990 static void
2991 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2992 bitmap live, bitmap do_not_gen,
2993 bitmap artificial_uses,
2994 struct dead_debug_local *debug)
2995 {
2996 unsigned int r;
2997
2998 if (REG_DEAD_DEBUGGING && dump_file)
2999 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
3000 mws->start_regno, mws->end_regno);
3001
3002 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
3003 {
3004 unsigned int regno = mws->start_regno;
3005 df_set_note (REG_UNUSED, insn, mws->mw_reg);
3006 dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3007
3008 if (REG_DEAD_DEBUGGING)
3009 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3010
3011 bitmap_set_bit (do_not_gen, regno);
3012 /* Only do this if the value is totally dead. */
3013 }
3014 else
3015 for (r = mws->start_regno; r <= mws->end_regno; r++)
3016 {
3017 if (!bitmap_bit_p (live, r)
3018 && !bitmap_bit_p (artificial_uses, r))
3019 {
3020 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
3021 dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
3022 if (REG_DEAD_DEBUGGING)
3023 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3024 }
3025 bitmap_set_bit (do_not_gen, r);
3026 }
3027 }
3028
3029
3030 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
3031 arguments. Return true if the register value described by MWS's
3032 mw_reg is known to be completely dead, and if mw_reg can therefore
3033 be used in a REG_DEAD note. */
3034
3035 static bool
3036 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
3037 bitmap live, bitmap artificial_uses,
3038 bitmap do_not_gen)
3039 {
3040 unsigned int r;
3041
3042 /* If MWS describes a partial reference, create REG_DEAD notes for
3043 individual hard registers. */
3044 if (mws->flags & DF_REF_PARTIAL)
3045 return false;
3046
3047 /* Likewise if some part of the register is not dead. */
3048 for (r = mws->start_regno; r <= mws->end_regno; r++)
3049 if (bitmap_bit_p (live, r)
3050 || bitmap_bit_p (artificial_uses, r)
3051 || bitmap_bit_p (do_not_gen, r))
3052 return false;
3053
3054 gcc_assert (REG_P (mws->mw_reg));
3055 return true;
3056 }
3057
3058 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3059 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
3060 from being set if the instruction both reads and writes the
3061 register. */
3062
3063 static void
3064 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3065 bitmap live, bitmap do_not_gen,
3066 bitmap artificial_uses, bool *added_notes_p)
3067 {
3068 unsigned int r;
3069 bool is_debug = *added_notes_p;
3070
3071 *added_notes_p = false;
3072
3073 if (REG_DEAD_DEBUGGING && dump_file)
3074 {
3075 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
3076 mws->start_regno, mws->end_regno);
3077 df_print_regset (dump_file, do_not_gen);
3078 fprintf (dump_file, " live =");
3079 df_print_regset (dump_file, live);
3080 fprintf (dump_file, " artificial uses =");
3081 df_print_regset (dump_file, artificial_uses);
3082 }
3083
3084 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
3085 {
3086 if (is_debug)
3087 {
3088 *added_notes_p = true;
3089 return;
3090 }
3091 /* Add a dead note for the entire multi word register. */
3092 df_set_note (REG_DEAD, insn, mws->mw_reg);
3093 if (REG_DEAD_DEBUGGING)
3094 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3095 }
3096 else
3097 {
3098 for (r = mws->start_regno; r <= mws->end_regno; r++)
3099 if (!bitmap_bit_p (live, r)
3100 && !bitmap_bit_p (artificial_uses, r)
3101 && !bitmap_bit_p (do_not_gen, r))
3102 {
3103 if (is_debug)
3104 {
3105 *added_notes_p = true;
3106 return;
3107 }
3108 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
3109 if (REG_DEAD_DEBUGGING)
3110 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3111 }
3112 }
3113 return;
3114 }
3115
3116
3117 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3118 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3119
3120 static void
3121 df_create_unused_note (rtx insn, df_ref def,
3122 bitmap live, bitmap artificial_uses,
3123 struct dead_debug_local *debug)
3124 {
3125 unsigned int dregno = DF_REF_REGNO (def);
3126
3127 if (REG_DEAD_DEBUGGING && dump_file)
3128 {
3129 fprintf (dump_file, " regular looking at def ");
3130 df_ref_debug (def, dump_file);
3131 }
3132
3133 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3134 || bitmap_bit_p (live, dregno)
3135 || bitmap_bit_p (artificial_uses, dregno)
3136 || df_ignore_stack_reg (dregno)))
3137 {
3138 rtx reg = (DF_REF_LOC (def))
3139 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3140 df_set_note (REG_UNUSED, insn, reg);
3141 dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3142 if (REG_DEAD_DEBUGGING)
3143 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3144 }
3145
3146 return;
3147 }
3148
3149
3150 /* Recompute the REG_DEAD and REG_UNUSED notes for basic block BB.
3151 LIVE, DO_NOT_GEN and ARTIFICIAL_USES are the three scratch
3152 bitmaps used here. */
3153
3154 static void
3155 df_note_bb_compute (unsigned int bb_index,
3156 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3157 {
3158 basic_block bb = BASIC_BLOCK (bb_index);
3159 rtx insn;
3160 df_ref *def_rec;
3161 df_ref *use_rec;
3162 struct dead_debug_local debug;
3163
3164 dead_debug_local_init (&debug, NULL, NULL);
3165
3166 bitmap_copy (live, df_get_live_out (bb));
3167 bitmap_clear (artificial_uses);
3168
3169 if (REG_DEAD_DEBUGGING && dump_file)
3170 {
3171 fprintf (dump_file, "live at bottom ");
3172 df_print_regset (dump_file, live);
3173 }
3174
3175 /* Process the artificial defs and uses at the bottom of the block
3176 to begin processing. */
3177 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3178 {
3179 df_ref def = *def_rec;
3180
3181 if (REG_DEAD_DEBUGGING && dump_file)
3182 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3183
3184 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3185 bitmap_clear_bit (live, DF_REF_REGNO (def));
3186 }
3187
3188 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3189 {
3190 df_ref use = *use_rec;
3191 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3192 {
3193 unsigned int regno = DF_REF_REGNO (use);
3194 bitmap_set_bit (live, regno);
3195
3196 /* Notes are not generated for any of the artificial registers
3197 at the bottom of the block. */
3198 bitmap_set_bit (artificial_uses, regno);
3199 }
3200 }
3201
3202 if (REG_DEAD_DEBUGGING && dump_file)
3203 {
3204 fprintf (dump_file, "live before artificials out ");
3205 df_print_regset (dump_file, live);
3206 }
3207
3208 FOR_BB_INSNS_REVERSE (bb, insn)
3209 {
3210 unsigned int uid = INSN_UID (insn);
3211 struct df_mw_hardreg **mws_rec;
3212 int debug_insn;
3213
3214 if (!INSN_P (insn))
3215 continue;
3216
3217 debug_insn = DEBUG_INSN_P (insn);
3218
3219 bitmap_clear (do_not_gen);
3220 df_remove_dead_and_unused_notes (insn);
3221
3222 /* Process the defs. */
3223 if (CALL_P (insn))
3224 {
3225 if (REG_DEAD_DEBUGGING && dump_file)
3226 {
3227 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3228 df_print_regset (dump_file, live);
3229 }
3230
3231 /* We only care about real sets for calls. Clobbers cannot
3232 be depended on to really die. */
3233 mws_rec = DF_INSN_UID_MWS (uid);
3234 while (*mws_rec)
3235 {
3236 struct df_mw_hardreg *mws = *mws_rec;
3237 if ((DF_MWS_REG_DEF_P (mws))
3238 && !df_ignore_stack_reg (mws->start_regno))
3239 df_set_unused_notes_for_mw (insn,
3240 mws, live, do_not_gen,
3241 artificial_uses, &debug);
3242 mws_rec++;
3243 }
3244
3245 /* All of the defs except the return value are some sort of
3246 clobber. This code is for the return. */
3247 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3248 {
3249 df_ref def = *def_rec;
3250 unsigned int dregno = DF_REF_REGNO (def);
3251 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3252 {
3253 df_create_unused_note (insn,
3254 def, live, artificial_uses, &debug);
3255 bitmap_set_bit (do_not_gen, dregno);
3256 }
3257
3258 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3259 bitmap_clear_bit (live, dregno);
3260 }
3261 }
3262 else
3263 {
3264 /* Regular insn. */
3265 mws_rec = DF_INSN_UID_MWS (uid);
3266 while (*mws_rec)
3267 {
3268 struct df_mw_hardreg *mws = *mws_rec;
3269 if (DF_MWS_REG_DEF_P (mws))
3270 df_set_unused_notes_for_mw (insn,
3271 mws, live, do_not_gen,
3272 artificial_uses, &debug);
3273 mws_rec++;
3274 }
3275
3276 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3277 {
3278 df_ref def = *def_rec;
3279 unsigned int dregno = DF_REF_REGNO (def);
3280 df_create_unused_note (insn,
3281 def, live, artificial_uses, &debug);
3282
3283 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3284 bitmap_set_bit (do_not_gen, dregno);
3285
3286 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3287 bitmap_clear_bit (live, dregno);
3288 }
3289 }
3290
3291 /* Process the uses. */
3292 mws_rec = DF_INSN_UID_MWS (uid);
3293 while (*mws_rec)
3294 {
3295 struct df_mw_hardreg *mws = *mws_rec;
3296 if (DF_MWS_REG_USE_P (mws)
3297 && !df_ignore_stack_reg (mws->start_regno))
3298 {
3299 bool really_add_notes = debug_insn != 0;
3300
3301 df_set_dead_notes_for_mw (insn,
3302 mws, live, do_not_gen,
3303 artificial_uses,
3304 &really_add_notes);
3305
3306 if (really_add_notes)
3307 debug_insn = -1;
3308 }
3309 mws_rec++;
3310 }
3311
3312 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3313 {
3314 df_ref use = *use_rec;
3315 unsigned int uregno = DF_REF_REGNO (use);
3316
3317 if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
3318 {
3319 fprintf (dump_file, " regular looking at use ");
3320 df_ref_debug (use, dump_file);
3321 }
3322
3323 if (!bitmap_bit_p (live, uregno))
3324 {
3325 if (debug_insn)
3326 {
3327 if (debug_insn > 0)
3328 {
3329 /* We won't add REG_UNUSED or REG_DEAD notes for
3330 these, so we don't have to mess with them in
3331 debug insns either. */
3332 if (!bitmap_bit_p (artificial_uses, uregno)
3333 && !df_ignore_stack_reg (uregno))
3334 dead_debug_add (&debug, use, uregno);
3335 continue;
3336 }
3337 break;
3338 }
3339 else
3340 dead_debug_insert_temp (&debug, uregno, insn,
3341 DEBUG_TEMP_BEFORE_WITH_REG);
3342
3343 if ( (!(DF_REF_FLAGS (use)
3344 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3345 && (!bitmap_bit_p (do_not_gen, uregno))
3346 && (!bitmap_bit_p (artificial_uses, uregno))
3347 && (!df_ignore_stack_reg (uregno)))
3348 {
3349 rtx reg = (DF_REF_LOC (use))
3350 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3351 df_set_note (REG_DEAD, insn, reg);
3352
3353 if (REG_DEAD_DEBUGGING)
3354 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3355 }
3356 /* This register is now live. */
3357 bitmap_set_bit (live, uregno);
3358 }
3359 }
3360
3361 df_remove_dead_eq_notes (insn, live);
3362
3363 if (debug_insn == -1)
3364 {
3365 /* ??? We could probably do better here, replacing dead
3366 registers with their definitions. */
3367 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3368 df_insn_rescan_debug_internal (insn);
3369 }
3370 }
3371
3372 dead_debug_local_finish (&debug, NULL);
3373 }
3374
3375
3376 /* Recompute the REG_DEAD and REG_UNUSED notes for every block in ALL_BLOCKS. */
3377 static void
3378 df_note_compute (bitmap all_blocks)
3379 {
3380 unsigned int bb_index;
3381 bitmap_iterator bi;
3382 bitmap_head live, do_not_gen, artificial_uses;
3383
3384 bitmap_initialize (&live, &df_bitmap_obstack);
3385 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3386 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3387
3388 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3389 {
3390 /* ??? Unlike fast DCE, we don't use global_debug for uses of dead
3391 pseudos in debug insns because we don't always (re)visit blocks
3392 with death points after visiting dead uses. Even changing this
3393 loop to postorder would still leave room for visiting a death
3394 point before visiting a subsequent debug use. */
3395 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3396 }
3397
3398 bitmap_clear (&live);
3399 bitmap_clear (&do_not_gen);
3400 bitmap_clear (&artificial_uses);
3401 }
3402
3403
3404 /* Free all storage associated with the problem. */
3405
3406 static void
3407 df_note_free (void)
3408 {
3409 free (df_note);
3410 }
3411
3412
3413 /* All of the information associated with every instance of the problem. */
3414
3415 static struct df_problem problem_NOTE =
3416 {
3417 DF_NOTE, /* Problem id. */
3418 DF_NONE, /* Direction. */
3419 df_note_alloc, /* Allocate the problem specific data. */
3420 NULL, /* Reset global information. */
3421 NULL, /* Free basic block info. */
3422 df_note_compute, /* Local compute function. */
3423 NULL, /* Init the solution specific data. */
3424 NULL, /* Iterative solver. */
3425 NULL, /* Confluence operator 0. */
3426 NULL, /* Confluence operator n. */
3427 NULL, /* Transfer function. */
3428 NULL, /* Finalize function. */
3429 df_note_free, /* Free all of the problem information. */
3430 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3431 NULL, /* Debugging. */
3432 NULL, /* Debugging start block. */
3433 NULL, /* Debugging end block. */
3434 NULL, /* Debugging start insn. */
3435 NULL, /* Debugging end insn. */
3436 NULL, /* Incremental solution verify start. */
3437 NULL, /* Incremental solution verify end. */
3438 &problem_LR, /* Dependent problem. */
3439 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3440 TV_DF_NOTE, /* Timing variable. */
3441 false /* Reset blocks on dropping out of blocks_to_analyze. */
3442 };
3443
3444
3445 /* Create a new DATAFLOW instance and add it to an existing instance
3446 of DF. The returned structure is what is used to get at the
3447 solution. */
3448
3449 void
3450 df_note_add_problem (void)
3451 {
3452 df_add_problem (&problem_NOTE);
3453 }
3454
3455
3456
3457 \f
3458 /*----------------------------------------------------------------------------
3459 Functions for simulating the effects of single insns.
3460
3461 You can simulate either in the forwards direction, starting from
3462 the top of a block, or in the backwards direction, starting from the end of the
3463 block. If you go backwards, defs are examined first to clear bits,
3464 then uses are examined to set bits. If you go forwards, defs are
3465 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3466 are examined to clear bits. In either case, the result of examining
3467 a def can be undone (respectively by a use or a REG_UNUSED note).
3468
3469 If you start at the top of the block, use one of DF_LIVE_IN or
3470 DF_LR_IN. If you start at the bottom of the block use one of
3471 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3472 THEY WILL BE DESTROYED.
3473 ----------------------------------------------------------------------------*/
3474
3475
3476 /* Find the set of DEFs for INSN. */
3477
3478 void
3479 df_simulate_find_defs (rtx insn, bitmap defs)
3480 {
3481 df_ref *def_rec;
3482 unsigned int uid = INSN_UID (insn);
3483
3484 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3485 {
3486 df_ref def = *def_rec;
3487 bitmap_set_bit (defs, DF_REF_REGNO (def));
3488 }
3489 }
3490
3491 /* Find the set of uses for INSN. This includes partial defs. */
3492
3493 static void
3494 df_simulate_find_uses (rtx insn, bitmap uses)
3495 {
3496 df_ref *rec;
3497 unsigned int uid = INSN_UID (insn);
3498
3499 for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
3500 {
3501 df_ref def = *rec;
3502 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3503 bitmap_set_bit (uses, DF_REF_REGNO (def));
3504 }
3505 for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
3506 {
3507 df_ref use = *rec;
3508 bitmap_set_bit (uses, DF_REF_REGNO (use));
3509 }
3510 }
3511
3512 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3513
3514 void
3515 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3516 {
3517 df_ref *def_rec;
3518 unsigned int uid = INSN_UID (insn);
3519
3520 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3521 {
3522 df_ref def = *def_rec;
3523 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3524 bitmap_set_bit (defs, DF_REF_REGNO (def));
3525 }
3526 }
3527
3528
3529 /* Simulate the effects of the defs of INSN on LIVE. */
3530
3531 void
3532 df_simulate_defs (rtx insn, bitmap live)
3533 {
3534 df_ref *def_rec;
3535 unsigned int uid = INSN_UID (insn);
3536
3537 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3538 {
3539 df_ref def = *def_rec;
3540 unsigned int dregno = DF_REF_REGNO (def);
3541
3542 /* If the def is to only part of the reg, it does
3543 not kill the other defs that reach here. */
3544 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3545 bitmap_clear_bit (live, dregno);
3546 }
3547 }
3548
3549
3550 /* Simulate the effects of the uses of INSN on LIVE. */
3551
3552 void
3553 df_simulate_uses (rtx insn, bitmap live)
3554 {
3555 df_ref *use_rec;
3556 unsigned int uid = INSN_UID (insn);
3557
3558 if (DEBUG_INSN_P (insn))
3559 return;
3560
3561 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3562 {
3563 df_ref use = *use_rec;
3564 /* Add use to set of uses in this BB. */
3565 bitmap_set_bit (live, DF_REF_REGNO (use));
3566 }
3567 }
3568
3569
3570 /* Add back the always live regs in BB to LIVE. */
3571
3572 static inline void
3573 df_simulate_fixup_sets (basic_block bb, bitmap live)
3574 {
3575 /* These regs are considered always live so if they end up dying
3576 because of some def, we need to bring them back again. */
3577 if (bb_has_eh_pred (bb))
3578 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3579 else
3580 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3581 }
3582
3583
3584 /*----------------------------------------------------------------------------
3585 The following three functions are used only for BACKWARDS scanning:
3586 i.e. they process the defs before the uses.
3587
3588 df_simulate_initialize_backwards should be called first with a
3589 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3590 df_simulate_one_insn_backwards should be called for each insn in
3591 the block, starting with the last one. Finally,
3592 df_simulate_finalize_backwards can be called to get a new value
3593 of the sets at the top of the block (this is rarely used).
3594 ----------------------------------------------------------------------------*/
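/* Illustrative sketch, assuming DF_LR has been computed: the full
   backwards recipe described above, recomputing a block's live-in set
   from a copy of its live-out set.  example_recompute_live_in is a
   hypothetical name.  */
#if 0
static void
example_recompute_live_in (basic_block bb, bitmap live)
{
  rtx insn;

  /* Work on a copy; the simulation destroys its input.  */
  bitmap_copy (live, DF_LR_OUT (bb));
  df_simulate_initialize_backwards (bb, live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);

  /* LIVE now holds the set at the top of the block.  */
  df_simulate_finalize_backwards (bb, live);
}
#endif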
3595
3596 /* Apply the artificial uses and defs at the end of BB in a backwards
3597 direction. */
3598
3599 void
3600 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3601 {
3602 df_ref *def_rec;
3603 df_ref *use_rec;
3604 int bb_index = bb->index;
3605
3606 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3607 {
3608 df_ref def = *def_rec;
3609 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3610 bitmap_clear_bit (live, DF_REF_REGNO (def));
3611 }
3612
3613 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3614 {
3615 df_ref use = *use_rec;
3616 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3617 bitmap_set_bit (live, DF_REF_REGNO (use));
3618 }
3619 }
3620
3621
3622 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3623
3624 void
3625 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3626 {
3627 if (!NONDEBUG_INSN_P (insn))
3628 return;
3629
3630 df_simulate_defs (insn, live);
3631 df_simulate_uses (insn, live);
3632 df_simulate_fixup_sets (bb, live);
3633 }
3634
3635
3636 /* Apply the artificial uses and defs at the top of BB in a backwards
3637 direction. */
3638
3639 void
3640 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3641 {
3642 df_ref *def_rec;
3643 #ifdef EH_USES
3644 df_ref *use_rec;
3645 #endif
3646 int bb_index = bb->index;
3647
3648 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3649 {
3650 df_ref def = *def_rec;
3651 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3652 bitmap_clear_bit (live, DF_REF_REGNO (def));
3653 }
3654
3655 #ifdef EH_USES
3656 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3657 {
3658 df_ref use = *use_rec;
3659 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3660 bitmap_set_bit (live, DF_REF_REGNO (use));
3661 }
3662 #endif
3663 }
3664 /*----------------------------------------------------------------------------
3665 The following two functions are used only for FORWARDS scanning:
3666 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3667 Thus it is important to add the DF_NOTES problem to the stack of
3668 problems computed before using these functions.
3669
3670 df_simulate_initialize_forwards should be called first with a
3671 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3672 df_simulate_one_insn_forwards should be called for each insn in
3673 the block, starting with the first one.
3674 ----------------------------------------------------------------------------*/
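/* Illustrative sketch, assuming both DF_LR and DF_NOTE are up to date:
   the forwards recipe, in which the REG_DEAD / REG_UNUSED notes undo
   the optimistic marking of each def.  example_forward_scan is a
   hypothetical name.  */
#if 0
static void
example_forward_scan (basic_block bb, bitmap live)
{
  rtx insn;

  /* Work on a copy; the simulation destroys its input.  */
  bitmap_copy (live, DF_LR_IN (bb));
  df_simulate_initialize_forwards (bb, live);

  FOR_BB_INSNS (bb, insn)
    {
      df_simulate_one_insn_forwards (bb, insn, live);
      /* LIVE now describes the registers live just after INSN.  */
    }
}
#endif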
3675
3676 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3677 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3678 defs live. */
3679
3680 void
3681 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3682 {
3683 df_ref *def_rec;
3684 int bb_index = bb->index;
3685
3686 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3687 {
3688 df_ref def = *def_rec;
3689 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3690 bitmap_set_bit (live, DF_REF_REGNO (def));
3691 }
3692 }
3693
3694 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3695
3696 void
3697 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3698 {
3699 rtx link;
3700 if (! INSN_P (insn))
3701 return;
3702
3703 /* Make sure that DF_NOTE really is an active df problem. */
3704 gcc_assert (df_note);
3705
3706 /* Note that this is the opposite of how the problem is defined, because
3707 in the LR problem defs _kill_ liveness. However, they do so backwards,
3708 while here the scan is performed forwards! So, first assume that the
3709 def is live, and if this is not true REG_UNUSED notes will rectify the
3710 situation. */
3711 df_simulate_find_noclobber_defs (insn, live);
3712
3713 /* Clear all of the registers that go dead. */
3714 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3715 {
3716 switch (REG_NOTE_KIND (link))
3717 {
3718 case REG_DEAD:
3719 case REG_UNUSED:
3720 {
3721 rtx reg = XEXP (link, 0);
3722 int regno = REGNO (reg);
3723 if (HARD_REGISTER_NUM_P (regno))
3724 bitmap_clear_range (live, regno,
3725 hard_regno_nregs[regno][GET_MODE (reg)]);
3726 else
3727 bitmap_clear_bit (live, regno);
3728 }
3729 break;
3730 default:
3731 break;
3732 }
3733 }
3734 df_simulate_fixup_sets (bb, live);
3735 }
3736 \f
3737 /* Used by the next two functions to encode information about the
3738 memory references we found. */
3739 #define MEMREF_NORMAL 1
3740 #define MEMREF_VOLATILE 2
3741
3742 /* A subroutine of can_move_insns_across called through for_each_rtx.
3743 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory is found. */
3744
3745 static int
3746 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3747 {
3748 rtx x = *px;
3749
3750 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3751 return MEMREF_VOLATILE;
3752
3753 if (!MEM_P (x))
3754 return 0;
3755 if (MEM_VOLATILE_P (x))
3756 return MEMREF_VOLATILE;
3757 if (MEM_READONLY_P (x))
3758 return 0;
3759
3760 return MEMREF_NORMAL;
3761 }
3762
3763 /* A subroutine of can_move_insns_across called through note_stores.
3764 DATA points to an integer in which we set either the bit for
3765 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3766 of either kind. */
3767
3768 static void
3769 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3770 void *data ATTRIBUTE_UNUSED)
3771 {
3772 int *pflags = (int *)data;
3773 if (GET_CODE (x) == SUBREG)
3774 x = XEXP (x, 0);
3775 /* Treat stores to SP as stores to memory; this prevents problems
3776 when there are references to the stack frame. */
3777 if (x == stack_pointer_rtx)
3778 *pflags |= MEMREF_VOLATILE;
3779 if (!MEM_P (x))
3780 return;
3781 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3782 }
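
/* Illustrative sketch only, not part of the original file: how the
   two helpers above can be combined to classify the memory behavior
   of a single insn, mirroring the way can_move_insns_across uses
   them below.  The helper name and out-parameters are hypothetical.  */

static void
example_classify_insn_memrefs (rtx insn, int *pmem_refs, int *pmem_sets)
{
  *pmem_sets = 0;

  /* Record memory stores (and stack pointer sets) ...  */
  note_stores (PATTERN (insn), find_memory_stores, pmem_sets);

  /* ... and look for any memory reference in the pattern.  */
  *pmem_refs = for_each_rtx (&PATTERN (insn), find_memory, NULL);

  /* A store is also a reference.  */
  *pmem_refs |= *pmem_sets;
}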
3783
3784 /* Scan BB backwards, using df_simulate functions to keep track of
3785 lifetimes, up to insn POINT. The result is stored in LIVE. */
3786
3787 void
3788 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
3789 {
3790 rtx insn;
3791 bitmap_copy (live, df_get_live_out (bb));
3792 df_simulate_initialize_backwards (bb, live);
3793
3794 /* Scan and update life information until we reach the point we're
3795 interested in. */
3796 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
3797 df_simulate_one_insn_backwards (bb, insn, live);
3798 }
3799
3800 /* Return true if it is safe to move a group of insns, described by
3801 the range FROM to TO, backwards across another group of insns,
3802 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
3803 are no insns between ACROSS_TO and FROM, but they may be in
3804 different basic blocks; MERGE_BB is the block from which the
3805 insns will be moved. The caller must pass in a regset MERGE_LIVE
3806 which specifies the registers live after TO.
3807
3808 This function may be called in one of two cases: either we try to
3809 move identical instructions from all successor blocks into their
3810 predecessor, or we try to move from only one successor block. If
3811 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
3812 the second case. It should contain a set of registers live at the
3813 end of ACROSS_TO which must not be clobbered by moving the insns.
3814 In that case, we're also more careful about moving memory references
3815 and trapping insns.
3816
3817 We return false if it is not safe to move the entire group, but it
3818 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
3819 is set to point at the last movable insn in such a case. */
3820
3821 bool
3822 can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
3823 basic_block merge_bb, regset merge_live,
3824 regset other_branch_live, rtx *pmove_upto)
3825 {
3826 rtx insn, next, max_to;
3827 bitmap merge_set, merge_use, local_merge_live;
3828 bitmap test_set, test_use;
3829 unsigned i, fail = 0;
3830 bitmap_iterator bi;
3831 int memrefs_in_across = 0;
3832 int mem_sets_in_across = 0;
3833 bool trapping_insns_in_across = false;
3834
3835 if (pmove_upto != NULL)
3836 *pmove_upto = NULL_RTX;
3837
3838 /* Find real bounds, ignoring debug insns. */
3839 while (!NONDEBUG_INSN_P (from) && from != to)
3840 from = NEXT_INSN (from);
3841 while (!NONDEBUG_INSN_P (to) && from != to)
3842 to = PREV_INSN (to);
3843
3844 for (insn = across_to; ; insn = next)
3845 {
3846 if (CALL_P (insn))
3847 {
3848 if (RTL_CONST_OR_PURE_CALL_P (insn))
3849 /* Pure functions can read from memory. Const functions can
3850 read from arguments that the ABI has forced onto the stack.
3851 Neither sort of read can be volatile. */
3852 memrefs_in_across |= MEMREF_NORMAL;
3853 else
3854 {
3855 memrefs_in_across |= MEMREF_VOLATILE;
3856 mem_sets_in_across |= MEMREF_VOLATILE;
3857 }
3858 }
3859 if (NONDEBUG_INSN_P (insn))
3860 {
3861 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
3862 NULL);
3863 note_stores (PATTERN (insn), find_memory_stores,
3864 &mem_sets_in_across);
3865 /* This is used just to find sets of the stack pointer. */
3866 memrefs_in_across |= mem_sets_in_across;
3867 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
3868 }
3869 next = PREV_INSN (insn);
3870 if (insn == across_from)
3871 break;
3872 }
3873
3874 /* Collect:
3875 MERGE_SET = set of registers set in MERGE_BB
3876 MERGE_USE = set of registers used in MERGE_BB and live at its top
3877 MERGE_LIVE = set of registers live at the point inside the MERGE
3878 range that we've reached during scanning
3879 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
3880 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
3881 and live before ACROSS_FROM. */
3882
3883 merge_set = BITMAP_ALLOC (&reg_obstack);
3884 merge_use = BITMAP_ALLOC (&reg_obstack);
3885 local_merge_live = BITMAP_ALLOC (&reg_obstack);
3886 test_set = BITMAP_ALLOC (&reg_obstack);
3887 test_use = BITMAP_ALLOC (&reg_obstack);
3888
3889 /* Compute the set of registers set and used in the ACROSS range. */
3890 if (other_branch_live != NULL)
3891 bitmap_copy (test_use, other_branch_live);
3892 df_simulate_initialize_backwards (merge_bb, test_use);
3893 for (insn = across_to; ; insn = next)
3894 {
3895 if (NONDEBUG_INSN_P (insn))
3896 {
3897 df_simulate_find_defs (insn, test_set);
3898 df_simulate_defs (insn, test_use);
3899 df_simulate_uses (insn, test_use);
3900 }
3901 next = PREV_INSN (insn);
3902 if (insn == across_from)
3903 break;
3904 }
3905
3906 /* Compute an upper bound for the number of insns moved, by finding
3907 the first insn in MERGE that sets a register in TEST_USE, or uses
3908 a register in TEST_SET. We also check for calls, trapping operations,
3909 and memory references. */
3910 max_to = NULL_RTX;
3911 for (insn = from; ; insn = next)
3912 {
3913 if (CALL_P (insn))
3914 break;
3915 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3916 break;
3917 if (NONDEBUG_INSN_P (insn))
3918 {
3919 if (may_trap_or_fault_p (PATTERN (insn))
3920 && (trapping_insns_in_across || other_branch_live != NULL))
3921 break;
3922
3923 /* We cannot move memory stores past each other, or move memory
3924 reads past stores, at least not without tracking them and
3925 calling true_dependence on every pair.
3926
3927 If there is no other branch and no memory references or
3928 sets in the ACROSS range, we can move memory references
3929 freely, even volatile ones.
3930
3931 Otherwise, the rules are as follows: volatile memory
3932 references and stores can't be moved at all, and any type
3933 of memory reference can't be moved if there are volatile
3934 accesses or stores in the ACROSS range. That leaves
3935 normal reads, which can be moved, as the trapping case is
3936 dealt with elsewhere. */
3937 if (other_branch_live != NULL || memrefs_in_across != 0)
3938 {
3939 int mem_ref_flags = 0;
3940 int mem_set_flags = 0;
3941 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
3942 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
3943 NULL);
3944 /* Catch sets of the stack pointer. */
3945 mem_ref_flags |= mem_set_flags;
3946
3947 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
3948 break;
3949 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
3950 break;
3951 if (mem_set_flags != 0
3952 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
3953 break;
3954 }
3955 df_simulate_find_uses (insn, merge_use);
3956 /* We're only interested in uses which use a value live at
3957 the top, not one previously set in this block. */
3958 bitmap_and_compl_into (merge_use, merge_set);
3959 df_simulate_find_defs (insn, merge_set);
3960 if (bitmap_intersect_p (merge_set, test_use)
3961 || bitmap_intersect_p (merge_use, test_set))
3962 break;
3963 #ifdef HAVE_cc0
3964 if (!sets_cc0_p (insn))
3965 #endif
3966 max_to = insn;
3967 }
3968 next = NEXT_INSN (insn);
3969 if (insn == to)
3970 break;
3971 }
3972 if (max_to != to)
3973 fail = 1;
3974
3975 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
3976 goto out;
3977
3978 /* Now, lower this upper bound by also taking into account that
3979 a range of insns moved across ACROSS must not leave a register
3980 live at the end that will be clobbered in ACROSS. We need to
3981 find a point where TEST_SET & LIVE == 0.
3982
3983 Insns in the MERGE range that set registers which are also set
3984 in the ACROSS range may still be moved as long as we also move
3985 later insns which use the results of the set, and make the
3986 register dead again. This is verified by the condition stated
3987 above. We only need to test it for registers that are set in
3988 the moved region.
3989
3990 MERGE_LIVE is provided by the caller and holds live registers after
3991 TO. */
3992 bitmap_copy (local_merge_live, merge_live);
3993 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
3994 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
3995
3996 /* We're not interested in registers that aren't set in the moved
3997 region at all. */
3998 bitmap_and_into (local_merge_live, merge_set);
3999 for (;;)
4000 {
4001 if (NONDEBUG_INSN_P (insn))
4002 {
4003 if (!bitmap_intersect_p (test_set, local_merge_live)
4004 #ifdef HAVE_cc0
4005 && !sets_cc0_p (insn)
4006 #endif
4007 )
4008 {
4009 max_to = insn;
4010 break;
4011 }
4012
4013 df_simulate_one_insn_backwards (merge_bb, insn,
4014 local_merge_live);
4015 }
4016 if (insn == from)
4017 {
4018 fail = 1;
4019 goto out;
4020 }
4021 insn = PREV_INSN (insn);
4022 }
4023
4024 if (max_to != to)
4025 fail = 1;
4026
4027 if (pmove_upto)
4028 *pmove_upto = max_to;
4029
4030 /* For small register class machines, don't lengthen lifetimes of
4031 hard registers before reload. */
4032 if (! reload_completed
4033 && targetm.small_register_classes_for_mode_p (VOIDmode))
4034 {
4035 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4036 {
4037 if (i < FIRST_PSEUDO_REGISTER
4038 && ! fixed_regs[i]
4039 && ! global_regs[i])
4040 fail = 1;
4041 }
4042 }
4043
4044 out:
4045 BITMAP_FREE (merge_set);
4046 BITMAP_FREE (merge_use);
4047 BITMAP_FREE (local_merge_live);
4048 BITMAP_FREE (test_set);
4049 BITMAP_FREE (test_use);
4050
4051 return !fail;
4052 }
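
/* Hypothetical caller sketch, not part of the original file: ask
   whether the insns FROM..TO of MERGE_BB can be moved backwards
   across ACROSS_FROM..ACROSS_TO, and report the last insn that can
   be moved when only a prefix of the range is movable.  The helper
   name is an assumption; real callers must supply the registers
   live after TO in MERGE_LIVE themselves.  */

static rtx
example_last_movable_insn (rtx from, rtx to, rtx across_from,
                           rtx across_to, basic_block merge_bb)
{
  regset merge_live = BITMAP_ALLOC (&reg_obstack);
  rtx move_upto = NULL_RTX;

  /* Here we simply assume TO is the last real insn of MERGE_BB, so
     the registers live after TO are the live-out set of the block.  */
  bitmap_copy (merge_live, df_get_live_out (merge_bb));

  if (can_move_insns_across (from, to, across_from, across_to,
                             merge_bb, merge_live,
                             /*other_branch_live=*/NULL, &move_upto))
    move_upto = to;

  BITMAP_FREE (merge_live);

  /* NULL_RTX if nothing can be moved; otherwise the last movable insn.  */
  return move_upto;
}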
4053
4054 \f
4055 /*----------------------------------------------------------------------------
4056 MULTIPLE DEFINITIONS
4057
4058 Find the locations in the function reached by multiple definition sites
4059 for a live pseudo. In and out bitvectors are built for each basic
4060 block. They are restricted for efficiency to live registers.
4061
4062 The gen and kill sets for the problem are obvious. Together they
4063 include all defined registers in a basic block; the gen set includes
4064 registers where a partial or conditional or may-clobber definition is
4065 last in the BB, while the kill set includes registers with a complete
4066 definition coming last. However, the computation of the dataflow
4067 itself is interesting.
4068
4069 The idea behind it comes from SSA form's iterated dominance frontier
4070 criterion for inserting PHI functions. Just like in that case, we can use
4071 the dominance frontier to find places where multiple definitions meet;
4072 a register X defined in a basic block BB1 has multiple definitions in
4073 basic blocks in BB1's dominance frontier.
4074
4075 So, the in-set of a basic block BB2 is not just the union of the
4076 out-sets of BB2's predecessors, but includes some more bits that come
4077 from the basic blocks of whose dominance frontier BB2 is part (BB1 in
4078 the previous paragraph). I called this set the init-set of BB2.
4079
4080 (Note: I actually use the kill-set only to build the init-set;
4081 gen bits are propagated from BB1 to BB2 by the dataflow anyway.)
4082
4083 For example, if you have
4084
4085 BB1 : r10 = 0
4086 r11 = 0
4087 if <...> goto BB2 else goto BB3;
4088
4089 BB2 : r10 = 1
4090 r12 = 1
4091 goto BB3;
4092
4093 BB3 :
4094
4095 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4096 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4097 not need to iterate the dominance frontier, because we do not insert
4098 anything like PHI functions there! Instead, dataflow will take care of
4099 propagating the information to BB3's successors.
4100 ---------------------------------------------------------------------------*/
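
/* Illustrative sketch, not part of the original file: how a client
   pass might consume this problem after df_md_add_problem () and
   df_analyze () have been run.  The IN set of each block seeds a
   local bitmap that is then updated insn by insn; a set bit means
   the corresponding register may have more than one reaching
   definition at that point.  The helper name is hypothetical.  */

static void
example_scan_bb_md (basic_block bb)
{
  bitmap_head local_md;
  rtx insn;

  bitmap_initialize (&local_md, &bitmap_default_obstack);
  bitmap_copy (&local_md, &df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, &local_md);

  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
        /* A client would inspect LOCAL_MD here, before applying the
           defs of INSN.  */
        df_md_simulate_one_insn (bb, insn, &local_md);
      }

  bitmap_clear (&local_md);
}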
4101
4102 /* Private data for this problem. */
4103 struct df_md_problem_data
4104 {
4105 /* An obstack for the bitmaps we need for this problem. */
4106 bitmap_obstack md_bitmaps;
4107 };
4108
4109 /* Scratch var used by transfer functions. This is used to do md analysis
4110 only for live registers. */
4111 static bitmap_head df_md_scratch;
4112
4113
4114 static void
4115 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4116 void *vbb_info)
4117 {
4118 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4119 if (bb_info)
4120 {
4121 bitmap_clear (&bb_info->kill);
4122 bitmap_clear (&bb_info->gen);
4123 bitmap_clear (&bb_info->init);
4124 bitmap_clear (&bb_info->in);
4125 bitmap_clear (&bb_info->out);
4126 }
4127 }
4128
4129
4130 /* Allocate or reset bitmaps for DF_MD. The local and solution bitmaps
4131 of existing blocks are cleared; new blocks get fresh bitmaps. */
4132
4133 static void
4134 df_md_alloc (bitmap all_blocks)
4135 {
4136 unsigned int bb_index;
4137 bitmap_iterator bi;
4138 struct df_md_problem_data *problem_data;
4139
4140 df_grow_bb_info (df_md);
4141 if (df_md->problem_data)
4142 problem_data = (struct df_md_problem_data *) df_md->problem_data;
4143 else
4144 {
4145 problem_data = XNEW (struct df_md_problem_data);
4146 df_md->problem_data = problem_data;
4147 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4148 }
4149 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4150
4151 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4152 {
4153 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4154 /* When bitmaps are already initialized, just clear them. */
4155 if (bb_info->init.obstack)
4156 {
4157 bitmap_clear (&bb_info->init);
4158 bitmap_clear (&bb_info->gen);
4159 bitmap_clear (&bb_info->kill);
4160 bitmap_clear (&bb_info->in);
4161 bitmap_clear (&bb_info->out);
4162 }
4163 else
4164 {
4165 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4166 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4167 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4168 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4169 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4170 }
4171 }
4172
4173 df_md->optional_p = true;
4174 }
4175
4176 /* Add the effect of the top artificial defs of BB to the multiple definitions
4177 bitmap LOCAL_MD. */
4178
4179 void
4180 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4181 {
4182 int bb_index = bb->index;
4183 df_ref *def_rec;
4184 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4185 {
4186 df_ref def = *def_rec;
4187 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4188 {
4189 unsigned int dregno = DF_REF_REGNO (def);
4190 if (DF_REF_FLAGS (def)
4191 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4192 bitmap_set_bit (local_md, dregno);
4193 else
4194 bitmap_clear_bit (local_md, dregno);
4195 }
4196 }
4197 }
4198
4199
4200 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4201 LOCAL_MD. */
4202
4203 void
4204 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4205 bitmap local_md)
4206 {
4207 unsigned uid = INSN_UID (insn);
4208 df_ref *def_rec;
4209
4210 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4211 {
4212 df_ref def = *def_rec;
4213 unsigned int dregno = DF_REF_REGNO (def);
4214 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4215 || (dregno >= FIRST_PSEUDO_REGISTER))
4216 {
4217 if (DF_REF_FLAGS (def)
4218 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4219 bitmap_set_bit (local_md, dregno);
4220 else
4221 bitmap_clear_bit (local_md, dregno);
4222 }
4223 }
4224 }
4225
4226 static void
4227 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4228 df_ref *def_rec,
4229 int top_flag)
4230 {
4231 df_ref def;
4232 bitmap_clear (&seen_in_insn);
4233
4234 while ((def = *def_rec++) != NULL)
4235 {
4236 unsigned int dregno = DF_REF_REGNO (def);
4237 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4238 || (dregno >= FIRST_PSEUDO_REGISTER))
4239 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4240 {
4241 if (!bitmap_bit_p (&seen_in_insn, dregno))
4242 {
4243 if (DF_REF_FLAGS (def)
4244 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4245 {
4246 bitmap_set_bit (&bb_info->gen, dregno);
4247 bitmap_clear_bit (&bb_info->kill, dregno);
4248 }
4249 else
4250 {
4251 /* When we find a clobber and a regular def,
4252 make sure the regular def wins. */
4253 bitmap_set_bit (&seen_in_insn, dregno);
4254 bitmap_set_bit (&bb_info->kill, dregno);
4255 bitmap_clear_bit (&bb_info->gen, dregno);
4256 }
4257 }
4258 }
4259 }
4260 }
4261
4262
4263 /* Compute local multiple def info for basic block BB. */
4264
4265 static void
4266 df_md_bb_local_compute (unsigned int bb_index)
4267 {
4268 basic_block bb = BASIC_BLOCK (bb_index);
4269 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4270 rtx insn;
4271
4272 /* Artificials are only hard regs. */
4273 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4274 df_md_bb_local_compute_process_def (bb_info,
4275 df_get_artificial_defs (bb_index),
4276 DF_REF_AT_TOP);
4277
4278 FOR_BB_INSNS (bb, insn)
4279 {
4280 unsigned int uid = INSN_UID (insn);
4281 if (!INSN_P (insn))
4282 continue;
4283
4284 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4285 }
4286
4287 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4288 df_md_bb_local_compute_process_def (bb_info,
4289 df_get_artificial_defs (bb_index),
4290 0);
4291 }
4292
4293 /* Compute local multiple def info for each basic block within ALL_BLOCKS. */
4294
4295 static void
4296 df_md_local_compute (bitmap all_blocks)
4297 {
4298 unsigned int bb_index, df_bb_index;
4299 bitmap_iterator bi1, bi2;
4300 basic_block bb;
4301 bitmap_head *frontiers;
4302
4303 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4304
4305 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4306 {
4307 df_md_bb_local_compute (bb_index);
4308 }
4309
4310 bitmap_clear (&seen_in_insn);
4311
4312 frontiers = XNEWVEC (bitmap_head, last_basic_block);
4313 FOR_ALL_BB (bb)
4314 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4315
4316 compute_dominance_frontiers (frontiers);
4317
4318 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4319 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4320 {
4321 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4322 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4323 {
4324 basic_block bb = BASIC_BLOCK (df_bb_index);
4325 if (bitmap_bit_p (all_blocks, df_bb_index))
4326 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4327 df_get_live_in (bb));
4328 }
4329 }
4330
4331 FOR_ALL_BB (bb)
4332 bitmap_clear (&frontiers[bb->index]);
4333 free (frontiers);
4334 }
4335
4336
4337 /* Reset the global solution for recalculation. */
4338
4339 static void
4340 df_md_reset (bitmap all_blocks)
4341 {
4342 unsigned int bb_index;
4343 bitmap_iterator bi;
4344
4345 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4346 {
4347 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4348 gcc_assert (bb_info);
4349 bitmap_clear (&bb_info->in);
4350 bitmap_clear (&bb_info->out);
4351 }
4352 }
4353
4354 static bool
4355 df_md_transfer_function (int bb_index)
4356 {
4357 basic_block bb = BASIC_BLOCK (bb_index);
4358 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4359 bitmap in = &bb_info->in;
4360 bitmap out = &bb_info->out;
4361 bitmap gen = &bb_info->gen;
4362 bitmap kill = &bb_info->kill;
4363
4364 /* We need to use a scratch set here so that the value returned from this
4365 function invocation properly reflects whether the sets changed in a
4366 significant way; i.e. not just because the live set was anded in. */
4367 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4368
4369 /* Multiple definitions of a register are not relevant if it is not
4370 live. Thus we trim the result to the places where it is live. */
4371 bitmap_and_into (in, df_get_live_in (bb));
4372
4373 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4374 }
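
/* Restated as dataflow equations (a paraphrase of the code above,
   not additional functionality), with LIVE taken from the LR/LIVE
   problem and P1...Pn the predecessors of BB:

     IN(BB)  = (INIT(BB) U OUT(P1) U ... U OUT(Pn)) & LIVE_IN(BB)
     OUT(BB) = (GEN(BB) & LIVE_OUT(BB)) U (IN(BB) - KILL(BB))

   On EH edges the call-clobbered registers are removed from the
   predecessor's OUT set before it is merged in; fake edges are
   ignored entirely.  */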
4375
4376 /* Initialize the solution bit vectors for the problem. */
4377
4378 static void
4379 df_md_init (bitmap all_blocks)
4380 {
4381 unsigned int bb_index;
4382 bitmap_iterator bi;
4383
4384 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4385 {
4386 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4387
4388 bitmap_copy (&bb_info->in, &bb_info->init);
4389 df_md_transfer_function (bb_index);
4390 }
4391 }
4392
4393 static void
4394 df_md_confluence_0 (basic_block bb)
4395 {
4396 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4397 bitmap_copy (&bb_info->in, &bb_info->init);
4398 }
4399
4400 /* In of target gets or of out of source. */
4401
4402 static bool
4403 df_md_confluence_n (edge e)
4404 {
4405 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4406 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4407
4408 if (e->flags & EDGE_FAKE)
4409 return false;
4410
4411 if (e->flags & EDGE_EH)
4412 return bitmap_ior_and_compl_into (op1, op2,
4413 regs_invalidated_by_call_regset);
4414 else
4415 return bitmap_ior_into (op1, op2);
4416 }
4417
4418 /* Free all storage associated with the problem. */
4419
4420 static void
4421 df_md_free (void)
4422 {
4423 struct df_md_problem_data *problem_data
4424 = (struct df_md_problem_data *) df_md->problem_data;
4425
4426 bitmap_obstack_release (&problem_data->md_bitmaps);
4427 free (problem_data);
4428 df_md->problem_data = NULL;
4429
4430 df_md->block_info_size = 0;
4431 free (df_md->block_info);
4432 df_md->block_info = NULL;
4433 free (df_md);
4434 }
4435
4436
4437 /* Debugging info at top of bb. */
4438
4439 static void
4440 df_md_top_dump (basic_block bb, FILE *file)
4441 {
4442 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4443 if (!bb_info)
4444 return;
4445
4446 fprintf (file, ";; md in \t");
4447 df_print_regset (file, &bb_info->in);
4448 fprintf (file, ";; md init \t");
4449 df_print_regset (file, &bb_info->init);
4450 fprintf (file, ";; md gen \t");
4451 df_print_regset (file, &bb_info->gen);
4452 fprintf (file, ";; md kill \t");
4453 df_print_regset (file, &bb_info->kill);
4454 }
4455
4456 /* Debugging info at bottom of bb. */
4457
4458 static void
4459 df_md_bottom_dump (basic_block bb, FILE *file)
4460 {
4461 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4462 if (!bb_info)
4463 return;
4464
4465 fprintf (file, ";; md out \t");
4466 df_print_regset (file, &bb_info->out);
4467 }
4468
4469 static struct df_problem problem_MD =
4470 {
4471 DF_MD, /* Problem id. */
4472 DF_FORWARD, /* Direction. */
4473 df_md_alloc, /* Allocate the problem specific data. */
4474 df_md_reset, /* Reset global information. */
4475 df_md_free_bb_info, /* Free basic block info. */
4476 df_md_local_compute, /* Local compute function. */
4477 df_md_init, /* Init the solution specific data. */
4478 df_worklist_dataflow, /* Worklist solver. */
4479 df_md_confluence_0, /* Confluence operator 0. */
4480 df_md_confluence_n, /* Confluence operator n. */
4481 df_md_transfer_function, /* Transfer function. */
4482 NULL, /* Finalize function. */
4483 df_md_free, /* Free all of the problem information. */
4484 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4485 NULL, /* Debugging. */
4486 df_md_top_dump, /* Debugging start block. */
4487 df_md_bottom_dump, /* Debugging end block. */
4488 NULL, /* Debugging start insn. */
4489 NULL, /* Debugging end insn. */
4490 NULL, /* Incremental solution verify start. */
4491 NULL, /* Incremental solution verify end. */
4492 NULL, /* Dependent problem. */
4493 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4494 TV_DF_MD, /* Timing variable. */
4495 false /* Reset blocks on dropping out of blocks_to_analyze. */
4496 };
4497
4498 /* Create a new MD instance and add it to the existing instance
4499 of DF. */
4500
4501 void
4502 df_md_add_problem (void)
4503 {
4504 df_add_problem (&problem_MD);
4505 }
4506
4507
4508