1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999-2013 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "alloc-pool.h"
35 #include "flags.h"
36 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "sbitmap.h"
39 #include "bitmap.h"
40 #include "target.h"
41 #include "timevar.h"
42 #include "df.h"
43 #include "except.h"
44 #include "dce.h"
45 #include "valtrack.h"
46 #include "dumpfile.h"
47
48 /* Note that turning REG_DEAD_DEBUGGING on will cause
49 gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
50 addresses in the dumps. */
51 #define REG_DEAD_DEBUGGING 0
52
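/* Note: the reaching-definitions code below switches to a sparse, per-regno
   kill representation for any register with more than this many defs; see
   the comment ahead of struct df_rd_problem_data.  */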
53 #define DF_SPARSE_THRESHOLD 32
54
55 static bitmap_head seen_in_block;
56 static bitmap_head seen_in_insn;
57
58 /*----------------------------------------------------------------------------
59 Utility functions.
60 ----------------------------------------------------------------------------*/
61
62 /* Generic versions to get the void* version of the block info. Only
63 used inside the problem instance vectors. */
64
65 /* Dump a def-use or use-def chain for REF to FILE. */
66
67 void
68 df_chain_dump (struct df_link *link, FILE *file)
69 {
70 fprintf (file, "{ ");
71 for (; link; link = link->next)
72 {
73 fprintf (file, "%c%d(bb %d insn %d) ",
74 DF_REF_REG_DEF_P (link->ref)
75 ? 'd'
76 : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
77 DF_REF_ID (link->ref),
78 DF_REF_BBNO (link->ref),
79 DF_REF_IS_ARTIFICIAL (link->ref)
80 ? -1 : DF_REF_INSN_UID (link->ref));
81 }
82 fprintf (file, "}");
83 }
84
85
86 /* Print some basic block info as part of df_dump. */
87
88 void
89 df_print_bb_index (basic_block bb, FILE *file)
90 {
91 edge e;
92 edge_iterator ei;
93
94 fprintf (file, "\n( ");
95 FOR_EACH_EDGE (e, ei, bb->preds)
96 {
97 basic_block pred = e->src;
98 fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
99 }
100 fprintf (file, ")->[%d]->( ", bb->index);
101 FOR_EACH_EDGE (e, ei, bb->succs)
102 {
103 basic_block succ = e->dest;
104 fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
105 }
106 fprintf (file, ")\n");
107 }
108
109 \f
110 /*----------------------------------------------------------------------------
111 REACHING DEFINITIONS
112
113 Find the locations in the function where each definition site for a
114 pseudo reaches. In and out bitvectors are built for each basic
115 block. The id field in the ref is used to index into these sets.
116 See df.h for details.
117
118 If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching
119 existing uses are included in the global reaching DEFs set, or in other
120 words only DEFs that are still live. This is a kind of pruned version
121 of the traditional reaching definitions problem that is much less
122 complex to compute and produces enough information to compute UD-chains.
123 In this context, live must be interpreted in the DF_LR sense: Uses that
124 are upward exposed but possibly not initialized on all paths through the
125 CFG. For a USE that is not reached by a DEF on all paths, we still want
126 to make those DEFs that do reach the USE visible, and pruning based on
127 DF_LIVE would make that impossible.
128 ----------------------------------------------------------------------------*/
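/* For reference, the equations solved here are the classic forward
   reaching-definitions equations,

       OUT(bb) = GEN(bb) U (IN(bb) - KILL(bb))
       IN(bb)  = union of OUT(pred) over the predecessors of bb,

   with the kill side split between a dense and a sparse representation as
   described below, and with defs of call-clobbered registers filtered out
   across EH edges (see df_rd_confluence_n and df_rd_transfer_function).  */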
129
130 /* This problem plays a large number of games for the sake of
131 efficiency.
132
133 1) The order of the bits in the bitvectors. After the scanning
134 phase, all of the defs are sorted. All of the defs for the reg 0
135 are first, followed by all defs for reg 1 and so on.
136
137 2) There are two kill sets: one if the number of defs is less than or
138 equal to DF_SPARSE_THRESHOLD and another if the number of defs is
139 greater.
140
141 <= : Data is built directly in the kill set.
142
143 > : One level of indirection is used to keep from generating long
144 strings of 1 bits in the kill sets. Bitvectors that are indexed
145 by the regnum are used to represent that there is a killing def
146 for the register. The confluence and transfer functions use
147 these along with the bitmap_clear_range call to remove ranges of
148 bits without actually generating a knockout vector.
149
150 The kill and sparse_kill sets, and the dense_invalidated_by_call and
151 sparse_invalidated_by_call sets, all play this game. */
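/* Example: suppose pseudo R has 200 defs across the function.  A block
   containing a full (non-partial, non-conditional) def of R does not set
   200 bits in its kill set; it sets bit R in sparse_kill instead, and the
   confluence and transfer functions then use bitmap_clear_range to remove
   the interval [DF_DEFS_BEGIN (R), DF_DEFS_BEGIN (R) + DF_DEFS_COUNT (R) - 1]
   from the incoming sets.  A register with at most DF_SPARSE_THRESHOLD defs
   gets its range set directly in the kill set.  */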
152
153 /* Private data used to compute the solution for this problem. These
154 data structures are not accessible outside of this module. */
155 struct df_rd_problem_data
156 {
157 /* The set of defs to regs invalidated by call, represented sparsely as one bit per regno. */
158 bitmap_head sparse_invalidated_by_call;
159 /* The set of defs to regs invalidated by call, represented densely as one bit per def ID. */
160 bitmap_head dense_invalidated_by_call;
161 /* An obstack for the bitmaps we need for this problem. */
162 bitmap_obstack rd_bitmaps;
163 };
164
165
166 /* Free basic block info. */
167
168 static void
169 df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
170 void *vbb_info)
171 {
172 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
173 if (bb_info)
174 {
175 bitmap_clear (&bb_info->kill);
176 bitmap_clear (&bb_info->sparse_kill);
177 bitmap_clear (&bb_info->gen);
178 bitmap_clear (&bb_info->in);
179 bitmap_clear (&bb_info->out);
180 }
181 }
182
183
184 /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
185 not touched unless the block is new. */
186
187 static void
188 df_rd_alloc (bitmap all_blocks)
189 {
190 unsigned int bb_index;
191 bitmap_iterator bi;
192 struct df_rd_problem_data *problem_data;
193
194 if (df_rd->problem_data)
195 {
196 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
197 bitmap_clear (&problem_data->sparse_invalidated_by_call);
198 bitmap_clear (&problem_data->dense_invalidated_by_call);
199 }
200 else
201 {
202 problem_data = XNEW (struct df_rd_problem_data);
203 df_rd->problem_data = problem_data;
204
205 bitmap_obstack_initialize (&problem_data->rd_bitmaps);
206 bitmap_initialize (&problem_data->sparse_invalidated_by_call,
207 &problem_data->rd_bitmaps);
208 bitmap_initialize (&problem_data->dense_invalidated_by_call,
209 &problem_data->rd_bitmaps);
210 }
211
212 df_grow_bb_info (df_rd);
213
214 /* Because of the clustering of all def sites for the same pseudo,
215 we have to process all of the blocks before doing the analysis. */
216
217 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
218 {
219 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
220
221 /* When bitmaps are already initialized, just clear them. */
222 if (bb_info->kill.obstack)
223 {
224 bitmap_clear (&bb_info->kill);
225 bitmap_clear (&bb_info->sparse_kill);
226 bitmap_clear (&bb_info->gen);
227 }
228 else
229 {
230 bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
231 bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
232 bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
233 bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
234 bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
235 }
236 }
237 df_rd->optional_p = true;
238 }
239
240
241 /* Add the effect of the top artificial defs of BB to the reaching definitions
242 bitmap LOCAL_RD. */
243
244 void
245 df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
246 {
247 int bb_index = bb->index;
248 df_ref *def_rec;
249 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
250 {
251 df_ref def = *def_rec;
252 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
253 {
254 unsigned int dregno = DF_REF_REGNO (def);
255 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
256 bitmap_clear_range (local_rd,
257 DF_DEFS_BEGIN (dregno),
258 DF_DEFS_COUNT (dregno));
259 bitmap_set_bit (local_rd, DF_REF_ID (def));
260 }
261 }
262 }
263
264 /* Add the effect of the defs of INSN to the reaching definitions bitmap
265 LOCAL_RD. */
266
267 void
268 df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
269 bitmap local_rd)
270 {
271 unsigned uid = INSN_UID (insn);
272 df_ref *def_rec;
273
274 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
275 {
276 df_ref def = *def_rec;
277 unsigned int dregno = DF_REF_REGNO (def);
278 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
279 || (dregno >= FIRST_PSEUDO_REGISTER))
280 {
281 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
282 bitmap_clear_range (local_rd,
283 DF_DEFS_BEGIN (dregno),
284 DF_DEFS_COUNT (dregno));
285 if (!(DF_REF_FLAGS (def)
286 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
287 bitmap_set_bit (local_rd, DF_REF_ID (def));
288 }
289 }
290 }
291
292 /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
293 more complicated than just simulating, because we must produce the
294 gen and kill sets and hence deal with the two possible representations
295 of kill sets. */
296
297 static void
298 df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
299 df_ref *def_rec,
300 int top_flag)
301 {
302 while (*def_rec)
303 {
304 df_ref def = *def_rec;
305 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
306 {
307 unsigned int regno = DF_REF_REGNO (def);
308 unsigned int begin = DF_DEFS_BEGIN (regno);
309 unsigned int n_defs = DF_DEFS_COUNT (regno);
310
311 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
312 || (regno >= FIRST_PSEUDO_REGISTER))
313 {
314 /* Only the last def(s) for a regno in the block has any
315 effect. */
316 if (!bitmap_bit_p (&seen_in_block, regno))
317 {
318 /* The first def for regno in insn gets to knock out the
319 defs from other instructions. */
320 if ((!bitmap_bit_p (&seen_in_insn, regno))
321 /* If the def is to only part of the reg, it does
322 not kill the other defs that reach here. */
323 && (!(DF_REF_FLAGS (def) &
324 (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
325 {
326 if (n_defs > DF_SPARSE_THRESHOLD)
327 {
328 bitmap_set_bit (&bb_info->sparse_kill, regno);
329 bitmap_clear_range (&bb_info->gen, begin, n_defs);
330 }
331 else
332 {
333 bitmap_set_range (&bb_info->kill, begin, n_defs);
334 bitmap_clear_range (&bb_info->gen, begin, n_defs);
335 }
336 }
337
338 bitmap_set_bit (&seen_in_insn, regno);
339 /* All defs for regno in the instruction may be put into
340 the gen set. */
341 if (!(DF_REF_FLAGS (def)
342 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
343 bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
344 }
345 }
346 }
347 def_rec++;
348 }
349 }
350
351 /* Compute local reaching def info for basic block BB. */
352
353 static void
354 df_rd_bb_local_compute (unsigned int bb_index)
355 {
356 basic_block bb = BASIC_BLOCK (bb_index);
357 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
358 rtx insn;
359
360 bitmap_clear (&seen_in_block);
361 bitmap_clear (&seen_in_insn);
362
363 /* Artificials are only hard regs. */
364 if (!(df->changeable_flags & DF_NO_HARD_REGS))
365 df_rd_bb_local_compute_process_def (bb_info,
366 df_get_artificial_defs (bb_index),
367 0);
368
369 FOR_BB_INSNS_REVERSE (bb, insn)
370 {
371 unsigned int uid = INSN_UID (insn);
372
373 if (!INSN_P (insn))
374 continue;
375
376 df_rd_bb_local_compute_process_def (bb_info,
377 DF_INSN_UID_DEFS (uid), 0);
378
379 /* This complex dance with the two bitmaps is required because
380 instructions can assign twice to the same pseudo. This
381 generally happens with calls that will have one def for the
382 result and another def for the clobber. If only one vector
383 is used and the clobber goes first, the result will be
384 lost. */
385 bitmap_ior_into (&seen_in_block, &seen_in_insn);
386 bitmap_clear (&seen_in_insn);
387 }
388
389 /* Process the artificial defs at the top of the block last since we
390 are going backwards through the block and these are logically at
391 the start. */
392 if (!(df->changeable_flags & DF_NO_HARD_REGS))
393 df_rd_bb_local_compute_process_def (bb_info,
394 df_get_artificial_defs (bb_index),
395 DF_REF_AT_TOP);
396 }
397
398
399 /* Compute local reaching def info for each basic block within BLOCKS. */
400
401 static void
402 df_rd_local_compute (bitmap all_blocks)
403 {
404 unsigned int bb_index;
405 bitmap_iterator bi;
406 unsigned int regno;
407 struct df_rd_problem_data *problem_data
408 = (struct df_rd_problem_data *) df_rd->problem_data;
409 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
410 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
411
412 bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
413 bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
414
415 df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
416
417 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
418 {
419 df_rd_bb_local_compute (bb_index);
420 }
421
422 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
423 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
424 {
425 if (! HARD_REGISTER_NUM_P (regno)
426 || !(df->changeable_flags & DF_NO_HARD_REGS))
427 {
428 if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
429 bitmap_set_bit (sparse_invalidated, regno);
430 else
431 bitmap_set_range (dense_invalidated,
432 DF_DEFS_BEGIN (regno),
433 DF_DEFS_COUNT (regno));
434 }
435 }
436
437 bitmap_clear (&seen_in_block);
438 bitmap_clear (&seen_in_insn);
439 }
440
441
442 /* Initialize the solution bit vectors for the problem. */
443
444 static void
445 df_rd_init_solution (bitmap all_blocks)
446 {
447 unsigned int bb_index;
448 bitmap_iterator bi;
449
450 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
451 {
452 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
453
454 bitmap_copy (&bb_info->out, &bb_info->gen);
455 bitmap_clear (&bb_info->in);
456 }
457 }
458
459 /* The IN set of the target gets the OR of the OUT set of the source. */
460
461 static bool
462 df_rd_confluence_n (edge e)
463 {
464 bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
465 bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
466 bool changed = false;
467
468 if (e->flags & EDGE_FAKE)
469 return false;
470
471 if (e->flags & EDGE_EH)
472 {
473 struct df_rd_problem_data *problem_data
474 = (struct df_rd_problem_data *) df_rd->problem_data;
475 bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
476 bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
477 bitmap_iterator bi;
478 unsigned int regno;
479 bitmap_head tmp;
480
481 bitmap_initialize (&tmp, &df_bitmap_obstack);
482 bitmap_copy (&tmp, op2);
483 bitmap_and_compl_into (&tmp, dense_invalidated);
484
485 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
486 {
487 bitmap_clear_range (&tmp,
488 DF_DEFS_BEGIN (regno),
489 DF_DEFS_COUNT (regno));
490 }
491 changed |= bitmap_ior_into (op1, &tmp);
492 bitmap_clear (&tmp);
493 return changed;
494 }
495 else
496 return bitmap_ior_into (op1, op2);
497 }
498
499
500 /* Transfer function. */
501
502 static bool
503 df_rd_transfer_function (int bb_index)
504 {
505 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
506 unsigned int regno;
507 bitmap_iterator bi;
508 bitmap in = &bb_info->in;
509 bitmap out = &bb_info->out;
510 bitmap gen = &bb_info->gen;
511 bitmap kill = &bb_info->kill;
512 bitmap sparse_kill = &bb_info->sparse_kill;
513 bool changed = false;
514
515 if (bitmap_empty_p (sparse_kill))
516 changed = bitmap_ior_and_compl (out, gen, in, kill);
517 else
518 {
519 struct df_rd_problem_data *problem_data;
520 bitmap_head tmp;
521
522 /* Note that TMP is _not_ a temporary bitmap if we end up replacing
523 OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
524 problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
525 bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
526
527 bitmap_copy (&tmp, in);
528 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
529 {
530 bitmap_clear_range (&tmp,
531 DF_DEFS_BEGIN (regno),
532 DF_DEFS_COUNT (regno));
533 }
534 bitmap_and_compl_into (&tmp, kill);
535 bitmap_ior_into (&tmp, gen);
536 changed = !bitmap_equal_p (&tmp, out);
537 if (changed)
538 {
539 bitmap_clear (out);
540 bb_info->out = tmp;
541 }
542 else
543 bitmap_clear (&tmp);
544 }
545
546 if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
547 {
548 /* Create a mask of DEFs for all registers live at the end of this
549 basic block, and mask out DEFs of registers that are not live.
550 Computing the mask looks costly, but the benefit of the pruning
551 outweighs the cost. */
552 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
553 bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
554 bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
555 unsigned int regno;
556 bitmap_iterator bi;
557
558 EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi)
559 bitmap_set_range (live_defs,
560 DF_DEFS_BEGIN (regno),
561 DF_DEFS_COUNT (regno));
562 changed |= bitmap_and_into (&bb_info->out, live_defs);
563 BITMAP_FREE (live_defs);
564 }
565
566 return changed;
567 }
568
569 /* Free all storage associated with the problem. */
570
571 static void
572 df_rd_free (void)
573 {
574 struct df_rd_problem_data *problem_data
575 = (struct df_rd_problem_data *) df_rd->problem_data;
576
577 if (problem_data)
578 {
579 bitmap_obstack_release (&problem_data->rd_bitmaps);
580
581 df_rd->block_info_size = 0;
582 free (df_rd->block_info);
583 df_rd->block_info = NULL;
584 free (df_rd->problem_data);
585 }
586 free (df_rd);
587 }
588
589
590 /* Debugging info. */
591
592 static void
593 df_rd_start_dump (FILE *file)
594 {
595 struct df_rd_problem_data *problem_data
596 = (struct df_rd_problem_data *) df_rd->problem_data;
597 unsigned int m = DF_REG_SIZE(df);
598 unsigned int regno;
599
600 if (!df_rd->block_info)
601 return;
602
603 fprintf (file, ";; Reaching defs:\n");
604
605 fprintf (file, ";; sparse invalidated \t");
606 dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
607 fprintf (file, ";; dense invalidated \t");
608 dump_bitmap (file, &problem_data->dense_invalidated_by_call);
609
610 fprintf (file, ";; reg->defs[] map:\t");
611 for (regno = 0; regno < m; regno++)
612 if (DF_DEFS_COUNT (regno))
613 fprintf (file, "%d[%d,%d] ", regno,
614 DF_DEFS_BEGIN (regno),
615 DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1);
616 fprintf (file, "\n");
617 }
618
619
620 static void
621 df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
622 {
623 bitmap_head tmp;
624 unsigned int regno;
625 unsigned int m = DF_REG_SIZE(df);
626 bool first_reg = true;
627
628 fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
629
630 bitmap_initialize (&tmp, &df_bitmap_obstack);
631 for (regno = 0; regno < m; regno++)
632 {
633 if (HARD_REGISTER_NUM_P (regno)
634 && (df->changeable_flags & DF_NO_HARD_REGS))
635 continue;
636 bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
637 bitmap_and_into (&tmp, defs_set);
638 if (! bitmap_empty_p (&tmp))
639 {
640 bitmap_iterator bi;
641 unsigned int ix;
642 bool first_def = true;
643
644 if (! first_reg)
645 fprintf (file, ",");
646 first_reg = false;
647
648 fprintf (file, "%u[", regno);
649 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi)
650 {
651 fprintf (file, "%s%u", first_def ? "" : ",", ix);
652 first_def = false;
653 }
654 fprintf (file, "]");
655 }
656 bitmap_clear (&tmp);
657 }
658
659 fprintf (file, "\n");
660 bitmap_clear (&tmp);
661 }
662
663 /* Debugging info at top of bb. */
664
665 static void
666 df_rd_top_dump (basic_block bb, FILE *file)
667 {
668 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
669 if (!bb_info)
670 return;
671
672 df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file);
673 df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file);
674 df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file);
675 }
676
677
678 /* Debugging info at bottom of bb. */
679
680 static void
681 df_rd_bottom_dump (basic_block bb, FILE *file)
682 {
683 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
684 if (!bb_info)
685 return;
686
687 df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file);
688 }
689
690 /* All of the information associated with every instance of the problem. */
691
692 static struct df_problem problem_RD =
693 {
694 DF_RD, /* Problem id. */
695 DF_FORWARD, /* Direction. */
696 df_rd_alloc, /* Allocate the problem specific data. */
697 NULL, /* Reset global information. */
698 df_rd_free_bb_info, /* Free basic block info. */
699 df_rd_local_compute, /* Local compute function. */
700 df_rd_init_solution, /* Init the solution specific data. */
701 df_worklist_dataflow, /* Worklist solver. */
702 NULL, /* Confluence operator 0. */
703 df_rd_confluence_n, /* Confluence operator n. */
704 df_rd_transfer_function, /* Transfer function. */
705 NULL, /* Finalize function. */
706 df_rd_free, /* Free all of the problem information. */
707 df_rd_free, /* Remove this problem from the stack of dataflow problems. */
708 df_rd_start_dump, /* Debugging. */
709 df_rd_top_dump, /* Debugging start block. */
710 df_rd_bottom_dump, /* Debugging end block. */
711 NULL, /* Debugging start insn. */
712 NULL, /* Debugging end insn. */
713 NULL, /* Incremental solution verify start. */
714 NULL, /* Incremental solution verify end. */
715 NULL, /* Dependent problem. */
716 sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
717 TV_DF_RD, /* Timing variable. */
718 true /* Reset blocks on dropping out of blocks_to_analyze. */
719 };
720
721
722
723 /* Create a new RD instance and add it to the existing instance
724 of DF. */
725
726 void
727 df_rd_add_problem (void)
728 {
729 df_add_problem (&problem_RD);
730 }
731
732
733 \f
734 /*----------------------------------------------------------------------------
735 LIVE REGISTERS
736
737 Find the locations in the function where any use of a pseudo can
738 reach in the backwards direction. In and out bitvectors are built
739 for each basic block. The regno is used to index into these sets.
740 See df.h for details.
741 ----------------------------------------------------------------------------*/
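/* For reference, this is the classic backward liveness computation,

       IN(bb)  = USE(bb) U (OUT(bb) - DEF(bb))
       OUT(bb) = union of IN(succ) over the successors of bb,

   with the registers in df->hardware_regs_used forced into the OUT sets and
   with call-clobbered registers filtered out across EH edges (see
   df_lr_confluence_0, df_lr_confluence_n and df_lr_transfer_function).  */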
742
743 /* Private data used to verify the solution for this problem. */
744 struct df_lr_problem_data
745 {
746 bitmap_head *in;
747 bitmap_head *out;
748 /* An obstack for the bitmaps we need for this problem. */
749 bitmap_obstack lr_bitmaps;
750 };
751
752 /* Free basic block info. */
753
754 static void
755 df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
756 void *vbb_info)
757 {
758 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
759 if (bb_info)
760 {
761 bitmap_clear (&bb_info->use);
762 bitmap_clear (&bb_info->def);
763 bitmap_clear (&bb_info->in);
764 bitmap_clear (&bb_info->out);
765 }
766 }
767
768
769 /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
770 not touched unless the block is new. */
771
772 static void
773 df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
774 {
775 unsigned int bb_index;
776 bitmap_iterator bi;
777 struct df_lr_problem_data *problem_data;
778
779 df_grow_bb_info (df_lr);
780 if (df_lr->problem_data)
781 problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
782 else
783 {
784 problem_data = XNEW (struct df_lr_problem_data);
785 df_lr->problem_data = problem_data;
786
787 problem_data->out = NULL;
788 problem_data->in = NULL;
789 bitmap_obstack_initialize (&problem_data->lr_bitmaps);
790 }
791
792 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
793 {
794 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
795
796 /* When bitmaps are already initialized, just clear them. */
797 if (bb_info->use.obstack)
798 {
799 bitmap_clear (&bb_info->def);
800 bitmap_clear (&bb_info->use);
801 }
802 else
803 {
804 bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
805 bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
806 bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
807 bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
808 }
809 }
810
811 df_lr->optional_p = false;
812 }
813
814
815 /* Reset the global solution for recalculation. */
816
817 static void
818 df_lr_reset (bitmap all_blocks)
819 {
820 unsigned int bb_index;
821 bitmap_iterator bi;
822
823 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
824 {
825 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
826 gcc_assert (bb_info);
827 bitmap_clear (&bb_info->in);
828 bitmap_clear (&bb_info->out);
829 }
830 }
831
832
833 /* Compute local live register info for basic block BB. */
834
835 static void
836 df_lr_bb_local_compute (unsigned int bb_index)
837 {
838 basic_block bb = BASIC_BLOCK (bb_index);
839 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
840 rtx insn;
841 df_ref *def_rec;
842 df_ref *use_rec;
843
844 /* Process the registers set in an exception handler. */
845 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
846 {
847 df_ref def = *def_rec;
848 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
849 {
850 unsigned int dregno = DF_REF_REGNO (def);
851 bitmap_set_bit (&bb_info->def, dregno);
852 bitmap_clear_bit (&bb_info->use, dregno);
853 }
854 }
855
856 /* Process the hardware registers that are always live. */
857 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
858 {
859 df_ref use = *use_rec;
860 /* Add use to set of uses in this BB. */
861 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
862 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
863 }
864
865 FOR_BB_INSNS_REVERSE (bb, insn)
866 {
867 unsigned int uid = INSN_UID (insn);
868
869 if (!NONDEBUG_INSN_P (insn))
870 continue;
871
872 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
873 {
874 df_ref def = *def_rec;
875 /* If the def is to only part of the reg, it does
876 not kill the other defs that reach here. */
877 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
878 {
879 unsigned int dregno = DF_REF_REGNO (def);
880 bitmap_set_bit (&bb_info->def, dregno);
881 bitmap_clear_bit (&bb_info->use, dregno);
882 }
883 }
884
885 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
886 {
887 df_ref use = *use_rec;
888 /* Add use to set of uses in this BB. */
889 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
890 }
891 }
892
893 /* Process the registers set in an exception handler or the hard
894 frame pointer if this block is the target of a non local
895 goto. */
896 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
897 {
898 df_ref def = *def_rec;
899 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
900 {
901 unsigned int dregno = DF_REF_REGNO (def);
902 bitmap_set_bit (&bb_info->def, dregno);
903 bitmap_clear_bit (&bb_info->use, dregno);
904 }
905 }
906
907 #ifdef EH_USES
908 /* Process the uses that are live into an exception handler. */
909 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
910 {
911 df_ref use = *use_rec;
912 /* Add use to set of uses in this BB. */
913 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
914 bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
915 }
916 #endif
917
918 /* If the df_live problem is not defined, such as at -O0 and -O1, we
919 still need to keep the luids up to date. This is normally done
920 in the df_live problem since this problem has a forwards
921 scan. */
922 if (!df_live)
923 df_recompute_luids (bb);
924 }
925
926
927 /* Compute local live register info for each basic block within BLOCKS. */
928
929 static void
930 df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
931 {
932 unsigned int bb_index, i;
933 bitmap_iterator bi;
934
935 bitmap_clear (&df->hardware_regs_used);
936
937 /* The all-important stack pointer must always be live. */
938 bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
939
940 /* Global regs are always live, too. */
941 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
942 if (global_regs[i])
943 bitmap_set_bit (&df->hardware_regs_used, i);
944
945 /* Before reload, there are a few registers that must be forced
946 live everywhere -- which might not already be the case for
947 blocks within infinite loops. */
948 if (!reload_completed)
949 {
950 unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
951 /* Any reference to any pseudo before reload is a potential
952 reference of the frame pointer. */
953 bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
954
955 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
956 /* Pseudos with argument area equivalences may require
957 reloading via the argument pointer. */
958 if (fixed_regs[ARG_POINTER_REGNUM])
959 bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
960 #endif
961
962 /* Any constant, or pseudo with constant equivalences, may
963 require reloading from memory using the pic register. */
964 if (pic_offset_table_regnum != INVALID_REGNUM
965 && fixed_regs[pic_offset_table_regnum])
966 bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
967 }
968
969 EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
970 {
971 if (bb_index == EXIT_BLOCK)
972 {
973 /* The exit block is special for this problem and its bits are
974 computed from thin air. */
975 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
976 bitmap_copy (&bb_info->use, df->exit_block_uses);
977 }
978 else
979 df_lr_bb_local_compute (bb_index);
980 }
981
982 bitmap_clear (df_lr->out_of_date_transfer_functions);
983 }
984
985
986 /* Initialize the solution vectors. */
987
988 static void
989 df_lr_init (bitmap all_blocks)
990 {
991 unsigned int bb_index;
992 bitmap_iterator bi;
993
994 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
995 {
996 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
997 bitmap_copy (&bb_info->in, &bb_info->use);
998 bitmap_clear (&bb_info->out);
999 }
1000 }
1001
1002
1003 /* Confluence function that processes infinite loops. This might be a
1004 noreturn function that throws. And even if it isn't, getting the
1005 unwind info right helps debugging. */
1006 static void
1007 df_lr_confluence_0 (basic_block bb)
1008 {
1009 bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
1010 if (bb != EXIT_BLOCK_PTR)
1011 bitmap_copy (op1, &df->hardware_regs_used);
1012 }
1013
1014
1015 /* Confluence function that ignores fake edges. */
1016
1017 static bool
1018 df_lr_confluence_n (edge e)
1019 {
1020 bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
1021 bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
1022 bool changed = false;
1023
1024 /* Call-clobbered registers die across exception and call edges. */
1025 /* ??? Abnormal call edges ignored for the moment, as this gets
1026 confused by sibling call edges, which crashes reg-stack. */
1027 if (e->flags & EDGE_EH)
1028 changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
1029 else
1030 changed = bitmap_ior_into (op1, op2);
1031
1032 changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
1033 return changed;
1034 }
1035
1036
1037 /* Transfer function. */
1038
1039 static bool
1040 df_lr_transfer_function (int bb_index)
1041 {
1042 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
1043 bitmap in = &bb_info->in;
1044 bitmap out = &bb_info->out;
1045 bitmap use = &bb_info->use;
1046 bitmap def = &bb_info->def;
1047
1048 return bitmap_ior_and_compl (in, use, out, def);
1049 }
1050
1051
1052 /* Run the fast dce as a side effect of building LR. */
1053
1054 static void
1055 df_lr_finalize (bitmap all_blocks)
1056 {
1057 df_lr->solutions_dirty = false;
1058 if (df->changeable_flags & DF_LR_RUN_DCE)
1059 {
1060 run_fast_df_dce ();
1061
1062 /* If dce deletes some instructions, we need to recompute the lr
1063 solution before proceeding further. The problem is that fast
1064 dce is a pessimistic dataflow algorithm. In the case where
1065 it deletes a statement S inside of a loop, the uses inside of
1066 S may not be deleted from the dataflow solution because they
1067 were carried around the loop. While it is conservatively
1068 correct to leave these extra bits, the standards of df
1069 require that we maintain the best possible (least fixed
1070 point) solution. The only way to do that is to redo the
1071 iteration from the beginning. See PR35805 for an
1072 example. */
1073 if (df_lr->solutions_dirty)
1074 {
1075 df_clear_flags (DF_LR_RUN_DCE);
1076 df_lr_alloc (all_blocks);
1077 df_lr_local_compute (all_blocks);
1078 df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
1079 df_lr_finalize (all_blocks);
1080 df_set_flags (DF_LR_RUN_DCE);
1081 }
1082 }
1083 }
1084
1085
1086 /* Free all storage associated with the problem. */
1087
1088 static void
1089 df_lr_free (void)
1090 {
1091 struct df_lr_problem_data *problem_data
1092 = (struct df_lr_problem_data *) df_lr->problem_data;
1093 if (df_lr->block_info)
1094 {
1095
1096 df_lr->block_info_size = 0;
1097 free (df_lr->block_info);
1098 df_lr->block_info = NULL;
1099 bitmap_obstack_release (&problem_data->lr_bitmaps);
1100 free (df_lr->problem_data);
1101 df_lr->problem_data = NULL;
1102 }
1103
1104 BITMAP_FREE (df_lr->out_of_date_transfer_functions);
1105 free (df_lr);
1106 }
1107
1108
1109 /* Debugging info at top of bb. */
1110
1111 static void
1112 df_lr_top_dump (basic_block bb, FILE *file)
1113 {
1114 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1115 struct df_lr_problem_data *problem_data;
1116 if (!bb_info)
1117 return;
1118
1119 fprintf (file, ";; lr in \t");
1120 df_print_regset (file, &bb_info->in);
1121 if (df_lr->problem_data)
1122 {
1123 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1124 if (problem_data->in)
1125 {
1126 fprintf (file, ";; old in \t");
1127 df_print_regset (file, &problem_data->in[bb->index]);
1128 }
1129 }
1130 fprintf (file, ";; lr use \t");
1131 df_print_regset (file, &bb_info->use);
1132 fprintf (file, ";; lr def \t");
1133 df_print_regset (file, &bb_info->def);
1134 }
1135
1136
1137 /* Debugging info at bottom of bb. */
1138
1139 static void
1140 df_lr_bottom_dump (basic_block bb, FILE *file)
1141 {
1142 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1143 struct df_lr_problem_data *problem_data;
1144 if (!bb_info)
1145 return;
1146
1147 fprintf (file, ";; lr out \t");
1148 df_print_regset (file, &bb_info->out);
1149 if (df_lr->problem_data)
1150 {
1151 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1152 if (problem_data->out)
1153 {
1154 fprintf (file, ";; old out \t");
1155 df_print_regset (file, &problem_data->out[bb->index]);
1156 }
1157 }
1158 }
1159
1160
1161 /* Build the datastructure to verify that the solution to the dataflow
1162 equations is not dirty. */
1163
1164 static void
1165 df_lr_verify_solution_start (void)
1166 {
1167 basic_block bb;
1168 struct df_lr_problem_data *problem_data;
1169 if (df_lr->solutions_dirty)
1170 return;
1171
1172 /* Set it true so that the solution is recomputed. */
1173 df_lr->solutions_dirty = true;
1174
1175 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1176 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1177 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1178
1179 FOR_ALL_BB (bb)
1180 {
1181 bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
1182 bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
1183 bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
1184 bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
1185 }
1186 }
1187
1188
1189 /* Compare the saved datastructure and the new solution to the dataflow
1190 equations. */
1191
1192 static void
1193 df_lr_verify_solution_end (void)
1194 {
1195 struct df_lr_problem_data *problem_data;
1196 basic_block bb;
1197
1198 problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
1199
1200 if (!problem_data->out)
1201 return;
1202
1203 if (df_lr->solutions_dirty)
1204 /* Do not check if the solution is still dirty. See the comment
1205 in df_lr_finalize for details. */
1206 df_lr->solutions_dirty = false;
1207 else
1208 FOR_ALL_BB (bb)
1209 {
1210 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
1211 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
1212 {
1213 /*df_dump (stderr);*/
1214 gcc_unreachable ();
1215 }
1216 }
1217
1218 /* Cannot delete them immediately because you may want to dump them
1219 if the comparison fails. */
1220 FOR_ALL_BB (bb)
1221 {
1222 bitmap_clear (&problem_data->in[bb->index]);
1223 bitmap_clear (&problem_data->out[bb->index]);
1224 }
1225
1226 free (problem_data->in);
1227 free (problem_data->out);
1228 problem_data->in = NULL;
1229 problem_data->out = NULL;
1230 }
1231
1232
1233 /* All of the information associated with every instance of the problem. */
1234
1235 static struct df_problem problem_LR =
1236 {
1237 DF_LR, /* Problem id. */
1238 DF_BACKWARD, /* Direction. */
1239 df_lr_alloc, /* Allocate the problem specific data. */
1240 df_lr_reset, /* Reset global information. */
1241 df_lr_free_bb_info, /* Free basic block info. */
1242 df_lr_local_compute, /* Local compute function. */
1243 df_lr_init, /* Init the solution specific data. */
1244 df_worklist_dataflow, /* Worklist solver. */
1245 df_lr_confluence_0, /* Confluence operator 0. */
1246 df_lr_confluence_n, /* Confluence operator n. */
1247 df_lr_transfer_function, /* Transfer function. */
1248 df_lr_finalize, /* Finalize function. */
1249 df_lr_free, /* Free all of the problem information. */
1250 NULL, /* Remove this problem from the stack of dataflow problems. */
1251 NULL, /* Debugging. */
1252 df_lr_top_dump, /* Debugging start block. */
1253 df_lr_bottom_dump, /* Debugging end block. */
1254 NULL, /* Debugging start insn. */
1255 NULL, /* Debugging end insn. */
1256 df_lr_verify_solution_start,/* Incremental solution verify start. */
1257 df_lr_verify_solution_end, /* Incremental solution verify end. */
1258 NULL, /* Dependent problem. */
1259 sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
1260 TV_DF_LR, /* Timing variable. */
1261 false /* Reset blocks on dropping out of blocks_to_analyze. */
1262 };
1263
1264
1265 /* Create a new DATAFLOW instance and add it to an existing instance
1266 of DF. The returned structure is what is used to get at the
1267 solution. */
1268
1269 void
1270 df_lr_add_problem (void)
1271 {
1272 df_add_problem (&problem_LR);
1273 /* These will be initialized when df_scan_blocks processes each
1274 block. */
1275 df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1276 }
1277
1278
1279 /* Verify that all of the lr related info is consistent and
1280 correct. */
1281
1282 void
1283 df_lr_verify_transfer_functions (void)
1284 {
1285 basic_block bb;
1286 bitmap_head saved_def;
1287 bitmap_head saved_use;
1288 bitmap_head all_blocks;
1289
1290 if (!df)
1291 return;
1292
1293 bitmap_initialize (&saved_def, &bitmap_default_obstack);
1294 bitmap_initialize (&saved_use, &bitmap_default_obstack);
1295 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1296
1297 FOR_ALL_BB (bb)
1298 {
1299 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
1300 bitmap_set_bit (&all_blocks, bb->index);
1301
1302 if (bb_info)
1303 {
1304 /* Make a copy of the transfer functions and then compute
1305 new ones to see if the transfer functions have
1306 changed. */
1307 if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1308 bb->index))
1309 {
1310 bitmap_copy (&saved_def, &bb_info->def);
1311 bitmap_copy (&saved_use, &bb_info->use);
1312 bitmap_clear (&bb_info->def);
1313 bitmap_clear (&bb_info->use);
1314
1315 df_lr_bb_local_compute (bb->index);
1316 gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
1317 gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
1318 }
1319 }
1320 else
1321 {
1322 /* If we do not have basic block info, the block must be in
1323 the list of dirty blocks or else someone has added a
1324 block behind our backs. */
1325 gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
1326 bb->index));
1327 }
1328 /* Make sure no one created a block without following
1329 procedures. */
1330 gcc_assert (df_scan_get_bb_info (bb->index));
1331 }
1332
1333 /* Make sure there are no dirty bits in blocks that have been deleted. */
1334 gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
1335 &all_blocks));
1336
1337 bitmap_clear (&saved_def);
1338 bitmap_clear (&saved_use);
1339 bitmap_clear (&all_blocks);
1340 }
1341
1342
1343 \f
1344 /*----------------------------------------------------------------------------
1345 LIVE AND MUST-INITIALIZED REGISTERS.
1346
1347 This problem first computes the IN and OUT bitvectors for the
1348 must-initialized registers problems, which is a forward problem.
1349 It gives the set of registers for which we MUST have an available
1350 definition on any path from the entry block to the entry/exit of
1351 a basic block. Sets generate a definition, while clobbers kill
1352 a definition.
1353
1354 In and out bitvectors are built for each basic block and are indexed by
1355 regnum (see df.h for details). In and out bitvectors in struct
1356 df_live_bb_info actually refer to the must-initialized problem.
1357
1358 Then, the in and out sets for the LIVE problem itself are computed.
1359 These are the logical AND of the IN and OUT sets from the LR problem
1360 and the must-initialized problem.
1361 ----------------------------------------------------------------------------*/
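/* For reference, the must-initialized subproblem is solved forwards as

       OUT(bb) = (GEN(bb) & LR_OUT(bb)) U (IN(bb) - KILL(bb))
       IN(bb)  = union of OUT(pred) over the predecessors of bb,
                 trimmed to LR_IN(bb),

   and df_live_finalize then ANDs the result with the LR solution to produce
   the final LIVE in and out sets (see df_live_init,
   df_live_transfer_function and df_live_finalize).  */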
1362
1363 /* Private data used to verify the solution for this problem. */
1364 struct df_live_problem_data
1365 {
1366 bitmap_head *in;
1367 bitmap_head *out;
1368 /* An obstack for the bitmaps we need for this problem. */
1369 bitmap_obstack live_bitmaps;
1370 };
1371
1372 /* Scratch var used by transfer functions. This is used to implement
1373 an optimization to reduce the amount of space used to compute the
1374 combined lr and live analysis. */
1375 static bitmap_head df_live_scratch;
1376
1377
1378 /* Free basic block info. */
1379
1380 static void
1381 df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
1382 void *vbb_info)
1383 {
1384 struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
1385 if (bb_info)
1386 {
1387 bitmap_clear (&bb_info->gen);
1388 bitmap_clear (&bb_info->kill);
1389 bitmap_clear (&bb_info->in);
1390 bitmap_clear (&bb_info->out);
1391 }
1392 }
1393
1394
1395 /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
1396 not touched unless the block is new. */
1397
1398 static void
1399 df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
1400 {
1401 unsigned int bb_index;
1402 bitmap_iterator bi;
1403 struct df_live_problem_data *problem_data;
1404
1405 if (df_live->problem_data)
1406 problem_data = (struct df_live_problem_data *) df_live->problem_data;
1407 else
1408 {
1409 problem_data = XNEW (struct df_live_problem_data);
1410 df_live->problem_data = problem_data;
1411
1412 problem_data->out = NULL;
1413 problem_data->in = NULL;
1414 bitmap_obstack_initialize (&problem_data->live_bitmaps);
1415 bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
1416 }
1417
1418 df_grow_bb_info (df_live);
1419
1420 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
1421 {
1422 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1423
1424 /* When bitmaps are already initialized, just clear them. */
1425 if (bb_info->kill.obstack)
1426 {
1427 bitmap_clear (&bb_info->kill);
1428 bitmap_clear (&bb_info->gen);
1429 }
1430 else
1431 {
1432 bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
1433 bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
1434 bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
1435 bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
1436 }
1437 }
1438 df_live->optional_p = (optimize <= 1);
1439 }
1440
1441
1442 /* Reset the global solution for recalculation. */
1443
1444 static void
1445 df_live_reset (bitmap all_blocks)
1446 {
1447 unsigned int bb_index;
1448 bitmap_iterator bi;
1449
1450 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1451 {
1452 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1453 gcc_assert (bb_info);
1454 bitmap_clear (&bb_info->in);
1455 bitmap_clear (&bb_info->out);
1456 }
1457 }
1458
1459
1460 /* Compute local uninitialized register info for basic block BB. */
1461
1462 static void
1463 df_live_bb_local_compute (unsigned int bb_index)
1464 {
1465 basic_block bb = BASIC_BLOCK (bb_index);
1466 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1467 rtx insn;
1468 df_ref *def_rec;
1469 int luid = 0;
1470
1471 FOR_BB_INSNS (bb, insn)
1472 {
1473 unsigned int uid = INSN_UID (insn);
1474 struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
1475
1476 /* Inserting labels does not always trigger the incremental
1477 rescanning. */
1478 if (!insn_info)
1479 {
1480 gcc_assert (!INSN_P (insn));
1481 insn_info = df_insn_create_insn_record (insn);
1482 }
1483
1484 DF_INSN_INFO_LUID (insn_info) = luid;
1485 if (!INSN_P (insn))
1486 continue;
1487
1488 luid++;
1489 for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
1490 {
1491 df_ref def = *def_rec;
1492 unsigned int regno = DF_REF_REGNO (def);
1493
1494 if (DF_REF_FLAGS_IS_SET (def,
1495 DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1496 /* All partial or conditional defs
1497 seen are included in the gen set. */
1498 bitmap_set_bit (&bb_info->gen, regno);
1499 else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
1500 /* Only must clobbers for the entire reg destroy the
1501 value. */
1502 bitmap_set_bit (&bb_info->kill, regno);
1503 else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1504 bitmap_set_bit (&bb_info->gen, regno);
1505 }
1506 }
1507
1508 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
1509 {
1510 df_ref def = *def_rec;
1511 bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
1512 }
1513 }
1514
1515
1516 /* Compute local uninitialized register info. */
1517
1518 static void
1519 df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
1520 {
1521 unsigned int bb_index;
1522 bitmap_iterator bi;
1523
1524 df_grow_insn_info ();
1525
1526 EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
1527 0, bb_index, bi)
1528 {
1529 df_live_bb_local_compute (bb_index);
1530 }
1531
1532 bitmap_clear (df_live->out_of_date_transfer_functions);
1533 }
1534
1535
1536 /* Initialize the solution vectors. */
1537
1538 static void
1539 df_live_init (bitmap all_blocks)
1540 {
1541 unsigned int bb_index;
1542 bitmap_iterator bi;
1543
1544 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1545 {
1546 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1547 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1548
1549 /* No register may reach a location where it is not used. Thus
1550 we trim the rr result to the places where it is used. */
1551 bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
1552 bitmap_clear (&bb_info->in);
1553 }
1554 }
1555
1556 /* Forward confluence function that ignores fake edges. */
1557
1558 static bool
1559 df_live_confluence_n (edge e)
1560 {
1561 bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
1562 bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
1563
1564 if (e->flags & EDGE_FAKE)
1565 return false;
1566
1567 return bitmap_ior_into (op1, op2);
1568 }
1569
1570
1571 /* Transfer function for the forwards must-initialized problem. */
1572
1573 static bool
1574 df_live_transfer_function (int bb_index)
1575 {
1576 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
1577 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1578 bitmap in = &bb_info->in;
1579 bitmap out = &bb_info->out;
1580 bitmap gen = &bb_info->gen;
1581 bitmap kill = &bb_info->kill;
1582
1583 /* We need to use a scratch set here so that the value returned from this
1584 function invocation properly reflects whether the sets changed in a
1585 significant way; i.e. not just because the lr set was anded in. */
1586 bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1587 /* No register may reach a location where it is not used. Thus
1588 we trim the rr result to the places where it is used. */
1589 bitmap_and_into (in, &bb_lr_info->in);
1590
1591 return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
1592 }
1593
1594
1595 /* And the LR info with the must-initialized registers, to produce the LIVE info. */
1596
1597 static void
1598 df_live_finalize (bitmap all_blocks)
1599 {
1600
1601 if (df_live->solutions_dirty)
1602 {
1603 bitmap_iterator bi;
1604 unsigned int bb_index;
1605
1606 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1607 {
1608 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1609 struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1610
1611 /* No register may reach a location where it is not used. Thus
1612 we trim the rr result to the places where it is used. */
1613 bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
1614 bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
1615 }
1616
1617 df_live->solutions_dirty = false;
1618 }
1619 }
1620
1621
1622 /* Free all storage associated with the problem. */
1623
1624 static void
1625 df_live_free (void)
1626 {
1627 struct df_live_problem_data *problem_data
1628 = (struct df_live_problem_data *) df_live->problem_data;
1629 if (df_live->block_info)
1630 {
1631 df_live->block_info_size = 0;
1632 free (df_live->block_info);
1633 df_live->block_info = NULL;
1634 bitmap_clear (&df_live_scratch);
1635 bitmap_obstack_release (&problem_data->live_bitmaps);
1636 free (problem_data);
1637 df_live->problem_data = NULL;
1638 }
1639 BITMAP_FREE (df_live->out_of_date_transfer_functions);
1640 free (df_live);
1641 }
1642
1643
1644 /* Debugging info at top of bb. */
1645
1646 static void
1647 df_live_top_dump (basic_block bb, FILE *file)
1648 {
1649 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1650 struct df_live_problem_data *problem_data;
1651
1652 if (!bb_info)
1653 return;
1654
1655 fprintf (file, ";; live in \t");
1656 df_print_regset (file, &bb_info->in);
1657 if (df_live->problem_data)
1658 {
1659 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1660 if (problem_data->in)
1661 {
1662 fprintf (file, ";; old in \t");
1663 df_print_regset (file, &problem_data->in[bb->index]);
1664 }
1665 }
1666 fprintf (file, ";; live gen \t");
1667 df_print_regset (file, &bb_info->gen);
1668 fprintf (file, ";; live kill\t");
1669 df_print_regset (file, &bb_info->kill);
1670 }
1671
1672
1673 /* Debugging info at bottom of bb. */
1674
1675 static void
1676 df_live_bottom_dump (basic_block bb, FILE *file)
1677 {
1678 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1679 struct df_live_problem_data *problem_data;
1680
1681 if (!bb_info)
1682 return;
1683
1684 fprintf (file, ";; live out \t");
1685 df_print_regset (file, &bb_info->out);
1686 if (df_live->problem_data)
1687 {
1688 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1689 if (problem_data->out)
1690 {
1691 fprintf (file, ";; old out \t");
1692 df_print_regset (file, &problem_data->out[bb->index]);
1693 }
1694 }
1695 }
1696
1697
1698 /* Build the datastructure to verify that the solution to the dataflow
1699 equations is not dirty. */
1700
1701 static void
1702 df_live_verify_solution_start (void)
1703 {
1704 basic_block bb;
1705 struct df_live_problem_data *problem_data;
1706 if (df_live->solutions_dirty)
1707 return;
1708
1709 /* Set it true so that the solution is recomputed. */
1710 df_live->solutions_dirty = true;
1711
1712 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1713 problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
1714 problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
1715
1716 FOR_ALL_BB (bb)
1717 {
1718 bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
1719 bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
1720 bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
1721 bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
1722 }
1723 }
1724
1725
1726 /* Compare the saved datastructure and the new solution to the dataflow
1727 equations. */
1728
1729 static void
1730 df_live_verify_solution_end (void)
1731 {
1732 struct df_live_problem_data *problem_data;
1733 basic_block bb;
1734
1735 problem_data = (struct df_live_problem_data *)df_live->problem_data;
1736 if (!problem_data->out)
1737 return;
1738
1739 FOR_ALL_BB (bb)
1740 {
1741 if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
1742 || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
1743 {
1744 /*df_dump (stderr);*/
1745 gcc_unreachable ();
1746 }
1747 }
1748
1749 /* Cannot delete them immediately because you may want to dump them
1750 if the comparison fails. */
1751 FOR_ALL_BB (bb)
1752 {
1753 bitmap_clear (&problem_data->in[bb->index]);
1754 bitmap_clear (&problem_data->out[bb->index]);
1755 }
1756
1757 free (problem_data->in);
1758 free (problem_data->out);
1759 free (problem_data);
1760 df_live->problem_data = NULL;
1761 }
1762
1763
1764 /* All of the information associated with every instance of the problem. */
1765
1766 static struct df_problem problem_LIVE =
1767 {
1768 DF_LIVE, /* Problem id. */
1769 DF_FORWARD, /* Direction. */
1770 df_live_alloc, /* Allocate the problem specific data. */
1771 df_live_reset, /* Reset global information. */
1772 df_live_free_bb_info, /* Free basic block info. */
1773 df_live_local_compute, /* Local compute function. */
1774 df_live_init, /* Init the solution specific data. */
1775 df_worklist_dataflow, /* Worklist solver. */
1776 NULL, /* Confluence operator 0. */
1777 df_live_confluence_n, /* Confluence operator n. */
1778 df_live_transfer_function, /* Transfer function. */
1779 df_live_finalize, /* Finalize function. */
1780 df_live_free, /* Free all of the problem information. */
1781 df_live_free, /* Remove this problem from the stack of dataflow problems. */
1782 NULL, /* Debugging. */
1783 df_live_top_dump, /* Debugging start block. */
1784 df_live_bottom_dump, /* Debugging end block. */
1785 NULL, /* Debugging start insn. */
1786 NULL, /* Debugging end insn. */
1787 df_live_verify_solution_start,/* Incremental solution verify start. */
1788 df_live_verify_solution_end, /* Incremental solution verify end. */
1789 &problem_LR, /* Dependent problem. */
1790 sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
1791 TV_DF_LIVE, /* Timing variable. */
1792 false /* Reset blocks on dropping out of blocks_to_analyze. */
1793 };
1794
1795
1796 /* Create a new DATAFLOW instance and add it to an existing instance
1797 of DF. The returned structure is what is used to get at the
1798 solution. */
1799
1800 void
1801 df_live_add_problem (void)
1802 {
1803 df_add_problem (&problem_LIVE);
1804 /* These will be initialized when df_scan_blocks processes each
1805 block. */
1806 df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
1807 }
1808
1809
1810 /* Set all of the blocks as dirty. This needs to be done if this
1811 problem is added after all of the insns have been scanned. */
1812
1813 void
1814 df_live_set_all_dirty (void)
1815 {
1816 basic_block bb;
1817 FOR_ALL_BB (bb)
1818 bitmap_set_bit (df_live->out_of_date_transfer_functions,
1819 bb->index);
1820 }
1821
1822
1823 /* Verify that all of the live related info is consistent and
1824 correct. */
1825
1826 void
1827 df_live_verify_transfer_functions (void)
1828 {
1829 basic_block bb;
1830 bitmap_head saved_gen;
1831 bitmap_head saved_kill;
1832 bitmap_head all_blocks;
1833
1834 if (!df)
1835 return;
1836
1837 bitmap_initialize (&saved_gen, &bitmap_default_obstack);
1838 bitmap_initialize (&saved_kill, &bitmap_default_obstack);
1839 bitmap_initialize (&all_blocks, &bitmap_default_obstack);
1840
1841 df_grow_insn_info ();
1842
1843 FOR_ALL_BB (bb)
1844 {
1845 struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
1846 bitmap_set_bit (&all_blocks, bb->index);
1847
1848 if (bb_info)
1849 {
1850 /* Make a copy of the transfer functions and then compute
1851 new ones to see if the transfer functions have
1852 changed. */
1853 if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
1854 bb->index))
1855 {
1856 bitmap_copy (&saved_gen, &bb_info->gen);
1857 bitmap_copy (&saved_kill, &bb_info->kill);
1858 bitmap_clear (&bb_info->gen);
1859 bitmap_clear (&bb_info->kill);
1860
1861 df_live_bb_local_compute (bb->index);
1862 gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
1863 gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
1864 }
1865 }
1866 else
1867 {
1868 /* If we do not have basic block info, the block must be in
1869 the list of dirty blocks or else someone has added a
1870 block behind our backs. */
1871 gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
1872 bb->index));
1873 }
1874 /* Make sure no one created a block without following
1875 procedures. */
1876 gcc_assert (df_scan_get_bb_info (bb->index));
1877 }
1878
1879 /* Make sure there are no dirty bits in blocks that have been deleted. */
1880 gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
1881 &all_blocks));
1882 bitmap_clear (&saved_gen);
1883 bitmap_clear (&saved_kill);
1884 bitmap_clear (&all_blocks);
1885 }
1886 \f
1887 /*----------------------------------------------------------------------------
1888 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
1889
1890 Link either the defs to the uses and / or the uses to the defs.
1891
1892 These problems are set up like the other dataflow problems so that
1893 they nicely fit into the framework. They are much simpler and only
1894 involve a single traversal of instructions and an examination of
1895 the reaching defs information (the dependent problem).
1896 ----------------------------------------------------------------------------*/
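
/* Illustrative sketch (hypothetical, not from this file): once this
   problem has been solved, each def and use carries a NULL-terminated
   list of df_link nodes.  Walking the defs that reach a use looks
   roughly like

       struct df_link *link;
       for (link = DF_REF_CHAIN (use); link; link = link->next)
         {
           df_ref def = link->ref;
           ... inspect DF_REF_INSN (def), DF_REF_REGNO (def), ...
         }

   assuming DF_UD_CHAIN was requested; with DF_DU_CHAIN the same walk
   starting from a def yields the uses it reaches.  */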
1897
1898 #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
1899
1900 /* Create a du or ud chain from SRC to DST and link it into SRC. */
1901
1902 struct df_link *
1903 df_chain_create (df_ref src, df_ref dst)
1904 {
1905 struct df_link *head = DF_REF_CHAIN (src);
1906 struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
1907
1908 DF_REF_CHAIN (src) = link;
1909 link->next = head;
1910 link->ref = dst;
1911 return link;
1912 }
1913
1914
1915 /* Delete any du or ud chains that start at REF and point to
1916 TARGET. */
1917 static void
1918 df_chain_unlink_1 (df_ref ref, df_ref target)
1919 {
1920 struct df_link *chain = DF_REF_CHAIN (ref);
1921 struct df_link *prev = NULL;
1922
1923 while (chain)
1924 {
1925 if (chain->ref == target)
1926 {
1927 if (prev)
1928 prev->next = chain->next;
1929 else
1930 DF_REF_CHAIN (ref) = chain->next;
1931 pool_free (df_chain->block_pool, chain);
1932 return;
1933 }
1934 prev = chain;
1935 chain = chain->next;
1936 }
1937 }
1938
1939
1940 /* Delete the du or ud chains that start at or point to REF. */
1941
1942 void
1943 df_chain_unlink (df_ref ref)
1944 {
1945 struct df_link *chain = DF_REF_CHAIN (ref);
1946 while (chain)
1947 {
1948 struct df_link *next = chain->next;
1949 /* Delete the other side if it exists. */
1950 df_chain_unlink_1 (chain->ref, ref);
1951 pool_free (df_chain->block_pool, chain);
1952 chain = next;
1953 }
1954 DF_REF_CHAIN (ref) = NULL;
1955 }
1956
1957
1958 /* Copy the du or ud chain starting at FROM_REF and attach it to
1959 TO_REF. */
1960
1961 void
1962 df_chain_copy (df_ref to_ref,
1963 struct df_link *from_ref)
1964 {
1965 while (from_ref)
1966 {
1967 df_chain_create (to_ref, from_ref->ref);
1968 from_ref = from_ref->next;
1969 }
1970 }
1971
1972
1973 /* Remove this problem from the stack of dataflow problems. */
1974
1975 static void
1976 df_chain_remove_problem (void)
1977 {
1978 bitmap_iterator bi;
1979 unsigned int bb_index;
1980
1981 /* Wholesale destruction of the old chains. */
1982 if (df_chain->block_pool)
1983 free_alloc_pool (df_chain->block_pool);
1984
1985 EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
1986 {
1987 rtx insn;
1988 df_ref *def_rec;
1989 df_ref *use_rec;
1990 basic_block bb = BASIC_BLOCK (bb_index);
1991
1992 if (df_chain_problem_p (DF_DU_CHAIN))
1993 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
1994 DF_REF_CHAIN (*def_rec) = NULL;
1995 if (df_chain_problem_p (DF_UD_CHAIN))
1996 for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
1997 DF_REF_CHAIN (*use_rec) = NULL;
1998
1999 FOR_BB_INSNS (bb, insn)
2000 {
2001 unsigned int uid = INSN_UID (insn);
2002
2003 if (INSN_P (insn))
2004 {
2005 if (df_chain_problem_p (DF_DU_CHAIN))
2006 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2007 DF_REF_CHAIN (*def_rec) = NULL;
2008 if (df_chain_problem_p (DF_UD_CHAIN))
2009 {
2010 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2011 DF_REF_CHAIN (*use_rec) = NULL;
2012 for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
2013 DF_REF_CHAIN (*use_rec) = NULL;
2014 }
2015 }
2016 }
2017 }
2018
2019 bitmap_clear (df_chain->out_of_date_transfer_functions);
2020 df_chain->block_pool = NULL;
2021 }
2022
2023
2024 /* Remove the chain problem completely. */
2025
2026 static void
2027 df_chain_fully_remove_problem (void)
2028 {
2029 df_chain_remove_problem ();
2030 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2031 free (df_chain);
2032 }
2033
2034
2035 /* Create def-use or use-def chains. */
2036
2037 static void
2038 df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2039 {
2040 df_chain_remove_problem ();
2041 df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
2042 sizeof (struct df_link), 50);
2043 df_chain->optional_p = true;
2044 }
2045
2046
2047 /* Reset all of the chains when the set of basic blocks changes. */
2048
2049 static void
2050 df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
2051 {
2052 df_chain_remove_problem ();
2053 }
2054
2055
2056 /* Create the chains for a list of USEs. */
2057
2058 static void
2059 df_chain_create_bb_process_use (bitmap local_rd,
2060 df_ref *use_rec,
2061 int top_flag)
2062 {
2063 bitmap_iterator bi;
2064 unsigned int def_index;
2065
2066 while (*use_rec)
2067 {
2068 df_ref use = *use_rec;
2069 unsigned int uregno = DF_REF_REGNO (use);
2070 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
2071 || (uregno >= FIRST_PSEUDO_REGISTER))
2072 {
2073 /* Do not want to go through this for an uninitialized var. */
2074 int count = DF_DEFS_COUNT (uregno);
2075 if (count)
2076 {
2077 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2078 {
2079 unsigned int first_index = DF_DEFS_BEGIN (uregno);
2080 unsigned int last_index = first_index + count - 1;
2081
2082 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2083 {
2084 df_ref def;
2085 if (def_index > last_index)
2086 break;
2087
2088 def = DF_DEFS_GET (def_index);
2089 if (df_chain_problem_p (DF_DU_CHAIN))
2090 df_chain_create (def, use);
2091 if (df_chain_problem_p (DF_UD_CHAIN))
2092 df_chain_create (use, def);
2093 }
2094 }
2095 }
2096 }
2097
2098 use_rec++;
2099 }
2100 }
2101
2102
2103 /* Create chains from reaching defs bitmaps for basic block BB. */
2104
2105 static void
2106 df_chain_create_bb (unsigned int bb_index)
2107 {
2108 basic_block bb = BASIC_BLOCK (bb_index);
2109 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
2110 rtx insn;
2111 bitmap_head cpy;
2112
2113 bitmap_initialize (&cpy, &bitmap_default_obstack);
2114 bitmap_copy (&cpy, &bb_info->in);
2115 bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
2116
2117 /* Since we are going forwards, process the artificial uses first,
2118 then the artificial defs. */
2119
2120 #ifdef EH_USES
2121 /* Create the chains for the artificial uses from the EH_USES at the
2122 beginning of the block. */
2123
2124 /* Artificials are only hard regs. */
2125 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2126 df_chain_create_bb_process_use (&cpy,
2127 df_get_artificial_uses (bb->index),
2128 DF_REF_AT_TOP);
2129 #endif
2130
2131 df_rd_simulate_artificial_defs_at_top (bb, &cpy);
2132
2133 /* Process the regular instructions next. */
2134 FOR_BB_INSNS (bb, insn)
2135 if (INSN_P (insn))
2136 {
2137 unsigned int uid = INSN_UID (insn);
2138
2139 /* First scan the uses and link them up with the defs that remain
2140 in the cpy vector. */
2141 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
2142 if (df->changeable_flags & DF_EQ_NOTES)
2143 df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
2144
2145 /* Since we are going forwards, process the defs second. */
2146 df_rd_simulate_one_insn (bb, insn, &cpy);
2147 }
2148
2149 /* Create the chains for the artificial uses of the hard registers
2150 at the end of the block. */
2151 if (!(df->changeable_flags & DF_NO_HARD_REGS))
2152 df_chain_create_bb_process_use (&cpy,
2153 df_get_artificial_uses (bb->index),
2154 0);
2155
2156 bitmap_clear (&cpy);
2157 }
2158
2159 /* Create du and/or ud chains from the reaching defs bitmaps for the
2160 basic blocks in ALL_BLOCKS. */
2161
2162 static void
2163 df_chain_finalize (bitmap all_blocks)
2164 {
2165 unsigned int bb_index;
2166 bitmap_iterator bi;
2167
2168 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2169 {
2170 df_chain_create_bb (bb_index);
2171 }
2172 }
2173
2174
2175 /* Free all storage associated with the problem. */
2176
2177 static void
2178 df_chain_free (void)
2179 {
2180 free_alloc_pool (df_chain->block_pool);
2181 BITMAP_FREE (df_chain->out_of_date_transfer_functions);
2182 free (df_chain);
2183 }
2184
2185
2186 /* Debugging info. */
2187
2188 static void
2189 df_chain_bb_dump (basic_block bb, FILE *file, bool top)
2190 {
2191 /* Artificials are only hard regs. */
2192 if (df->changeable_flags & DF_NO_HARD_REGS)
2193 return;
2194 if (df_chain_problem_p (DF_UD_CHAIN))
2195 {
2196 fprintf (file,
2197 ";; UD chains for artificial uses at %s\n",
2198 top ? "top" : "bottom");
2199 df_ref *use_rec = df_get_artificial_uses (bb->index);
2200 if (*use_rec)
2201 {
2202 while (*use_rec)
2203 {
2204 df_ref use = *use_rec;
2205 if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2206 || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
2207 {
2208 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2209 df_chain_dump (DF_REF_CHAIN (use), file);
2210 fprintf (file, "\n");
2211 }
2212 use_rec++;
2213 }
2214 }
2215 }
2216 if (df_chain_problem_p (DF_DU_CHAIN))
2217 {
2218 fprintf (file,
2219 ";; DU chains for artificial defs at %s\n",
2220 top ? "top" : "bottom");
2221 df_ref *def_rec = df_get_artificial_defs (bb->index);
2222 if (*def_rec)
2223 {
2224 while (*def_rec)
2225 {
2226 df_ref def = *def_rec;
2227
2228 if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
2229 || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
2230 {
2231 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2232 df_chain_dump (DF_REF_CHAIN (def), file);
2233 fprintf (file, "\n");
2234 }
2235 def_rec++;
2236 }
2237 }
2238 }
2239 }
2240
2241 static void
2242 df_chain_top_dump (basic_block bb, FILE *file)
2243 {
2244 df_chain_bb_dump (bb, file, /*top=*/true);
2245 }
2246
2247 static void
2248 df_chain_bottom_dump (basic_block bb, FILE *file)
2249 {
2250 df_chain_bb_dump (bb, file, /*top=*/false);
2251 }
2252
2253 static void
2254 df_chain_insn_top_dump (const_rtx insn, FILE *file)
2255 {
2256 if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
2257 {
2258 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2259 df_ref *use_rec = DF_INSN_INFO_USES (insn_info);
2260 df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
2261 fprintf (file, ";; UD chains for insn luid %d uid %d\n",
2262 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2263 if (*use_rec || *eq_use_rec)
2264 {
2265 while (*use_rec)
2266 {
2267 df_ref use = *use_rec;
2268 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2269 || !(df->changeable_flags & DF_NO_HARD_REGS))
2270 {
2271 fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
2272 if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
2273 fprintf (file, "read/write ");
2274 df_chain_dump (DF_REF_CHAIN (use), file);
2275 fprintf (file, "\n");
2276 }
2277 use_rec++;
2278 }
2279 while (*eq_use_rec)
2280 {
2281 df_ref use = *eq_use_rec;
2282 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
2283 || !(df->changeable_flags & DF_NO_HARD_REGS))
2284 {
2285 fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
2286 df_chain_dump (DF_REF_CHAIN (use), file);
2287 fprintf (file, "\n");
2288 }
2289 eq_use_rec++;
2290 }
2291 }
2292 }
2293 }
2294
2295 static void
2296 df_chain_insn_bottom_dump (const_rtx insn, FILE *file)
2297 {
2298 if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
2299 {
2300 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
2301 df_ref *def_rec = DF_INSN_INFO_DEFS (insn_info);
2302 fprintf (file, ";; DU chains for insn luid %d uid %d\n",
2303 DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
2304 if (*def_rec)
2305 {
2306 while (*def_rec)
2307 {
2308 df_ref def = *def_rec;
2309 if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
2310 || !(df->changeable_flags & DF_NO_HARD_REGS))
2311 {
2312 fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
2313 if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
2314 fprintf (file, "read/write ");
2315 df_chain_dump (DF_REF_CHAIN (def), file);
2316 fprintf (file, "\n");
2317 }
2318 def_rec++;
2319 }
2320 }
2321 fprintf (file, "\n");
2322 }
2323 }
2324
2325 static struct df_problem problem_CHAIN =
2326 {
2327 DF_CHAIN, /* Problem id. */
2328 DF_NONE, /* Direction. */
2329 df_chain_alloc, /* Allocate the problem specific data. */
2330 df_chain_reset, /* Reset global information. */
2331 NULL, /* Free basic block info. */
2332 NULL, /* Local compute function. */
2333 NULL, /* Init the solution specific data. */
2334 NULL, /* Iterative solver. */
2335 NULL, /* Confluence operator 0. */
2336 NULL, /* Confluence operator n. */
2337 NULL, /* Transfer function. */
2338 df_chain_finalize, /* Finalize function. */
2339 df_chain_free, /* Free all of the problem information. */
2340 df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
2341 NULL, /* Debugging. */
2342 df_chain_top_dump, /* Debugging start block. */
2343 df_chain_bottom_dump, /* Debugging end block. */
2344 df_chain_insn_top_dump, /* Debugging start insn. */
2345 df_chain_insn_bottom_dump, /* Debugging end insn. */
2346 NULL, /* Incremental solution verify start. */
2347 NULL, /* Incremental solution verify end. */
2348 &problem_RD, /* Dependent problem. */
2349 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
2350 TV_DF_CHAIN, /* Timing variable. */
2351 false /* Reset blocks on dropping out of blocks_to_analyze. */
2352 };
2353
2354
2355 /* Create a new DATAFLOW instance and add it to an existing instance
2356 of DF. The returned structure is what is used to get at the
2357 solution. */
2358
2359 void
2360 df_chain_add_problem (unsigned int chain_flags)
2361 {
2362 df_add_problem (&problem_CHAIN);
2363 df_chain->local_flags = chain_flags;
2364 df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2365 }
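
/* Illustrative sketch (hypothetical usage, not from this file): a
   client asks for chains by combining the flags it needs and then
   running the solver, e.g.

       df_chain_add_problem (DF_DU_CHAIN + DF_UD_CHAIN);
       df_analyze ();

   after which DF_REF_CHAIN is valid on every def and use until the
   chains are invalidated by changes to the insns.  */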
2366
2367 #undef df_chain_problem_p
2368
2369 \f
2370 /*----------------------------------------------------------------------------
2371 WORD LEVEL LIVE REGISTERS
2372
2373 Find the locations in the function where any use of a pseudo can
2374 reach in the backwards direction. In and out bitvectors are built
2375 for each basic block. We only track pseudo registers that have a
2376 size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
2377 contain two bits corresponding to each of the subwords.
2378
2379 ----------------------------------------------------------------------------*/
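
/* Worked example (illustrative only): on a 32-bit target with
   UNITS_PER_WORD == 4, a DImode pseudo with regno 100 occupies two
   words, so it is tracked by bits 200 (low word) and 201 (high word)
   of the use/def/in/out bitmaps.  A pseudo of any other size, or any
   hard register, is simply not tracked by this problem.  */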
2380
2381 /* Private data used to verify the solution for this problem. */
2382 struct df_word_lr_problem_data
2383 {
2384 /* An obstack for the bitmaps we need for this problem. */
2385 bitmap_obstack word_lr_bitmaps;
2386 };
2387
2388
2389 /* Free basic block info. */
2390
2391 static void
2392 df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
2393 void *vbb_info)
2394 {
2395 struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
2396 if (bb_info)
2397 {
2398 bitmap_clear (&bb_info->use);
2399 bitmap_clear (&bb_info->def);
2400 bitmap_clear (&bb_info->in);
2401 bitmap_clear (&bb_info->out);
2402 }
2403 }
2404
2405
2406 /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
2407 not touched unless the block is new. */
2408
2409 static void
2410 df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2411 {
2412 unsigned int bb_index;
2413 bitmap_iterator bi;
2414 basic_block bb;
2415 struct df_word_lr_problem_data *problem_data
2416 = XNEW (struct df_word_lr_problem_data);
2417
2418 df_word_lr->problem_data = problem_data;
2419
2420 df_grow_bb_info (df_word_lr);
2421
2422 /* Create the mapping from regnos to slots. This does not change
2423 unless the problem is destroyed and recreated. In particular, if
2424 we end up deleting the only insn that used a subreg, we do not
2425 want to redo the mapping because this would invalidate everything
2426 else. */
2427
2428 bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
2429
2430 FOR_EACH_BB (bb)
2431 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
2432
2433 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
2434 bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
2435
2436 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2437 {
2438 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2439
2440 /* When bitmaps are already initialized, just clear them. */
2441 if (bb_info->use.obstack)
2442 {
2443 bitmap_clear (&bb_info->def);
2444 bitmap_clear (&bb_info->use);
2445 }
2446 else
2447 {
2448 bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
2449 bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
2450 bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
2451 bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
2452 }
2453 }
2454
2455 df_word_lr->optional_p = true;
2456 }
2457
2458
2459 /* Reset the global solution for recalculation. */
2460
2461 static void
2462 df_word_lr_reset (bitmap all_blocks)
2463 {
2464 unsigned int bb_index;
2465 bitmap_iterator bi;
2466
2467 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2468 {
2469 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2470 gcc_assert (bb_info);
2471 bitmap_clear (&bb_info->in);
2472 bitmap_clear (&bb_info->out);
2473 }
2474 }
2475
2476 /* Examine REF, and if it is for a reg we're interested in, set or
2477 clear the bits corresponding to its subwords from the bitmap
2478 according to IS_SET. LIVE is the bitmap we should update. We do
2479 not track hard regs or pseudos of any size other than 2 *
2480 UNITS_PER_WORD.
2481 We return true if we changed the bitmap, or if we encountered a register
2482 we're not tracking. */
2483
2484 bool
2485 df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
2486 {
2487 rtx orig_reg = DF_REF_REG (ref);
2488 rtx reg = orig_reg;
2489 enum machine_mode reg_mode;
2490 unsigned regno;
2491 /* Left at -1 for whole accesses. */
2492 int which_subword = -1;
2493 bool changed = false;
2494
2495 if (GET_CODE (reg) == SUBREG)
2496 reg = SUBREG_REG (orig_reg);
2497 regno = REGNO (reg);
2498 reg_mode = GET_MODE (reg);
2499 if (regno < FIRST_PSEUDO_REGISTER
2500 || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
2501 return true;
2502
2503 if (GET_CODE (orig_reg) == SUBREG
2504 && df_read_modify_subreg_p (orig_reg))
2505 {
2506 gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
2507 if (subreg_lowpart_p (orig_reg))
2508 which_subword = 0;
2509 else
2510 which_subword = 1;
2511 }
2512 if (is_set)
2513 {
2514 if (which_subword != 1)
2515 changed |= bitmap_set_bit (live, regno * 2);
2516 if (which_subword != 0)
2517 changed |= bitmap_set_bit (live, regno * 2 + 1);
2518 }
2519 else
2520 {
2521 if (which_subword != 1)
2522 changed |= bitmap_clear_bit (live, regno * 2);
2523 if (which_subword != 0)
2524 changed |= bitmap_clear_bit (live, regno * 2 + 1);
2525 }
2526 return changed;
2527 }
2528
2529 /* Compute local live register info for basic block BB. */
2530
2531 static void
2532 df_word_lr_bb_local_compute (unsigned int bb_index)
2533 {
2534 basic_block bb = BASIC_BLOCK (bb_index);
2535 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2536 rtx insn;
2537 df_ref *def_rec;
2538 df_ref *use_rec;
2539
2540 /* Ensure that artificial refs don't contain references to pseudos. */
2541 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
2542 {
2543 df_ref def = *def_rec;
2544 gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
2545 }
2546
2547 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
2548 {
2549 df_ref use = *use_rec;
2550 gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
2551 }
2552
2553 FOR_BB_INSNS_REVERSE (bb, insn)
2554 {
2555 unsigned int uid = INSN_UID (insn);
2556
2557 if (!NONDEBUG_INSN_P (insn))
2558 continue;
2559 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2560 {
2561 df_ref def = *def_rec;
2562 /* If the def is conditional, it may not always take effect,
2563 so it does not kill the other defs that reach here. */
2564 if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
2565 {
2566 df_word_lr_mark_ref (def, true, &bb_info->def);
2567 df_word_lr_mark_ref (def, false, &bb_info->use);
2568 }
2569 }
2570 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2571 {
2572 df_ref use = *use_rec;
2573 df_word_lr_mark_ref (use, true, &bb_info->use);
2574 }
2575 }
2576 }
2577
2578
2579 /* Compute local live register info for each basic block within BLOCKS. */
2580
2581 static void
2582 df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
2583 {
2584 unsigned int bb_index;
2585 bitmap_iterator bi;
2586
2587 EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
2588 {
2589 if (bb_index == EXIT_BLOCK)
2590 {
2591 unsigned regno;
2592 bitmap_iterator bi;
2593 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
2594 regno, bi)
2595 gcc_unreachable ();
2596 }
2597 else
2598 df_word_lr_bb_local_compute (bb_index);
2599 }
2600
2601 bitmap_clear (df_word_lr->out_of_date_transfer_functions);
2602 }
2603
2604
2605 /* Initialize the solution vectors. */
2606
2607 static void
2608 df_word_lr_init (bitmap all_blocks)
2609 {
2610 unsigned int bb_index;
2611 bitmap_iterator bi;
2612
2613 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2614 {
2615 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2616 bitmap_copy (&bb_info->in, &bb_info->use);
2617 bitmap_clear (&bb_info->out);
2618 }
2619 }
2620
2621
2622 /* Confluence function that ignores fake edges. */
2623
2624 static bool
2625 df_word_lr_confluence_n (edge e)
2626 {
2627 bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
2628 bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
2629
2630 return bitmap_ior_into (op1, op2);
2631 }
2632
2633
2634 /* Transfer function. */
2635
2636 static bool
2637 df_word_lr_transfer_function (int bb_index)
2638 {
2639 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
2640 bitmap in = &bb_info->in;
2641 bitmap out = &bb_info->out;
2642 bitmap use = &bb_info->use;
2643 bitmap def = &bb_info->def;
2644
2645 return bitmap_ior_and_compl (in, use, out, def);
2646 }
2647
2648
2649 /* Free all storage associated with the problem. */
2650
2651 static void
2652 df_word_lr_free (void)
2653 {
2654 struct df_word_lr_problem_data *problem_data
2655 = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
2656
2657 if (df_word_lr->block_info)
2658 {
2659 df_word_lr->block_info_size = 0;
2660 free (df_word_lr->block_info);
2661 df_word_lr->block_info = NULL;
2662 }
2663
2664 BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
2665 bitmap_obstack_release (&problem_data->word_lr_bitmaps);
2666 free (problem_data);
2667 free (df_word_lr);
2668 }
2669
2670
2671 /* Debugging info at top of bb. */
2672
2673 static void
2674 df_word_lr_top_dump (basic_block bb, FILE *file)
2675 {
2676 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2677 if (!bb_info)
2678 return;
2679
2680 fprintf (file, ";; blr in \t");
2681 df_print_word_regset (file, &bb_info->in);
2682 fprintf (file, ";; blr use \t");
2683 df_print_word_regset (file, &bb_info->use);
2684 fprintf (file, ";; blr def \t");
2685 df_print_word_regset (file, &bb_info->def);
2686 }
2687
2688
2689 /* Debugging info at bottom of bb. */
2690
2691 static void
2692 df_word_lr_bottom_dump (basic_block bb, FILE *file)
2693 {
2694 struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
2695 if (!bb_info)
2696 return;
2697
2698 fprintf (file, ";; blr out \t");
2699 df_print_word_regset (file, &bb_info->out);
2700 }
2701
2702
2703 /* All of the information associated with every instance of the problem. */
2704
2705 static struct df_problem problem_WORD_LR =
2706 {
2707 DF_WORD_LR, /* Problem id. */
2708 DF_BACKWARD, /* Direction. */
2709 df_word_lr_alloc, /* Allocate the problem specific data. */
2710 df_word_lr_reset, /* Reset global information. */
2711 df_word_lr_free_bb_info, /* Free basic block info. */
2712 df_word_lr_local_compute, /* Local compute function. */
2713 df_word_lr_init, /* Init the solution specific data. */
2714 df_worklist_dataflow, /* Worklist solver. */
2715 NULL, /* Confluence operator 0. */
2716 df_word_lr_confluence_n, /* Confluence operator n. */
2717 df_word_lr_transfer_function, /* Transfer function. */
2718 NULL, /* Finalize function. */
2719 df_word_lr_free, /* Free all of the problem information. */
2720 df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
2721 NULL, /* Debugging. */
2722 df_word_lr_top_dump, /* Debugging start block. */
2723 df_word_lr_bottom_dump, /* Debugging end block. */
2724 NULL, /* Debugging start insn. */
2725 NULL, /* Debugging end insn. */
2726 NULL, /* Incremental solution verify start. */
2727 NULL, /* Incremental solution verify end. */
2728 NULL, /* Dependent problem. */
2729 sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
2730 TV_DF_WORD_LR, /* Timing variable. */
2731 false /* Reset blocks on dropping out of blocks_to_analyze. */
2732 };
2733
2734
2735 /* Create a new DATAFLOW instance and add it to an existing instance
2736 of DF. The returned structure is what is used to get at the
2737 solution. */
2738
2739 void
2740 df_word_lr_add_problem (void)
2741 {
2742 df_add_problem (&problem_WORD_LR);
2743 /* These will be initialized when df_scan_blocks processes each
2744 block. */
2745 df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
2746 }
2747
2748
2749 /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
2750 any bits, which is used by the caller to determine whether a set is
2751 necessary. We also return true if there are other reasons not to delete
2752 an insn. */
2753
2754 bool
2755 df_word_lr_simulate_defs (rtx insn, bitmap live)
2756 {
2757 bool changed = false;
2758 df_ref *def_rec;
2759 unsigned int uid = INSN_UID (insn);
2760
2761 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
2762 {
2763 df_ref def = *def_rec;
2764 if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
2765 changed = true;
2766 else
2767 changed |= df_word_lr_mark_ref (*def_rec, false, live);
2768 }
2769 return changed;
2770 }
2771
2772
2773 /* Simulate the effects of the uses of INSN on LIVE. */
2774
2775 void
2776 df_word_lr_simulate_uses (rtx insn, bitmap live)
2777 {
2778 df_ref *use_rec;
2779 unsigned int uid = INSN_UID (insn);
2780
2781 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
2782 df_word_lr_mark_ref (*use_rec, true, live);
2783 }
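
/* Illustrative sketch (hypothetical, not from this file): a
   word-level dead-code check scans a block BB backwards, maintaining
   a copy of the word-level live set and using the two simulate
   functions above, roughly

       bitmap_head wlive;
       rtx insn;

       bitmap_initialize (&wlive, &bitmap_default_obstack);
       bitmap_copy (&wlive, &df_word_lr_get_bb_info (bb->index)->out);
       FOR_BB_INSNS_REVERSE (bb, insn)
         if (NONDEBUG_INSN_P (insn))
           {
             bool needed = df_word_lr_simulate_defs (insn, &wlive);
             df_word_lr_simulate_uses (insn, &wlive);
           }
       bitmap_clear (&wlive);

   where a false NEEDED means that none of the tracked subwords
   written by INSN are live below it.  A real client will differ in
   the details.  */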
2784 \f
2785 /*----------------------------------------------------------------------------
2786 This problem computes REG_DEAD and REG_UNUSED notes.
2787 ----------------------------------------------------------------------------*/
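
/* Illustrative sketch (hypothetical usage, not from this file): a
   pass that needs up-to-date REG_DEAD / REG_UNUSED notes typically
   requests this problem and re-solves, e.g.

       df_note_add_problem ();
       df_analyze ();

   after which find_reg_note (insn, REG_DEAD, reg) and
   find_reg_note (insn, REG_UNUSED, reg) reflect the recomputed
   information.  */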
2788
2789 static void
2790 df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
2791 {
2792 df_note->optional_p = true;
2793 }
2794
2795 /* This is only used if REG_DEAD_DEBUGGING is in effect. */
2796 static void
2797 df_print_note (const char *prefix, rtx insn, rtx note)
2798 {
2799 if (dump_file)
2800 {
2801 fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
2802 print_rtl (dump_file, note);
2803 fprintf (dump_file, "\n");
2804 }
2805 }
2806
2807
2808 /* After reg-stack, the x86 floating point stack regs are difficult to
2809 analyze because of all of the pushes, pops and rotations. Thus, we
2810 just leave the notes alone. */
2811
2812 #ifdef STACK_REGS
2813 static inline bool
2814 df_ignore_stack_reg (int regno)
2815 {
2816 return regstack_completed
2817 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
2818 }
2819 #else
2820 static inline bool
2821 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
2822 {
2823 return false;
2824 }
2825 #endif
2826
2827
2828 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
2829
2830 static void
2831 df_remove_dead_and_unused_notes (rtx insn)
2832 {
2833 rtx *pprev = &REG_NOTES (insn);
2834 rtx link = *pprev;
2835
2836 while (link)
2837 {
2838 switch (REG_NOTE_KIND (link))
2839 {
2840 case REG_DEAD:
2841 /* After reg-stack, we need to ignore any unused notes
2842 for the stack registers. */
2843 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2844 {
2845 pprev = &XEXP (link, 1);
2846 link = *pprev;
2847 }
2848 else
2849 {
2850 rtx next = XEXP (link, 1);
2851 if (REG_DEAD_DEBUGGING)
2852 df_print_note ("deleting: ", insn, link);
2853 free_EXPR_LIST_node (link);
2854 *pprev = link = next;
2855 }
2856 break;
2857
2858 case REG_UNUSED:
2859 /* After reg-stack, we need to ignore any unused notes
2860 for the stack registers. */
2861 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
2862 {
2863 pprev = &XEXP (link, 1);
2864 link = *pprev;
2865 }
2866 else
2867 {
2868 rtx next = XEXP (link, 1);
2869 if (REG_DEAD_DEBUGGING)
2870 df_print_note ("deleting: ", insn, link);
2871 free_EXPR_LIST_node (link);
2872 *pprev = link = next;
2873 }
2874 break;
2875
2876 default:
2877 pprev = &XEXP (link, 1);
2878 link = *pprev;
2879 break;
2880 }
2881 }
2882 }
2883
2884 /* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE
2885 as the bitmap of currently live registers. */
2886
2887 static void
2888 df_remove_dead_eq_notes (rtx insn, bitmap live)
2889 {
2890 rtx *pprev = &REG_NOTES (insn);
2891 rtx link = *pprev;
2892
2893 while (link)
2894 {
2895 switch (REG_NOTE_KIND (link))
2896 {
2897 case REG_EQUAL:
2898 case REG_EQUIV:
2899 {
2900 /* Remove the notes that refer to dead registers. As we have at most
2901 one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
2902 so we need to purge the complete EQ_USES vector when removing
2903 the note using df_notes_rescan. */
2904 df_ref *use_rec;
2905 bool deleted = false;
2906
2907 for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
2908 {
2909 df_ref use = *use_rec;
2910 if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
2911 && DF_REF_LOC (use)
2912 && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
2913 && ! bitmap_bit_p (live, DF_REF_REGNO (use))
2914 && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
2915 {
2916 deleted = true;
2917 break;
2918 }
2919 }
2920 if (deleted)
2921 {
2922 rtx next;
2923 if (REG_DEAD_DEBUGGING)
2924 df_print_note ("deleting: ", insn, link);
2925 next = XEXP (link, 1);
2926 free_EXPR_LIST_node (link);
2927 *pprev = link = next;
2928 df_notes_rescan (insn);
2929 }
2930 else
2931 {
2932 pprev = &XEXP (link, 1);
2933 link = *pprev;
2934 }
2935 break;
2936 }
2937
2938 default:
2939 pprev = &XEXP (link, 1);
2940 link = *pprev;
2941 break;
2942 }
2943 }
2944 }
2945
2946 /* Set a NOTE_TYPE note for REG in INSN. */
2947
2948 static inline void
2949 df_set_note (enum reg_note note_type, rtx insn, rtx reg)
2950 {
2951 gcc_checking_assert (!DEBUG_INSN_P (insn));
2952 add_reg_note (insn, note_type, reg);
2953 }
2954
2955 /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
2956 arguments. Return true if the register value described by MWS's
2957 mw_reg is known to be completely unused, and if mw_reg can therefore
2958 be used in a REG_UNUSED note. */
2959
2960 static bool
2961 df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
2962 bitmap live, bitmap artificial_uses)
2963 {
2964 unsigned int r;
2965
2966 /* If MWS describes a partial reference, create REG_UNUSED notes for
2967 individual hard registers. */
2968 if (mws->flags & DF_REF_PARTIAL)
2969 return false;
2970
2971 /* Likewise if some part of the register is used. */
2972 for (r = mws->start_regno; r <= mws->end_regno; r++)
2973 if (bitmap_bit_p (live, r)
2974 || bitmap_bit_p (artificial_uses, r))
2975 return false;
2976
2977 gcc_assert (REG_P (mws->mw_reg));
2978 return true;
2979 }
2980
2981
2982 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
2983 based on the bits in LIVE. Do not generate notes for registers in
2984 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
2985 not generated if the reg is both read and written by the
2986 instruction.
2987 */
2988
2989 static void
2990 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
2991 bitmap live, bitmap do_not_gen,
2992 bitmap artificial_uses,
2993 struct dead_debug_local *debug)
2994 {
2995 unsigned int r;
2996
2997 if (REG_DEAD_DEBUGGING && dump_file)
2998 fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
2999 mws->start_regno, mws->end_regno);
3000
3001 if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
3002 {
3003 unsigned int regno = mws->start_regno;
3004 df_set_note (REG_UNUSED, insn, mws->mw_reg);
3005 dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3006
3007 if (REG_DEAD_DEBUGGING)
3008 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3009
3010 bitmap_set_bit (do_not_gen, regno);
3011 /* Only do this if the value is totally dead. */
3012 }
3013 else
3014 for (r = mws->start_regno; r <= mws->end_regno; r++)
3015 {
3016 if (!bitmap_bit_p (live, r)
3017 && !bitmap_bit_p (artificial_uses, r))
3018 {
3019 df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
3020 dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
3021 if (REG_DEAD_DEBUGGING)
3022 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3023 }
3024 bitmap_set_bit (do_not_gen, r);
3025 }
3026 }
3027
3028
3029 /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
3030 arguments. Return true if the register value described by MWS's
3031 mw_reg is known to be completely dead, and if mw_reg can therefore
3032 be used in a REG_DEAD note. */
3033
3034 static bool
3035 df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
3036 bitmap live, bitmap artificial_uses,
3037 bitmap do_not_gen)
3038 {
3039 unsigned int r;
3040
3041 /* If MWS describes a partial reference, create REG_DEAD notes for
3042 individual hard registers. */
3043 if (mws->flags & DF_REF_PARTIAL)
3044 return false;
3045
3046 /* Likewise if some part of the register is not dead. */
3047 for (r = mws->start_regno; r <= mws->end_regno; r++)
3048 if (bitmap_bit_p (live, r)
3049 || bitmap_bit_p (artificial_uses, r)
3050 || bitmap_bit_p (do_not_gen, r))
3051 return false;
3052
3053 gcc_assert (REG_P (mws->mw_reg));
3054 return true;
3055 }
3056
3057 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3058 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
3059 from being set if the instruction both reads and writes the
3060 register. */
3061
3062 static void
3063 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3064 bitmap live, bitmap do_not_gen,
3065 bitmap artificial_uses, bool *added_notes_p)
3066 {
3067 unsigned int r;
3068 bool is_debug = *added_notes_p;
3069
3070 *added_notes_p = false;
3071
3072 if (REG_DEAD_DEBUGGING && dump_file)
3073 {
3074 fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
3075 mws->start_regno, mws->end_regno);
3076 df_print_regset (dump_file, do_not_gen);
3077 fprintf (dump_file, " live =");
3078 df_print_regset (dump_file, live);
3079 fprintf (dump_file, " artificial uses =");
3080 df_print_regset (dump_file, artificial_uses);
3081 }
3082
3083 if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
3084 {
3085 if (is_debug)
3086 {
3087 *added_notes_p = true;
3088 return;
3089 }
3090 /* Add a dead note for the entire multi word register. */
3091 df_set_note (REG_DEAD, insn, mws->mw_reg);
3092 if (REG_DEAD_DEBUGGING)
3093 df_print_note ("adding 1: ", insn, REG_NOTES (insn));
3094 }
3095 else
3096 {
3097 for (r = mws->start_regno; r <= mws->end_regno; r++)
3098 if (!bitmap_bit_p (live, r)
3099 && !bitmap_bit_p (artificial_uses, r)
3100 && !bitmap_bit_p (do_not_gen, r))
3101 {
3102 if (is_debug)
3103 {
3104 *added_notes_p = true;
3105 return;
3106 }
3107 df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
3108 if (REG_DEAD_DEBUGGING)
3109 df_print_note ("adding 2: ", insn, REG_NOTES (insn));
3110 }
3111 }
3112 return;
3113 }
3114
3115
3116 /* Create a REG_UNUSED note if necessary for DEF in INSN updating
3117 LIVE. Do not generate notes for registers in ARTIFICIAL_USES. */
3118
3119 static void
3120 df_create_unused_note (rtx insn, df_ref def,
3121 bitmap live, bitmap artificial_uses,
3122 struct dead_debug_local *debug)
3123 {
3124 unsigned int dregno = DF_REF_REGNO (def);
3125
3126 if (REG_DEAD_DEBUGGING && dump_file)
3127 {
3128 fprintf (dump_file, " regular looking at def ");
3129 df_ref_debug (def, dump_file);
3130 }
3131
3132 if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
3133 || bitmap_bit_p (live, dregno)
3134 || bitmap_bit_p (artificial_uses, dregno)
3135 || df_ignore_stack_reg (dregno)))
3136 {
3137 rtx reg = (DF_REF_LOC (def))
3138 ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
3139 df_set_note (REG_UNUSED, insn, reg);
3140 dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
3141 if (REG_DEAD_DEBUGGING)
3142 df_print_note ("adding 3: ", insn, REG_NOTES (insn));
3143 }
3144
3145 return;
3146 }
3147
3148
3149 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3150 info: lifetime, bb, and number of defs and uses for basic block
3151 BB. The three bitmaps passed in are used as scratch here. */
3152
3153 static void
3154 df_note_bb_compute (unsigned int bb_index,
3155 bitmap live, bitmap do_not_gen, bitmap artificial_uses)
3156 {
3157 basic_block bb = BASIC_BLOCK (bb_index);
3158 rtx insn;
3159 df_ref *def_rec;
3160 df_ref *use_rec;
3161 struct dead_debug_local debug;
3162
3163 dead_debug_local_init (&debug, NULL, NULL);
3164
3165 bitmap_copy (live, df_get_live_out (bb));
3166 bitmap_clear (artificial_uses);
3167
3168 if (REG_DEAD_DEBUGGING && dump_file)
3169 {
3170 fprintf (dump_file, "live at bottom ");
3171 df_print_regset (dump_file, live);
3172 }
3173
3174 /* Process the artificial defs and uses at the bottom of the block
3175 to begin processing. */
3176 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3177 {
3178 df_ref def = *def_rec;
3179
3180 if (REG_DEAD_DEBUGGING && dump_file)
3181 fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
3182
3183 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3184 bitmap_clear_bit (live, DF_REF_REGNO (def));
3185 }
3186
3187 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3188 {
3189 df_ref use = *use_rec;
3190 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3191 {
3192 unsigned int regno = DF_REF_REGNO (use);
3193 bitmap_set_bit (live, regno);
3194
3195 /* Notes are not generated for any of the artificial registers
3196 at the bottom of the block. */
3197 bitmap_set_bit (artificial_uses, regno);
3198 }
3199 }
3200
3201 if (REG_DEAD_DEBUGGING && dump_file)
3202 {
3203 fprintf (dump_file, "live before artificials out ");
3204 df_print_regset (dump_file, live);
3205 }
3206
3207 FOR_BB_INSNS_REVERSE (bb, insn)
3208 {
3209 unsigned int uid = INSN_UID (insn);
3210 struct df_mw_hardreg **mws_rec;
3211 int debug_insn;
3212
3213 if (!INSN_P (insn))
3214 continue;
3215
3216 debug_insn = DEBUG_INSN_P (insn);
3217
3218 bitmap_clear (do_not_gen);
3219 df_remove_dead_and_unused_notes (insn);
3220
3221 /* Process the defs. */
3222 if (CALL_P (insn))
3223 {
3224 if (REG_DEAD_DEBUGGING && dump_file)
3225 {
3226 fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
3227 df_print_regset (dump_file, live);
3228 }
3229
3230 /* We only care about real sets for calls. Clobbers cannot
3231 be depended on to really die. */
3232 mws_rec = DF_INSN_UID_MWS (uid);
3233 while (*mws_rec)
3234 {
3235 struct df_mw_hardreg *mws = *mws_rec;
3236 if ((DF_MWS_REG_DEF_P (mws))
3237 && !df_ignore_stack_reg (mws->start_regno))
3238 df_set_unused_notes_for_mw (insn,
3239 mws, live, do_not_gen,
3240 artificial_uses, &debug);
3241 mws_rec++;
3242 }
3243
3244 /* All of the defs except the return value are some sort of
3245 clobber. This code is for the return value. */
3246 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3247 {
3248 df_ref def = *def_rec;
3249 unsigned int dregno = DF_REF_REGNO (def);
3250 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3251 {
3252 df_create_unused_note (insn,
3253 def, live, artificial_uses, &debug);
3254 bitmap_set_bit (do_not_gen, dregno);
3255 }
3256
3257 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3258 bitmap_clear_bit (live, dregno);
3259 }
3260 }
3261 else
3262 {
3263 /* Regular insn. */
3264 mws_rec = DF_INSN_UID_MWS (uid);
3265 while (*mws_rec)
3266 {
3267 struct df_mw_hardreg *mws = *mws_rec;
3268 if (DF_MWS_REG_DEF_P (mws))
3269 df_set_unused_notes_for_mw (insn,
3270 mws, live, do_not_gen,
3271 artificial_uses, &debug);
3272 mws_rec++;
3273 }
3274
3275 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3276 {
3277 df_ref def = *def_rec;
3278 unsigned int dregno = DF_REF_REGNO (def);
3279 df_create_unused_note (insn,
3280 def, live, artificial_uses, &debug);
3281
3282 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
3283 bitmap_set_bit (do_not_gen, dregno);
3284
3285 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3286 bitmap_clear_bit (live, dregno);
3287 }
3288 }
3289
3290 /* Process the uses. */
3291 mws_rec = DF_INSN_UID_MWS (uid);
3292 while (*mws_rec)
3293 {
3294 struct df_mw_hardreg *mws = *mws_rec;
3295 if (DF_MWS_REG_USE_P (mws)
3296 && !df_ignore_stack_reg (mws->start_regno))
3297 {
3298 bool really_add_notes = debug_insn != 0;
3299
3300 df_set_dead_notes_for_mw (insn,
3301 mws, live, do_not_gen,
3302 artificial_uses,
3303 &really_add_notes);
3304
3305 if (really_add_notes)
3306 debug_insn = -1;
3307 }
3308 mws_rec++;
3309 }
3310
3311 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3312 {
3313 df_ref use = *use_rec;
3314 unsigned int uregno = DF_REF_REGNO (use);
3315
3316 if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
3317 {
3318 fprintf (dump_file, " regular looking at use ");
3319 df_ref_debug (use, dump_file);
3320 }
3321
3322 if (!bitmap_bit_p (live, uregno))
3323 {
3324 if (debug_insn)
3325 {
3326 if (debug_insn > 0)
3327 {
3328 /* We won't add REG_UNUSED or REG_DEAD notes for
3329 these, so we don't have to mess with them in
3330 debug insns either. */
3331 if (!bitmap_bit_p (artificial_uses, uregno)
3332 && !df_ignore_stack_reg (uregno))
3333 dead_debug_add (&debug, use, uregno);
3334 continue;
3335 }
3336 break;
3337 }
3338 else
3339 dead_debug_insert_temp (&debug, uregno, insn,
3340 DEBUG_TEMP_BEFORE_WITH_REG);
3341
3342 if ( (!(DF_REF_FLAGS (use)
3343 & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
3344 && (!bitmap_bit_p (do_not_gen, uregno))
3345 && (!bitmap_bit_p (artificial_uses, uregno))
3346 && (!df_ignore_stack_reg (uregno)))
3347 {
3348 rtx reg = (DF_REF_LOC (use))
3349 ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
3350 df_set_note (REG_DEAD, insn, reg);
3351
3352 if (REG_DEAD_DEBUGGING)
3353 df_print_note ("adding 4: ", insn, REG_NOTES (insn));
3354 }
3355 /* This register is now live. */
3356 bitmap_set_bit (live, uregno);
3357 }
3358 }
3359
3360 df_remove_dead_eq_notes (insn, live);
3361
3362 if (debug_insn == -1)
3363 {
3364 /* ??? We could probably do better here, replacing dead
3365 registers with their definitions. */
3366 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
3367 df_insn_rescan_debug_internal (insn);
3368 }
3369 }
3370
3371 dead_debug_local_finish (&debug, NULL);
3372 }
3373
3374
3375 /* Compute register info: lifetime, bb, and number of defs and uses. */
3376 static void
3377 df_note_compute (bitmap all_blocks)
3378 {
3379 unsigned int bb_index;
3380 bitmap_iterator bi;
3381 bitmap_head live, do_not_gen, artificial_uses;
3382
3383 bitmap_initialize (&live, &df_bitmap_obstack);
3384 bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
3385 bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
3386
3387 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
3388 {
3389 /* ??? Unlike fast DCE, we don't use global_debug for uses of dead
3390 pseudos in debug insns because we don't always (re)visit blocks
3391 with death points after visiting dead uses. Even changing this
3392 loop to postorder would still leave room for visiting a death
3393 point before visiting a subsequent debug use. */
3394 df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
3395 }
3396
3397 bitmap_clear (&live);
3398 bitmap_clear (&do_not_gen);
3399 bitmap_clear (&artificial_uses);
3400 }
3401
3402
3403 /* Free all storage associated with the problem. */
3404
3405 static void
3406 df_note_free (void)
3407 {
3408 free (df_note);
3409 }
3410
3411
3412 /* All of the information associated with every instance of the problem. */
3413
3414 static struct df_problem problem_NOTE =
3415 {
3416 DF_NOTE, /* Problem id. */
3417 DF_NONE, /* Direction. */
3418 df_note_alloc, /* Allocate the problem specific data. */
3419 NULL, /* Reset global information. */
3420 NULL, /* Free basic block info. */
3421 df_note_compute, /* Local compute function. */
3422 NULL, /* Init the solution specific data. */
3423 NULL, /* Iterative solver. */
3424 NULL, /* Confluence operator 0. */
3425 NULL, /* Confluence operator n. */
3426 NULL, /* Transfer function. */
3427 NULL, /* Finalize function. */
3428 df_note_free, /* Free all of the problem information. */
3429 df_note_free, /* Remove this problem from the stack of dataflow problems. */
3430 NULL, /* Debugging. */
3431 NULL, /* Debugging start block. */
3432 NULL, /* Debugging end block. */
3433 NULL, /* Debugging start insn. */
3434 NULL, /* Debugging end insn. */
3435 NULL, /* Incremental solution verify start. */
3436 NULL, /* Incremental solution verify end. */
3437 &problem_LR, /* Dependent problem. */
3438 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
3439 TV_DF_NOTE, /* Timing variable. */
3440 false /* Reset blocks on dropping out of blocks_to_analyze. */
3441 };
3442
3443
3444 /* Create a new DATAFLOW instance and add it to an existing instance
3445 of DF. The returned structure is what is used to get at the
3446 solution. */
3447
3448 void
3449 df_note_add_problem (void)
3450 {
3451 df_add_problem (&problem_NOTE);
3452 }
3453
3454
3455
3456 \f
3457 /*----------------------------------------------------------------------------
3458 Functions for simulating the effects of single insns.
3459
3460 You can simulate either in the forwards direction, starting from
3461 the top of a block, or in the backwards direction, from the end of the
3462 block. If you go backwards, defs are examined first to clear bits,
3463 then uses are examined to set bits. If you go forwards, defs are
3464 examined first to set bits, then REG_DEAD and REG_UNUSED notes
3465 are examined to clear bits. In either case, the result of examining
3466 a def can be undone (respectively by a use or a REG_UNUSED note).
3467
3468 If you start at the top of the block, use one of DF_LIVE_IN or
3469 DF_LR_IN. If you start at the bottom of the block use one of
3470 DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
3471 THEY WILL BE DESTROYED.
3472 ----------------------------------------------------------------------------*/
3473
3474
3475 /* Find the set of DEFs for INSN. */
3476
3477 void
3478 df_simulate_find_defs (rtx insn, bitmap defs)
3479 {
3480 df_ref *def_rec;
3481 unsigned int uid = INSN_UID (insn);
3482
3483 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3484 {
3485 df_ref def = *def_rec;
3486 bitmap_set_bit (defs, DF_REF_REGNO (def));
3487 }
3488 }
3489
3490 /* Find the set of uses for INSN. This includes partial defs. */
3491
3492 static void
3493 df_simulate_find_uses (rtx insn, bitmap uses)
3494 {
3495 df_ref *rec;
3496 unsigned int uid = INSN_UID (insn);
3497
3498 for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
3499 {
3500 df_ref def = *rec;
3501 if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
3502 bitmap_set_bit (uses, DF_REF_REGNO (def));
3503 }
3504 for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
3505 {
3506 df_ref use = *rec;
3507 bitmap_set_bit (uses, DF_REF_REGNO (use));
3508 }
3509 }
3510
3511 /* Find the set of real DEFs, which are not clobbers, for INSN. */
3512
3513 void
3514 df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
3515 {
3516 df_ref *def_rec;
3517 unsigned int uid = INSN_UID (insn);
3518
3519 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3520 {
3521 df_ref def = *def_rec;
3522 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3523 bitmap_set_bit (defs, DF_REF_REGNO (def));
3524 }
3525 }
3526
3527
3528 /* Simulate the effects of the defs of INSN on LIVE. */
3529
3530 void
3531 df_simulate_defs (rtx insn, bitmap live)
3532 {
3533 df_ref *def_rec;
3534 unsigned int uid = INSN_UID (insn);
3535
3536 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
3537 {
3538 df_ref def = *def_rec;
3539 unsigned int dregno = DF_REF_REGNO (def);
3540
3541 /* If the def is to only part of the reg, it does
3542 not kill the other defs that reach here. */
3543 if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
3544 bitmap_clear_bit (live, dregno);
3545 }
3546 }
3547
3548
3549 /* Simulate the effects of the uses of INSN on LIVE. */
3550
3551 void
3552 df_simulate_uses (rtx insn, bitmap live)
3553 {
3554 df_ref *use_rec;
3555 unsigned int uid = INSN_UID (insn);
3556
3557 if (DEBUG_INSN_P (insn))
3558 return;
3559
3560 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
3561 {
3562 df_ref use = *use_rec;
3563 /* Mark the register used by USE as live. */
3564 bitmap_set_bit (live, DF_REF_REGNO (use));
3565 }
3566 }
3567
3568
3569 /* Add back the always live regs in BB to LIVE. */
3570
3571 static inline void
3572 df_simulate_fixup_sets (basic_block bb, bitmap live)
3573 {
3574 /* These regs are considered always live so if they end up dying
3575 because of some def, we need to bring them back again. */
3576 if (bb_has_eh_pred (bb))
3577 bitmap_ior_into (live, &df->eh_block_artificial_uses);
3578 else
3579 bitmap_ior_into (live, &df->regular_block_artificial_uses);
3580 }
3581
3582
3583 /*----------------------------------------------------------------------------
3584 The following three functions are used only for BACKWARDS scanning:
3585 i.e. they process the defs before the uses.
3586
3587 df_simulate_initialize_backwards should be called first with a
3588 bitvector copied from DF_LIVE_OUT or DF_LR_OUT. Then
3589 df_simulate_one_insn_backwards should be called for each insn in
3590 the block, starting with the last one. Finally,
3591 df_simulate_finalize_backwards can be called to get a new value
3592 of the sets at the top of the block (this is rarely used).
3593 ----------------------------------------------------------------------------*/
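
/* Illustrative sketch (hypothetical, not from this file) of the
   backwards protocol described above.  Note the copy: the simulation
   destroys the set it is given.

       bitmap_head live;
       rtx insn;

       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, df_get_live_out (bb));
       df_simulate_initialize_backwards (bb, &live);
       FOR_BB_INSNS_REVERSE (bb, insn)
         df_simulate_one_insn_backwards (bb, insn, &live);
       df_simulate_finalize_backwards (bb, &live);
       bitmap_clear (&live);

   simulate_backwards_to_point, further down in this file, follows the
   same pattern, stopping at a given insn and skipping the finalize
   step.  */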
3594
3595 /* Apply the artificial uses and defs at the end of BB in a backwards
3596 direction. */
3597
3598 void
3599 df_simulate_initialize_backwards (basic_block bb, bitmap live)
3600 {
3601 df_ref *def_rec;
3602 df_ref *use_rec;
3603 int bb_index = bb->index;
3604
3605 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3606 {
3607 df_ref def = *def_rec;
3608 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3609 bitmap_clear_bit (live, DF_REF_REGNO (def));
3610 }
3611
3612 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3613 {
3614 df_ref use = *use_rec;
3615 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3616 bitmap_set_bit (live, DF_REF_REGNO (use));
3617 }
3618 }
3619
3620
3621 /* Simulate the backwards effects of INSN on the bitmap LIVE. */
3622
3623 void
3624 df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
3625 {
3626 if (!NONDEBUG_INSN_P (insn))
3627 return;
3628
3629 df_simulate_defs (insn, live);
3630 df_simulate_uses (insn, live);
3631 df_simulate_fixup_sets (bb, live);
3632 }
3633
3634
3635 /* Apply the artificial uses and defs at the top of BB in a backwards
3636 direction. */
3637
3638 void
3639 df_simulate_finalize_backwards (basic_block bb, bitmap live)
3640 {
3641 df_ref *def_rec;
3642 #ifdef EH_USES
3643 df_ref *use_rec;
3644 #endif
3645 int bb_index = bb->index;
3646
3647 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3648 {
3649 df_ref def = *def_rec;
3650 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3651 bitmap_clear_bit (live, DF_REF_REGNO (def));
3652 }
3653
3654 #ifdef EH_USES
3655 for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
3656 {
3657 df_ref use = *use_rec;
3658 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
3659 bitmap_set_bit (live, DF_REF_REGNO (use));
3660 }
3661 #endif
3662 }
3663 /*----------------------------------------------------------------------------
3664 The following two functions are used only for FORWARDS scanning:
3665 i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
3666 Thus it is important to add the DF_NOTES problem to the stack of
3667 problems computed before using these functions.
3668
3669 df_simulate_initialize_forwards should be called first with a
3670 bitvector copied from DF_LIVE_IN or DF_LR_IN. Then
3671 df_simulate_one_insn_forwards should be called for each insn in
3672 the block, starting with the first one.
3673 ----------------------------------------------------------------------------*/
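
/* Illustrative sketch (hypothetical, not from this file) of the
   forwards protocol; the DF_NOTE problem must have been computed
   first, since the forward scan relies on REG_DEAD and REG_UNUSED
   notes.

       bitmap_head live;
       rtx insn;

       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, DF_LR_IN (bb));
       df_simulate_initialize_forwards (bb, &live);
       FOR_BB_INSNS (bb, insn)
         df_simulate_one_insn_forwards (bb, insn, &live);
       bitmap_clear (&live);

   Passing a copy is essential; the set is modified in place.  */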
3674
3675 /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
3676 DF_LR_IN for basic block BB, for forward scanning by marking artificial
3677 defs live. */
3678
3679 void
3680 df_simulate_initialize_forwards (basic_block bb, bitmap live)
3681 {
3682 df_ref *def_rec;
3683 int bb_index = bb->index;
3684
3685 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
3686 {
3687 df_ref def = *def_rec;
3688 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
3689 bitmap_set_bit (live, DF_REF_REGNO (def));
3690 }
3691 }
3692
3693 /* Simulate the forwards effects of INSN on the bitmap LIVE. */
3694
3695 void
3696 df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
3697 {
3698 rtx link;
3699 if (! INSN_P (insn))
3700 return;
3701
3702 /* Make sure that DF_NOTE really is an active df problem. */
3703 gcc_assert (df_note);
3704
3705 /* Note that this is the opposite of how the problem is defined, because
3706 in the LR problem defs _kill_ liveness. However, they do so backwards,
3707 while here the scan is performed forwards! So, first assume that the
3708 def is live, and if this is not true REG_UNUSED notes will rectify the
3709 situation. */
3710 df_simulate_find_noclobber_defs (insn, live);
3711
3712 /* Clear all of the registers that go dead. */
3713 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
3714 {
3715 switch (REG_NOTE_KIND (link))
3716 {
3717 case REG_DEAD:
3718 case REG_UNUSED:
3719 {
3720 rtx reg = XEXP (link, 0);
3721 int regno = REGNO (reg);
3722 if (HARD_REGISTER_NUM_P (regno))
3723 bitmap_clear_range (live, regno,
3724 hard_regno_nregs[regno][GET_MODE (reg)]);
3725 else
3726 bitmap_clear_bit (live, regno);
3727 }
3728 break;
3729 default:
3730 break;
3731 }
3732 }
3733 df_simulate_fixup_sets (bb, live);
3734 }
3735 \f
3736 /* Used by the next two functions to encode information about the
3737 memory references we found. */
3738 #define MEMREF_NORMAL 1
3739 #define MEMREF_VOLATILE 2
3740
3741 /* A subroutine of can_move_insns_across_p called through for_each_rtx.
3742 Return either MEMREF_NORMAL or MEMREF_VOLATILE if a memory reference is found. */
3743
3744 static int
3745 find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3746 {
3747 rtx x = *px;
3748
3749 if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
3750 return MEMREF_VOLATILE;
3751
3752 if (!MEM_P (x))
3753 return 0;
3754 if (MEM_VOLATILE_P (x))
3755 return MEMREF_VOLATILE;
3756 if (MEM_READONLY_P (x))
3757 return 0;
3758
3759 return MEMREF_NORMAL;
3760 }
3761
3762 /* A subroutine of can_move_insns_across_p called through note_stores.
3763 DATA points to an integer in which we set either the bit for
3764 MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
3765 of either kind. */
3766
3767 static void
3768 find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
3769 void *data ATTRIBUTE_UNUSED)
3770 {
3771 int *pflags = (int *)data;
3772 if (GET_CODE (x) == SUBREG)
3773 x = XEXP (x, 0);
3774 /* Treat stores to SP as stores to memory; this prevents problems
3775 when there are references to the stack frame. */
3776 if (x == stack_pointer_rtx)
3777 *pflags |= MEMREF_VOLATILE;
3778 if (!MEM_P (x))
3779 return;
3780 *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
3781 }
3782
3783 /* Scan BB backwards, using df_simulate functions to keep track of
3784 lifetimes, up to insn POINT. The result is stored in LIVE. */
3785
3786 void
3787 simulate_backwards_to_point (basic_block bb, regset live, rtx point)
3788 {
3789 rtx insn;
3790 bitmap_copy (live, df_get_live_out (bb));
3791 df_simulate_initialize_backwards (bb, live);
3792
3793 /* Scan and update life information until we reach the point we're
3794 interested in. */
3795 for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
3796 df_simulate_one_insn_backwards (bb, insn, live);
3797 }
3798
3799 /* Return true if it is safe to move a group of insns, described by
3800 the range FROM to TO, backwards across another group of insns,
3801 described by ACROSS_FROM to ACROSS_TO. It is assumed that there
3802 are no insns between ACROSS_TO and FROM, but they may be in
3803 different basic blocks; MERGE_BB is the block from which the
3804 insns will be moved. The caller must pass in a regset MERGE_LIVE
3805 which specifies the registers live after TO.
3806
3807 This function may be called in one of two cases: either we try to
3808 move identical instructions from all successor blocks into their
3809 predecessor, or we try to move from only one successor block. If
3810 OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
3811 the second case. It should contain a set of registers live at the
3812 end of ACROSS_TO which must not be clobbered by moving the insns.
3813 In that case, we're also more careful about moving memory references
3814 and trapping insns.
3815
3816 We return false if it is not safe to move the entire group, but it
3817 may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
3818 is set to point at the last moveable insn in such a case. */
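
/* For illustration only, a hypothetical call sequence (the variable names
   below are assumptions, not code from this file).  A pass that wants to
   move the insns FROM..TO from MERGE_BB backwards across the insns
   ACROSS_FROM..ACROSS_TO might do:

       regset live = BITMAP_ALLOC (&reg_obstack);
       simulate_backwards_to_point (merge_bb, live, to);

       rtx move_upto;
       bool all_ok = can_move_insns_across (from, to, across_from, across_to,
                                            merge_bb, live,
                                            NULL, &move_upto);
       if (!all_ok && move_upto != NULL_RTX)
         to = move_upto;
       BITMAP_FREE (live);

   i.e. when the whole range cannot be moved, MOVE_UPTO (if not NULL_RTX)
   gives a shorter range FROM..MOVE_UPTO that is still safe to move.
   See ifcvt.c for a real caller of this function.  */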
3819
3820 bool
3821 can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
3822 basic_block merge_bb, regset merge_live,
3823 regset other_branch_live, rtx *pmove_upto)
3824 {
3825 rtx insn, next, max_to;
3826 bitmap merge_set, merge_use, local_merge_live;
3827 bitmap test_set, test_use;
3828 unsigned i, fail = 0;
3829 bitmap_iterator bi;
3830 int memrefs_in_across = 0;
3831 int mem_sets_in_across = 0;
3832 bool trapping_insns_in_across = false;
3833
3834 if (pmove_upto != NULL)
3835 *pmove_upto = NULL_RTX;
3836
3837 /* Find real bounds, ignoring debug insns. */
3838 while (!NONDEBUG_INSN_P (from) && from != to)
3839 from = NEXT_INSN (from);
3840 while (!NONDEBUG_INSN_P (to) && from != to)
3841 to = PREV_INSN (to);
3842
3843 for (insn = across_to; ; insn = next)
3844 {
3845 if (CALL_P (insn))
3846 {
3847 if (RTL_CONST_OR_PURE_CALL_P (insn))
3848 /* Pure functions can read from memory. Const functions can
3849 read from arguments that the ABI has forced onto the stack.
3850 Neither sort of read can be volatile. */
3851 memrefs_in_across |= MEMREF_NORMAL;
3852 else
3853 {
3854 memrefs_in_across |= MEMREF_VOLATILE;
3855 mem_sets_in_across |= MEMREF_VOLATILE;
3856 }
3857 }
3858 if (NONDEBUG_INSN_P (insn))
3859 {
3860 if (volatile_insn_p (PATTERN (insn)))
3861 return false;
3862 memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
3863 NULL);
3864 note_stores (PATTERN (insn), find_memory_stores,
3865 &mem_sets_in_across);
3866 /* This is used just to find sets of the stack pointer. */
3867 memrefs_in_across |= mem_sets_in_across;
3868 trapping_insns_in_across |= may_trap_p (PATTERN (insn));
3869 }
3870 next = PREV_INSN (insn);
3871 if (insn == across_from)
3872 break;
3873 }
3874
3875 /* Collect:
3876 MERGE_SET = set of registers set in MERGE_BB
3877 MERGE_USE = set of registers used in MERGE_BB and live at its top
3878 MERGE_LIVE = set of registers live at the point inside the MERGE
3879 range that we've reached during scanning
3880 TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
3881 TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
3882 and live before ACROSS_FROM. */
3883
3884 merge_set = BITMAP_ALLOC (&reg_obstack);
3885 merge_use = BITMAP_ALLOC (&reg_obstack);
3886 local_merge_live = BITMAP_ALLOC (&reg_obstack);
3887 test_set = BITMAP_ALLOC (&reg_obstack);
3888 test_use = BITMAP_ALLOC (&reg_obstack);
3889
3890 /* Compute the set of registers set and used in the ACROSS range. */
3891 if (other_branch_live != NULL)
3892 bitmap_copy (test_use, other_branch_live);
3893 df_simulate_initialize_backwards (merge_bb, test_use);
3894 for (insn = across_to; ; insn = next)
3895 {
3896 if (NONDEBUG_INSN_P (insn))
3897 {
3898 df_simulate_find_defs (insn, test_set);
3899 df_simulate_defs (insn, test_use);
3900 df_simulate_uses (insn, test_use);
3901 }
3902 next = PREV_INSN (insn);
3903 if (insn == across_from)
3904 break;
3905 }
3906
3907 /* Compute an upper bound for the number of insns moved, by finding
3908 the first insn in MERGE that sets a register in TEST_USE, or uses
3909 a register in TEST_SET. We also check for calls, trapping operations,
3910 and memory references. */
3911 max_to = NULL_RTX;
3912 for (insn = from; ; insn = next)
3913 {
3914 if (CALL_P (insn))
3915 break;
3916 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3917 break;
3918 if (NONDEBUG_INSN_P (insn))
3919 {
3920 if (may_trap_or_fault_p (PATTERN (insn))
3921 && (trapping_insns_in_across
3922 || other_branch_live != NULL
3923 || volatile_insn_p (PATTERN (insn))))
3924 break;
3925
3926 /* We cannot move memory stores past each other, or move memory
3927 reads past stores, at least not without tracking them and
3928 calling true_dependence on every pair.
3929
3930 If there is no other branch and no memory references or
3931 sets in the ACROSS range, we can move memory references
3932 freely, even volatile ones.
3933
3934 Otherwise, the rules are as follows: volatile memory
3935 references and stores can't be moved at all, and any type
3936 of memory reference can't be moved if there are volatile
3937 accesses or stores in the ACROSS range. That leaves
3938 normal reads, which can be moved, as the trapping case is
3939 dealt with elsewhere. */
3940 if (other_branch_live != NULL || memrefs_in_across != 0)
3941 {
3942 int mem_ref_flags = 0;
3943 int mem_set_flags = 0;
3944 note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
3945 mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
3946 NULL);
3947 /* Catch sets of the stack pointer. */
3948 mem_ref_flags |= mem_set_flags;
3949
3950 if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
3951 break;
3952 if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
3953 break;
3954 if (mem_set_flags != 0
3955 || (mem_sets_in_across != 0 && mem_ref_flags != 0))
3956 break;
3957 }
3958 df_simulate_find_uses (insn, merge_use);
3959 /* We're only interested in uses which use a value live at
3960 the top, not one previously set in this block. */
3961 bitmap_and_compl_into (merge_use, merge_set);
3962 df_simulate_find_defs (insn, merge_set);
3963 if (bitmap_intersect_p (merge_set, test_use)
3964 || bitmap_intersect_p (merge_use, test_set))
3965 break;
3966 #ifdef HAVE_cc0
3967 if (!sets_cc0_p (insn))
3968 #endif
3969 max_to = insn;
3970 }
3971 next = NEXT_INSN (insn);
3972 if (insn == to)
3973 break;
3974 }
3975 if (max_to != to)
3976 fail = 1;
3977
3978 if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
3979 goto out;
3980
3981 /* Now, lower this upper bound by also taking into account that
3982 a range of insns moved across ACROSS must not leave a register
3983 live at the end that will be clobbered in ACROSS. We need to
3984 find a point where TEST_SET & LIVE == 0.
3985
3986 Insns in the MERGE range that set registers which are also set
3987 in the ACROSS range may still be moved as long as we also move
3988 later insns which use the results of the set, and make the
3989 register dead again. This is verified by the condition stated
3990 above. We only need to test it for registers that are set in
3991 the moved region.
3992
3993 MERGE_LIVE is provided by the caller and holds live registers after
3994 TO. */
3995 bitmap_copy (local_merge_live, merge_live);
3996 for (insn = to; insn != max_to; insn = PREV_INSN (insn))
3997 df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
3998
3999 /* We're not interested in registers that aren't set in the moved
4000 region at all. */
4001 bitmap_and_into (local_merge_live, merge_set);
4002 for (;;)
4003 {
4004 if (NONDEBUG_INSN_P (insn))
4005 {
4006 if (!bitmap_intersect_p (test_set, local_merge_live)
4007 #ifdef HAVE_cc0
4008 && !sets_cc0_p (insn)
4009 #endif
4010 )
4011 {
4012 max_to = insn;
4013 break;
4014 }
4015
4016 df_simulate_one_insn_backwards (merge_bb, insn,
4017 local_merge_live);
4018 }
4019 if (insn == from)
4020 {
4021 fail = 1;
4022 goto out;
4023 }
4024 insn = PREV_INSN (insn);
4025 }
4026
4027 if (max_to != to)
4028 fail = 1;
4029
4030 if (pmove_upto)
4031 *pmove_upto = max_to;
4032
4033 /* For small register class machines, don't lengthen lifetimes of
4034 hard registers before reload. */
4035 if (! reload_completed
4036 && targetm.small_register_classes_for_mode_p (VOIDmode))
4037 {
4038 EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
4039 {
4040 if (i < FIRST_PSEUDO_REGISTER
4041 && ! fixed_regs[i]
4042 && ! global_regs[i])
4043 fail = 1;
4044 }
4045 }
4046
4047 out:
4048 BITMAP_FREE (merge_set);
4049 BITMAP_FREE (merge_use);
4050 BITMAP_FREE (local_merge_live);
4051 BITMAP_FREE (test_set);
4052 BITMAP_FREE (test_use);
4053
4054 return !fail;
4055 }
4056
4057 \f
4058 /*----------------------------------------------------------------------------
4059 MULTIPLE DEFINITIONS
4060
4061 Find the locations in the function reached by multiple definition sites
4062 for a live pseudo. In and out bitvectors are built for each basic
4063 block. They are restricted for efficiency to live registers.
4064
4065 The gen and kill sets for the problem are obvious. Together they
4066 include all defined registers in a basic block; the gen set includes
4067 registers where a partial or conditional or may-clobber definition is
4068 last in the BB, while the kill set includes registers with a complete
4069 definition coming last. However, the computation of the dataflow
4070 itself is interesting.
4071
4072 The idea behind it comes from SSA form's iterated dominance frontier
4073 criterion for inserting PHI functions. Just like in that case, we can use
4074 the dominance frontier to find places where multiple definitions meet;
4075 a register X defined in a basic block BB1 has multiple definitions in
4076 basic blocks in BB1's dominance frontier.
4077
4078 So, the in-set of a basic block BB2 is not just the union of the
4079 out-sets of BB2's predecessors, but includes some more bits that come
4080 from the basic blocks in whose dominance frontier BB2 lies (BB1 in
4081 the previous paragraph). We call this set the init-set of BB2.
4082
4083 (Note: the kill-set is actually used only to build the init-set;
4084 the gen bits are propagated from BB1 to BB2 by the dataflow solver anyway.)
4085
4086 For example, if you have
4087
4088 BB1 : r10 = 0
4089 r11 = 0
4090 if <...> goto BB2 else goto BB3;
4091
4092 BB2 : r10 = 1
4093 r12 = 1
4094 goto BB3;
4095
4096 BB3 :
4097
4098 you have BB3 in BB2's dominance frontier but not in BB1's, so that the
4099 init-set of BB3 includes r10 and r12, but not r11. Note that we do
4100 not need to iterate the dominance frontier, because we do not insert
4101 anything like PHI functions there! Instead, dataflow will take care of
4102 propagating the information to BB3's successors.
4103 ---------------------------------------------------------------------------*/
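
/* Restated compactly, in terms of the bitmap operations used by the
   confluence and transfer functions below (this is only a summary of the
   code, not a new definition):

       IN (BB)  = (INIT (BB) | union of OUT (P) over non-fake preds P)
                  & df_get_live_in (BB)
       OUT (BB) = (GEN (BB) & df_get_live_out (BB)) | (IN (BB) & ~KILL (BB))

   where the contribution of OUT (P) over an EH edge is further restricted
   to registers not invalidated by calls.  */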
4104
4105 /* Private data used to verify the solution for this problem. */
4106 struct df_md_problem_data
4107 {
4108 /* An obstack for the bitmaps we need for this problem. */
4109 bitmap_obstack md_bitmaps;
4110 };
4111
4112 /* Scratch var used by transfer functions. This is used to do md analysis
4113 only for live registers. */
4114 static bitmap_head df_md_scratch;
4115
4116
4117 static void
4118 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
4119 void *vbb_info)
4120 {
4121 struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
4122 if (bb_info)
4123 {
4124 bitmap_clear (&bb_info->kill);
4125 bitmap_clear (&bb_info->gen);
4126 bitmap_clear (&bb_info->init);
4127 bitmap_clear (&bb_info->in);
4128 bitmap_clear (&bb_info->out);
4129 }
4130 }
4131
4132
4133 /* Allocate or reset bitmaps for DF_MD. The solution bits are
4134 not touched unless the block is new. */
4135
4136 static void
4137 df_md_alloc (bitmap all_blocks)
4138 {
4139 unsigned int bb_index;
4140 bitmap_iterator bi;
4141 struct df_md_problem_data *problem_data;
4142
4143 df_grow_bb_info (df_md);
4144 if (df_md->problem_data)
4145 problem_data = (struct df_md_problem_data *) df_md->problem_data;
4146 else
4147 {
4148 problem_data = XNEW (struct df_md_problem_data);
4149 df_md->problem_data = problem_data;
4150 bitmap_obstack_initialize (&problem_data->md_bitmaps);
4151 }
4152 bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
4153
4154 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4155 {
4156 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4157 /* When bitmaps are already initialized, just clear them. */
4158 if (bb_info->init.obstack)
4159 {
4160 bitmap_clear (&bb_info->init);
4161 bitmap_clear (&bb_info->gen);
4162 bitmap_clear (&bb_info->kill);
4163 bitmap_clear (&bb_info->in);
4164 bitmap_clear (&bb_info->out);
4165 }
4166 else
4167 {
4168 bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
4169 bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
4170 bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
4171 bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
4172 bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
4173 }
4174 }
4175
4176 df_md->optional_p = true;
4177 }
4178
4179 /* Add the effect of the top artificial defs of BB to the multiple definitions
4180 bitmap LOCAL_MD. */
4181
4182 void
4183 df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
4184 {
4185 int bb_index = bb->index;
4186 df_ref *def_rec;
4187 for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
4188 {
4189 df_ref def = *def_rec;
4190 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
4191 {
4192 unsigned int dregno = DF_REF_REGNO (def);
4193 if (DF_REF_FLAGS (def)
4194 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4195 bitmap_set_bit (local_md, dregno);
4196 else
4197 bitmap_clear_bit (local_md, dregno);
4198 }
4199 }
4200 }
4201
4202
4203 /* Add the effect of the defs of INSN to the multiple definitions bitmap
4204 LOCAL_MD. */
4205
4206 void
4207 df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
4208 bitmap local_md)
4209 {
4210 unsigned uid = INSN_UID (insn);
4211 df_ref *def_rec;
4212
4213 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
4214 {
4215 df_ref def = *def_rec;
4216 unsigned int dregno = DF_REF_REGNO (def);
4217 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
4218 || (dregno >= FIRST_PSEUDO_REGISTER))
4219 {
4220 if (DF_REF_FLAGS (def)
4221 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4222 bitmap_set_bit (local_md, dregno);
4223 else
4224 bitmap_clear_bit (local_md, dregno);
4225 }
4226 }
4227 }
4228
4229 static void
4230 df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
4231 df_ref *def_rec,
4232 int top_flag)
4233 {
4234 df_ref def;
4235 bitmap_clear (&seen_in_insn);
4236
4237 while ((def = *def_rec++) != NULL)
4238 {
4239 unsigned int dregno = DF_REF_REGNO (def);
4240 if (((!(df->changeable_flags & DF_NO_HARD_REGS))
4241 || (dregno >= FIRST_PSEUDO_REGISTER))
4242 && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
4243 {
4244 if (!bitmap_bit_p (&seen_in_insn, dregno))
4245 {
4246 if (DF_REF_FLAGS (def)
4247 & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
4248 {
4249 bitmap_set_bit (&bb_info->gen, dregno);
4250 bitmap_clear_bit (&bb_info->kill, dregno);
4251 }
4252 else
4253 {
4254 /* When we find a clobber and a regular def,
4255 make sure the regular def wins. */
4256 bitmap_set_bit (&seen_in_insn, dregno);
4257 bitmap_set_bit (&bb_info->kill, dregno);
4258 bitmap_clear_bit (&bb_info->gen, dregno);
4259 }
4260 }
4261 }
4262 }
4263 }
4264
4265
4266 /* Compute local multiple def info for basic block BB. */
4267
4268 static void
4269 df_md_bb_local_compute (unsigned int bb_index)
4270 {
4271 basic_block bb = BASIC_BLOCK (bb_index);
4272 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4273 rtx insn;
4274
4275 /* Artificial defs are always hard regs. */
4276 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4277 df_md_bb_local_compute_process_def (bb_info,
4278 df_get_artificial_defs (bb_index),
4279 DF_REF_AT_TOP);
4280
4281 FOR_BB_INSNS (bb, insn)
4282 {
4283 unsigned int uid = INSN_UID (insn);
4284 if (!INSN_P (insn))
4285 continue;
4286
4287 df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
4288 }
4289
4290 if (!(df->changeable_flags & DF_NO_HARD_REGS))
4291 df_md_bb_local_compute_process_def (bb_info,
4292 df_get_artificial_defs (bb_index),
4293 0);
4294 }
4295
4296 /* Compute local multiple-def info for each basic block within ALL_BLOCKS. */
4297
4298 static void
4299 df_md_local_compute (bitmap all_blocks)
4300 {
4301 unsigned int bb_index, df_bb_index;
4302 bitmap_iterator bi1, bi2;
4303 basic_block bb;
4304 bitmap_head *frontiers;
4305
4306 bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
4307
4308 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4309 {
4310 df_md_bb_local_compute (bb_index);
4311 }
4312
4313 bitmap_clear (&seen_in_insn);
4314
4315 frontiers = XNEWVEC (bitmap_head, last_basic_block);
4316 FOR_ALL_BB (bb)
4317 bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
4318
4319 compute_dominance_frontiers (frontiers);
4320
4321 /* Add each basic block's kills to the nodes in the frontier of the BB. */
4322 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
4323 {
4324 bitmap kill = &df_md_get_bb_info (bb_index)->kill;
4325 EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
4326 {
4327 basic_block bb = BASIC_BLOCK (df_bb_index);
4328 if (bitmap_bit_p (all_blocks, df_bb_index))
4329 bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
4330 df_get_live_in (bb));
4331 }
4332 }
4333
4334 FOR_ALL_BB (bb)
4335 bitmap_clear (&frontiers[bb->index]);
4336 free (frontiers);
4337 }
4338
4339
4340 /* Reset the global solution for recalculation. */
4341
4342 static void
4343 df_md_reset (bitmap all_blocks)
4344 {
4345 unsigned int bb_index;
4346 bitmap_iterator bi;
4347
4348 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4349 {
4350 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4351 gcc_assert (bb_info);
4352 bitmap_clear (&bb_info->in);
4353 bitmap_clear (&bb_info->out);
4354 }
4355 }
4356
4357 static bool
4358 df_md_transfer_function (int bb_index)
4359 {
4360 basic_block bb = BASIC_BLOCK (bb_index);
4361 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4362 bitmap in = &bb_info->in;
4363 bitmap out = &bb_info->out;
4364 bitmap gen = &bb_info->gen;
4365 bitmap kill = &bb_info->kill;
4366
4367 /* We need to use a scratch set here so that the value returned from this
4368 function invocation properly reflects whether the sets changed in a
4369 significant way; i.e. not just because the live set was anded in. */
4370 bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
4371
4372 /* Multiple definitions of a register are not relevant if it is not
4373 live. Thus we trim the result to the places where it is live. */
4374 bitmap_and_into (in, df_get_live_in (bb));
4375
4376 return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
4377 }
4378
4379 /* Initialize the solution bit vectors for the problem. */
4380
4381 static void
4382 df_md_init (bitmap all_blocks)
4383 {
4384 unsigned int bb_index;
4385 bitmap_iterator bi;
4386
4387 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
4388 {
4389 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
4390
4391 bitmap_copy (&bb_info->in, &bb_info->init);
4392 df_md_transfer_function (bb_index);
4393 }
4394 }
4395
4396 static void
4397 df_md_confluence_0 (basic_block bb)
4398 {
4399 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4400 bitmap_copy (&bb_info->in, &bb_info->init);
4401 }
4402
4403 /* The IN set of the target gets ORed with the OUT set of the source. */
4404
4405 static bool
4406 df_md_confluence_n (edge e)
4407 {
4408 bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
4409 bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
4410
4411 if (e->flags & EDGE_FAKE)
4412 return false;
4413
4414 if (e->flags & EDGE_EH)
4415 return bitmap_ior_and_compl_into (op1, op2,
4416 regs_invalidated_by_call_regset);
4417 else
4418 return bitmap_ior_into (op1, op2);
4419 }
4420
4421 /* Free all storage associated with the problem. */
4422
4423 static void
4424 df_md_free (void)
4425 {
4426 struct df_md_problem_data *problem_data
4427 = (struct df_md_problem_data *) df_md->problem_data;
4428
4429 bitmap_obstack_release (&problem_data->md_bitmaps);
4430 free (problem_data);
4431 df_md->problem_data = NULL;
4432
4433 df_md->block_info_size = 0;
4434 free (df_md->block_info);
4435 df_md->block_info = NULL;
4436 free (df_md);
4437 }
4438
4439
4440 /* Debugging info at top of bb. */
4441
4442 static void
4443 df_md_top_dump (basic_block bb, FILE *file)
4444 {
4445 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4446 if (!bb_info)
4447 return;
4448
4449 fprintf (file, ";; md in \t");
4450 df_print_regset (file, &bb_info->in);
4451 fprintf (file, ";; md init \t");
4452 df_print_regset (file, &bb_info->init);
4453 fprintf (file, ";; md gen \t");
4454 df_print_regset (file, &bb_info->gen);
4455 fprintf (file, ";; md kill \t");
4456 df_print_regset (file, &bb_info->kill);
4457 }
4458
4459 /* Debugging info at bottom of bb. */
4460
4461 static void
4462 df_md_bottom_dump (basic_block bb, FILE *file)
4463 {
4464 struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
4465 if (!bb_info)
4466 return;
4467
4468 fprintf (file, ";; md out \t");
4469 df_print_regset (file, &bb_info->out);
4470 }
4471
4472 static struct df_problem problem_MD =
4473 {
4474 DF_MD, /* Problem id. */
4475 DF_FORWARD, /* Direction. */
4476 df_md_alloc, /* Allocate the problem specific data. */
4477 df_md_reset, /* Reset global information. */
4478 df_md_free_bb_info, /* Free basic block info. */
4479 df_md_local_compute, /* Local compute function. */
4480 df_md_init, /* Init the solution specific data. */
4481 df_worklist_dataflow, /* Worklist solver. */
4482 df_md_confluence_0, /* Confluence operator 0. */
4483 df_md_confluence_n, /* Confluence operator n. */
4484 df_md_transfer_function, /* Transfer function. */
4485 NULL, /* Finalize function. */
4486 df_md_free, /* Free all of the problem information. */
4487 df_md_free, /* Remove this problem from the stack of dataflow problems. */
4488 NULL, /* Debugging. */
4489 df_md_top_dump, /* Debugging start block. */
4490 df_md_bottom_dump, /* Debugging end block. */
4491 NULL, /* Debugging start insn. */
4492 NULL, /* Debugging end insn. */
4493 NULL, /* Incremental solution verify start. */
4494 NULL, /* Incremental solution verify end. */
4495 NULL, /* Dependent problem. */
4496 sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
4497 TV_DF_MD, /* Timing variable. */
4498 false /* Reset blocks on dropping out of blocks_to_analyze. */
4499 };
4500
4501 /* Create a new MD instance and add it to the existing instance
4502 of DF. */
4503
4504 void
4505 df_md_add_problem (void)
4506 {
4507 df_add_problem (&problem_MD);
4508 }
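
/* For illustration only, the typical life cycle of this problem in a user
   pass (a sketch; the surrounding pass structure is an assumption, not code
   from this file):

       df_md_add_problem ();
       df_analyze ();

       struct df_md_bb_info *info = df_md_get_bb_info (bb->index);
       bool multi = bitmap_bit_p (&info->in, regno);

       df_remove_problem (df_md);

   Here MULTI is true when REGNO, if live, has more than one definition
   reaching the top of BB.  Since the problem is marked optional_p, it is
   also removed automatically by df_finish_pass at the end of the pass.  */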
4509
4510
4511