1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
5 mhayes@redhat.com)
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 02111-1307, USA.
23
24
25 OVERVIEW:
26
27 This file provides some dataflow routines for computing reaching defs,
28 upward exposed uses, live variables, def-use chains, and use-def
29 chains. The global dataflow is performed using simple iterative
30 methods with a worklist and could be sped up by ordering the blocks
31 with a depth first search order.
32
33 A `struct ref' data structure (ref) is allocated for every register
34 reference (def or use) and this records the insn and bb the ref is
35 found within. The refs are linked together in chains of uses and defs
36 for each insn and for each register. Each ref also has a chain field
37 that links all the use refs for a def or all the def refs for a use.
38 This is used to create use-def or def-use chains.
39
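   For illustration, a sketch of walking the defs recorded for an insn
   and, once def-use chains have been built, the uses reached by each
   def:

     struct df_link *def_link, *use_link;

     for (def_link = df->insns[INSN_UID (insn)].defs; def_link;
          def_link = def_link->next)
       for (use_link = DF_REF_CHAIN (def_link->ref); use_link;
            use_link = use_link->next)
         ... use_link->ref is a use reached by this def ...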
40
41 USAGE:
42
43 Here's an example of using the dataflow routines.
44
45 struct df *df;
46
47 df = df_init ();
48
49 df_analyze (df, 0, DF_ALL);
50
51 df_dump (df, DF_ALL, stderr);
52
53 df_finish (df);
54
55
56 df_init simply creates a poor man's object (df) that needs to be
57 passed to all the dataflow routines. df_finish destroys this
58 object and frees up any allocated memory. DF_ALL says to analyze
59 everything.
60
61 df_analyze performs the following:
62
63 1. Records defs and uses by scanning the insns in each basic block
64 or by scanning the insns queued by df_insn_modify.
65 2. Links defs and uses into insn-def and insn-use chains.
66 3. Links defs and uses into reg-def and reg-use chains.
67 4. Assigns LUIDs to each insn (for modified blocks).
68 5. Calculates local reaching definitions.
69 6. Calculates global reaching definitions.
70 7. Creates use-def chains.
71 8. Calculates local reaching uses (upwards exposed uses).
72 9. Calculates global reaching uses.
73 10. Creates def-use chains.
74 11. Calculates local live registers.
75 12. Calculates global live registers.
76 13. Calculates register lifetimes and determines local registers.
77
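   For instance, a pass that only needs def-use and use-def chains could
   request just those analyses (a hypothetical call; any subset of the
   flags may be combined, and passing 0 for BLOCKS analyzes the whole CFG):

     df_analyze (df, 0, DF_UD_CHAIN | DF_DU_CHAIN);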
78
79 PHILOSOPHY:
80
81 Note that the dataflow information is not updated for every newly
82 deleted or created insn. If the dataflow information requires
83    updating then all the changed, new, or deleted insns need to be
84 marked with df_insn_modify (or df_insns_modify) either directly or
85 indirectly (say through calling df_insn_delete). df_insn_modify
86 marks all the modified insns to get processed the next time df_analyze
87 is called.
88
89 Beware that tinkering with insns may invalidate the dataflow information.
90 The philosophy behind these routines is that once the dataflow
91 information has been gathered, the user should store what they require
92 before they tinker with any insn. Once a reg is replaced, for example,
93 then the reg-def/reg-use chains will point to the wrong place. Once a
94 whole lot of changes have been made, df_analyze can be called again
95 to update the dataflow information. Currently, this is not very smart
96 with regard to propagating changes to the dataflow so it should not
97 be called very often.
98
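   A hypothetical sketch of that store-then-tinker-then-reanalyze
   pattern, assuming df_insn_delete takes the same (df, bb, insn)
   arguments as the other insn routines:

     ... record the reg-def or def-use information needed ...
     df_insn_delete (df, bb, insn);
     ... make the remaining changes ...
     df_analyze (df, 0, DF_ALL);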
99
100 DATA STRUCTURES:
101
102 The basic object is a REF (reference) and this may either be a DEF
103 (definition) or a USE of a register.
104
105 These are linked into a variety of lists; namely reg-def, reg-use,
106 insn-def, insn-use, def-use, and use-def lists. For example,
107 the reg-def lists contain all the refs that define a given register
108 while the insn-use lists contain all the refs used by an insn.
109
110 Note that the reg-def and reg-use chains are generally short (except for the
111 hard registers) and thus it is much faster to search these chains
112 rather than searching the def or use bitmaps.
113
114 If the insns are in SSA form then the reg-def and use-def lists
115 should only contain the single defining ref.
116
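   For example, a sketch of scanning the reg-def chain to visit every
   recorded def of register REGNO:

     struct df_link *link;

     for (link = df->regs[regno].defs; link; link = link->next)
       {
         struct ref *def = link->ref;

         ... DF_REF_INSN (def) is the insn containing this def ...
       }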
117
118 TODO:
119
120 1) Incremental dataflow analysis.
121
122 Note that if a loop invariant insn is hoisted (or sunk), we do not
123 need to change the def-use or use-def chains. All we have to do is to
124 change the bb field for all the associated defs and uses and to
125 renumber the LUIDs for the original and new basic blocks of the insn.
126
127 When shadowing loop mems we create new uses and defs for new pseudos
128 so we do not affect the existing dataflow information.
129
130 My current strategy is to queue up all modified, created, or deleted
131 insns so when df_analyze is called we can easily determine all the new
132 or deleted refs. Currently the global dataflow information is
133 recomputed from scratch but this could be propagated more efficiently.
134
135 2) Reduced memory requirements.
136
137    We could maintain a pool of ref structures.  When a ref is deleted it
138 gets returned to the pool (say by linking on to a chain of free refs).
139 This will require a pair of bitmaps for defs and uses so that we can
140 tell which ones have been changed. Alternatively, we could
141 periodically squeeze the def and use tables and associated bitmaps and
142 renumber the def and use ids.
143
144 3) Ordering of reg-def and reg-use lists.
145
146 Should the first entry in the def list be the first def (within a BB)?
147 Similarly, should the first entry in the use list be the last use
148 (within a BB)?
149
150 4) Working with a sub-CFG.
151
152 Often the whole CFG does not need to be analyzed, for example,
153 when optimizing a loop, only certain registers are of interest.
154 Perhaps there should be a bitmap argument to df_analyze to specify
155 which registers should be analyzed?
156
157
158 NOTES:
159
160 Embedded addressing side-effects, such as POST_INC or PRE_INC, generate
161 both a use and a def. These are both marked read/write to show that they
162 are dependent. For example, (set (reg 40) (mem (post_inc (reg 42))))
163 will generate a use of reg 42 followed by a def of reg 42 (both marked
164 read/write). Similarly, (set (reg 40) (mem (pre_dec (reg 41))))
165 generates a use of reg 41 then a def of reg 41 (both marked read/write),
166 even though reg 41 is decremented before it is used for the memory
167 address in this second example.
168
169 A set to a REG inside a ZERO_EXTRACT, SIGN_EXTRACT, or SUBREG invokes
170    a read-modify-write operation.  We generate both a use and a def
171 and again mark them read/write.
172 */
173
174 #include "config.h"
175 #include "system.h"
176 #include "coretypes.h"
177 #include "tm.h"
178 #include "rtl.h"
179 #include "tm_p.h"
180 #include "insn-config.h"
181 #include "recog.h"
182 #include "function.h"
183 #include "regs.h"
184 #include "alloc-pool.h"
185 #include "hard-reg-set.h"
186 #include "basic-block.h"
187 #include "sbitmap.h"
188 #include "bitmap.h"
189 #include "df.h"
190
191 #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
192 do \
193 { \
194 unsigned int node_; \
195 bitmap_iterator bi; \
196 EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, bi) \
197 { \
198 (BB) = BASIC_BLOCK (node_); \
199 CODE; \
200 } \
201 } \
202 while (0)
203
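/* For example, df_refs_record below uses this macro to visit each basic
   block whose index is set in the BLOCKS bitmap:

	basic_block bb;

	FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
	{
	  df_bb_refs_record (df, bb);
	});  */
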
204 static alloc_pool df_ref_pool;
205 static alloc_pool df_link_pool;
206 static struct df *ddf;
207
208 static void df_reg_table_realloc (struct df *, int);
209 static void df_insn_table_realloc (struct df *, unsigned int);
210 static void df_bb_table_realloc (struct df *, unsigned int);
211 static void df_bitmaps_alloc (struct df *, bitmap, int);
212 static void df_bitmaps_free (struct df *, int);
213 static void df_free (struct df *);
214 static void df_alloc (struct df *, int);
215
216 static rtx df_reg_use_gen (unsigned int);
217
218 static inline struct df_link *df_link_create (struct ref *, struct df_link *);
219 static struct df_link *df_ref_unlink (struct df_link **, struct ref *);
220 static void df_def_unlink (struct df *, struct ref *);
221 static void df_use_unlink (struct df *, struct ref *);
222 static void df_insn_refs_unlink (struct df *, basic_block, rtx);
223 #if 0
224 static void df_bb_refs_unlink (struct df *, basic_block);
225 static void df_refs_unlink (struct df *, bitmap);
226 #endif
227
228 static struct ref *df_ref_create (struct df *, rtx, rtx *, rtx,
229 enum df_ref_type, enum df_ref_flags);
230 static void df_ref_record_1 (struct df *, rtx, rtx *, rtx, enum df_ref_type,
231 enum df_ref_flags);
232 static void df_ref_record (struct df *, rtx, rtx *, rtx, enum df_ref_type,
233 enum df_ref_flags);
234 static void df_def_record_1 (struct df *, rtx, basic_block, rtx);
235 static void df_defs_record (struct df *, rtx, basic_block, rtx);
236 static void df_uses_record (struct df *, rtx *, enum df_ref_type,
237 basic_block, rtx, enum df_ref_flags);
238 static void df_insn_refs_record (struct df *, basic_block, rtx);
239 static void df_bb_refs_record (struct df *, basic_block);
240 static void df_refs_record (struct df *, bitmap);
241
242 static void df_bb_reg_def_chain_create (struct df *, basic_block);
243 static void df_reg_def_chain_create (struct df *, bitmap, bool);
244 static void df_bb_reg_use_chain_create (struct df *, basic_block);
245 static void df_reg_use_chain_create (struct df *, bitmap, bool);
246 static void df_bb_du_chain_create (struct df *, basic_block, bitmap);
247 static void df_du_chain_create (struct df *, bitmap);
248 static void df_bb_ud_chain_create (struct df *, basic_block);
249 static void df_ud_chain_create (struct df *, bitmap);
250 static void df_bb_rd_local_compute (struct df *, basic_block, bitmap);
251 static void df_rd_local_compute (struct df *, bitmap);
252 static void df_bb_ru_local_compute (struct df *, basic_block);
253 static void df_ru_local_compute (struct df *, bitmap);
254 static void df_bb_lr_local_compute (struct df *, basic_block);
255 static void df_lr_local_compute (struct df *, bitmap);
256 static void df_bb_reg_info_compute (struct df *, basic_block, bitmap);
257 static void df_reg_info_compute (struct df *, bitmap);
258
259 static int df_bb_luids_set (struct df *df, basic_block);
260 static int df_luids_set (struct df *df, bitmap);
261
262 static int df_modified_p (struct df *, bitmap);
263 static int df_refs_queue (struct df *);
264 static int df_refs_process (struct df *);
265 static int df_bb_refs_update (struct df *, basic_block);
266 static int df_refs_update (struct df *, bitmap);
267 static void df_analyze_1 (struct df *, bitmap, int, int);
268
269 static void df_insns_modify (struct df *, basic_block, rtx, rtx);
270 static int df_rtx_mem_replace (rtx *, void *);
271 static int df_rtx_reg_replace (rtx *, void *);
272 void df_refs_reg_replace (struct df *, bitmap, struct df_link *, rtx, rtx);
273
274 static int df_def_dominates_all_uses_p (struct df *, struct ref *def);
275 static int df_def_dominates_uses_p (struct df *, struct ref *def, bitmap);
276 static struct ref *df_bb_insn_regno_last_use_find (struct df *, basic_block,
277 rtx, unsigned int);
278 static struct ref *df_bb_insn_regno_first_def_find (struct df *, basic_block,
279 rtx, unsigned int);
280
281 static void df_chain_dump (struct df_link *, FILE *file);
282 static void df_chain_dump_regno (struct df_link *, FILE *file);
283 static void df_regno_debug (struct df *, unsigned int, FILE *);
284 static void df_ref_debug (struct df *, struct ref *, FILE *);
285 static void df_rd_transfer_function (int, int *, void *, void *, void *,
286 void *, void *);
287 static void df_ru_transfer_function (int, int *, void *, void *, void *,
288 void *, void *);
289 static void df_lr_transfer_function (int, int *, void *, void *, void *,
290 void *, void *);
291 static void hybrid_search (basic_block, struct dataflow *,
292 sbitmap, sbitmap, sbitmap);
293
294 \f
295 /* Local memory allocation/deallocation routines. */
296
297
298 /* Increase the insn info table to have space for at least SIZE + 1
299 elements. */
300 static void
301 df_insn_table_realloc (struct df *df, unsigned int size)
302 {
303 size++;
304 if (size <= df->insn_size)
305 return;
306
307 /* Make the table a little larger than requested, so we do not need
308 to enlarge it so often. */
309 size += df->insn_size / 4;
310
311 df->insns = xrealloc (df->insns, size * sizeof (struct insn_info));
312
313 memset (df->insns + df->insn_size, 0,
314 (size - df->insn_size) * sizeof (struct insn_info));
315
316 df->insn_size = size;
317
318 if (! df->insns_modified)
319 {
320 df->insns_modified = BITMAP_XMALLOC ();
321 bitmap_zero (df->insns_modified);
322 }
323 }
324
325 /* Increase the bb info table to have space for at least SIZE + 1
326 elements. */
327
328 static void
329 df_bb_table_realloc (struct df *df, unsigned int size)
330 {
331 size++;
332 if (size <= df->n_bbs)
333 return;
334
335 /* Make the table a little larger than requested, so we do not need
336 to enlarge it so often. */
337 size += df->n_bbs / 4;
338
339 df->bbs = xrealloc (df->bbs, size * sizeof (struct bb_info));
340
341 memset (df->bbs + df->n_bbs, 0, (size - df->n_bbs) * sizeof (struct bb_info));
342
343 df->n_bbs = size;
344 }
345
346 /* Increase the reg info table by SIZE more elements. */
347 static void
348 df_reg_table_realloc (struct df *df, int size)
349 {
350 /* Make table 25 percent larger by default. */
351 if (! size)
352 size = df->reg_size / 4;
353
354 size += df->reg_size;
355 if (size < max_reg_num ())
356 size = max_reg_num ();
357
358 df->regs = xrealloc (df->regs, size * sizeof (struct reg_info));
359 df->reg_def_last = xrealloc (df->reg_def_last,
360 size * sizeof (struct ref *));
361
362 /* Zero the new entries. */
363 memset (df->regs + df->reg_size, 0,
364 (size - df->reg_size) * sizeof (struct reg_info));
365
366 df->reg_size = size;
367 }
368
369
370 /* Allocate bitmaps for each basic block. */
371
372 static void
373 df_bitmaps_alloc (struct df *df, bitmap blocks, int flags)
374 {
375 basic_block bb;
376
377 df->n_defs = df->def_id;
378 df->n_uses = df->use_id;
379
380 if (!blocks)
381 blocks = df->all_blocks;
382
383 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
384 {
385 struct bb_info *bb_info = DF_BB_INFO (df, bb);
386
387 if (flags & DF_RD)
388 {
389 if (!bb_info->rd_in)
390 {
391 /* Allocate bitmaps for reaching definitions. */
392 bb_info->rd_kill = BITMAP_XMALLOC ();
393 bb_info->rd_gen = BITMAP_XMALLOC ();
394 bb_info->rd_in = BITMAP_XMALLOC ();
395 bb_info->rd_out = BITMAP_XMALLOC ();
396 }
397 else
398 {
399 bitmap_clear (bb_info->rd_kill);
400 bitmap_clear (bb_info->rd_gen);
401 bitmap_clear (bb_info->rd_in);
402 bitmap_clear (bb_info->rd_out);
403 }
404 }
405
406 if (flags & DF_RU)
407 {
408 if (!bb_info->ru_in)
409 {
410 /* Allocate bitmaps for upward exposed uses. */
411 bb_info->ru_kill = BITMAP_XMALLOC ();
412 bb_info->ru_gen = BITMAP_XMALLOC ();
413 bb_info->ru_in = BITMAP_XMALLOC ();
414 bb_info->ru_out = BITMAP_XMALLOC ();
415 }
416 else
417 {
418 bitmap_clear (bb_info->ru_kill);
419 bitmap_clear (bb_info->ru_gen);
420 bitmap_clear (bb_info->ru_in);
421 bitmap_clear (bb_info->ru_out);
422 }
423 }
424
425 if (flags & DF_LR)
426 {
427 if (!bb_info->lr_in)
428 {
429 /* Allocate bitmaps for live variables. */
430 bb_info->lr_def = BITMAP_XMALLOC ();
431 bb_info->lr_use = BITMAP_XMALLOC ();
432 bb_info->lr_in = BITMAP_XMALLOC ();
433 bb_info->lr_out = BITMAP_XMALLOC ();
434 }
435 else
436 {
437 bitmap_clear (bb_info->lr_def);
438 bitmap_clear (bb_info->lr_use);
439 bitmap_clear (bb_info->lr_in);
440 bitmap_clear (bb_info->lr_out);
441 }
442 }
443 });
444 }
445
446
447 /* Free bitmaps for each basic block. */
448 static void
449 df_bitmaps_free (struct df *df, int flags)
450 {
451 basic_block bb;
452
453 FOR_EACH_BB (bb)
454 {
455 struct bb_info *bb_info = DF_BB_INFO (df, bb);
456
457 if (!bb_info)
458 continue;
459
460 if ((flags & DF_RD) && bb_info->rd_in)
461 {
462 /* Free bitmaps for reaching definitions. */
463 BITMAP_XFREE (bb_info->rd_kill);
464 bb_info->rd_kill = NULL;
465 BITMAP_XFREE (bb_info->rd_gen);
466 bb_info->rd_gen = NULL;
467 BITMAP_XFREE (bb_info->rd_in);
468 bb_info->rd_in = NULL;
469 BITMAP_XFREE (bb_info->rd_out);
470 bb_info->rd_out = NULL;
471 }
472
473 if ((flags & DF_RU) && bb_info->ru_in)
474 {
475 /* Free bitmaps for upward exposed uses. */
476 BITMAP_XFREE (bb_info->ru_kill);
477 bb_info->ru_kill = NULL;
478 BITMAP_XFREE (bb_info->ru_gen);
479 bb_info->ru_gen = NULL;
480 BITMAP_XFREE (bb_info->ru_in);
481 bb_info->ru_in = NULL;
482 BITMAP_XFREE (bb_info->ru_out);
483 bb_info->ru_out = NULL;
484 }
485
486 if ((flags & DF_LR) && bb_info->lr_in)
487 {
488 /* Free bitmaps for live variables. */
489 BITMAP_XFREE (bb_info->lr_def);
490 bb_info->lr_def = NULL;
491 BITMAP_XFREE (bb_info->lr_use);
492 bb_info->lr_use = NULL;
493 BITMAP_XFREE (bb_info->lr_in);
494 bb_info->lr_in = NULL;
495 BITMAP_XFREE (bb_info->lr_out);
496 bb_info->lr_out = NULL;
497 }
498 }
499 df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
500 }
501
502
503 /* Allocate and initialize dataflow memory. */
504 static void
505 df_alloc (struct df *df, int n_regs)
506 {
507 int n_insns;
508 basic_block bb;
509
510 df_link_pool = create_alloc_pool ("df_link pool", sizeof (struct df_link),
511 100);
512 df_ref_pool = create_alloc_pool ("df_ref pool", sizeof (struct ref), 100);
513
514 /* Perhaps we should use LUIDs to save memory for the insn_refs
515 table. This is only a small saving; a few pointers. */
516 n_insns = get_max_uid () + 1;
517
518 df->def_id = 0;
519 df->n_defs = 0;
520 /* Approximate number of defs by number of insns. */
521 df->def_size = n_insns;
522 df->defs = xmalloc (df->def_size * sizeof (*df->defs));
523
524 df->use_id = 0;
525 df->n_uses = 0;
526 /* Approximate number of uses by twice number of insns. */
527 df->use_size = n_insns * 2;
528 df->uses = xmalloc (df->use_size * sizeof (*df->uses));
529
530 df->n_regs = n_regs;
531 df->n_bbs = last_basic_block;
532
533 /* Allocate temporary working array used during local dataflow analysis. */
534 df_insn_table_realloc (df, n_insns);
535
536 df_reg_table_realloc (df, df->n_regs);
537
538 df->bbs_modified = BITMAP_XMALLOC ();
539 bitmap_zero (df->bbs_modified);
540
541 df->flags = 0;
542
543 df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
544
545 df->all_blocks = BITMAP_XMALLOC ();
546 FOR_EACH_BB (bb)
547 bitmap_set_bit (df->all_blocks, bb->index);
548 }
549
550
551 /* Free all the dataflow info. */
552 static void
553 df_free (struct df *df)
554 {
555 df_bitmaps_free (df, DF_ALL);
556
557 if (df->bbs)
558 free (df->bbs);
559 df->bbs = 0;
560
561 if (df->insns)
562 free (df->insns);
563 df->insns = 0;
564 df->insn_size = 0;
565
566 if (df->defs)
567 free (df->defs);
568 df->defs = 0;
569 df->def_size = 0;
570 df->def_id = 0;
571
572 if (df->uses)
573 free (df->uses);
574 df->uses = 0;
575 df->use_size = 0;
576 df->use_id = 0;
577
578 if (df->regs)
579 free (df->regs);
580 df->regs = 0;
581 df->reg_size = 0;
582
583 if (df->bbs_modified)
584 BITMAP_XFREE (df->bbs_modified);
585 df->bbs_modified = 0;
586
587 if (df->insns_modified)
588 BITMAP_XFREE (df->insns_modified);
589 df->insns_modified = 0;
590
591 BITMAP_XFREE (df->all_blocks);
592 df->all_blocks = 0;
593
594 free_alloc_pool (df_ref_pool);
595 free_alloc_pool (df_link_pool);
596 }
597 \f
598 /* Local miscellaneous routines. */
599
600 /* Return a USE for register REGNO. */
601 static rtx df_reg_use_gen (unsigned int regno)
602 {
603 rtx reg;
604 rtx use;
605
606 reg = regno_reg_rtx[regno];
607
608 use = gen_rtx_USE (GET_MODE (reg), reg);
609 return use;
610 }
611 \f
612 /* Local chain manipulation routines. */
613
614 /* Create a link in a def-use or use-def chain. */
615 static inline struct df_link *
616 df_link_create (struct ref *ref, struct df_link *next)
617 {
618 struct df_link *link;
619
620 link = pool_alloc (df_link_pool);
621 link->next = next;
622 link->ref = ref;
623 return link;
624 }
625
626 /* Releases members of the CHAIN. */
627
628 static void
629 free_reg_ref_chain (struct df_link **chain)
630 {
631 struct df_link *act, *next;
632
633 for (act = *chain; act; act = next)
634 {
635 next = act->next;
636 pool_free (df_link_pool, act);
637 }
638
639 *chain = NULL;
640 }
641
642 /* Remove REF from the chain pointed to by PHEAD.  */
643 static struct df_link *
644 df_ref_unlink (struct df_link **phead, struct ref *ref)
645 {
646 struct df_link *link = *phead;
647
648 if (link)
649 {
650 if (! link->next)
651 {
652 /* Only a single ref. It must be the one we want.
653 If not, the def-use and use-def chains are likely to
654 be inconsistent. */
655 gcc_assert (link->ref == ref);
656
657 /* Now have an empty chain. */
658 *phead = NULL;
659 }
660 else
661 {
662 /* Multiple refs. One of them must be us. */
663 if (link->ref == ref)
664 *phead = link->next;
665 else
666 {
667 /* Follow chain. */
668 for (; link->next; link = link->next)
669 {
670 if (link->next->ref == ref)
671 {
672 /* Unlink from list. */
673 link->next = link->next->next;
674 return link->next;
675 }
676 }
677 }
678 }
679 }
680 return link;
681 }
682
683
684 /* Unlink REF from all def-use/use-def chains, etc. */
685 int
686 df_ref_remove (struct df *df, struct ref *ref)
687 {
688 if (DF_REF_REG_DEF_P (ref))
689 {
690 df_def_unlink (df, ref);
691 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
692 }
693 else
694 {
695 df_use_unlink (df, ref);
696 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
697 }
698 return 1;
699 }
700
701
702 /* Unlink DEF from use-def and reg-def chains. */
703 static void
704 df_def_unlink (struct df *df ATTRIBUTE_UNUSED, struct ref *def)
705 {
706 struct df_link *du_link;
707 unsigned int dregno = DF_REF_REGNO (def);
708
709 /* Follow def-use chain to find all the uses of this def. */
710 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
711 {
712 struct ref *use = du_link->ref;
713
714 /* Unlink this def from the use-def chain. */
715 df_ref_unlink (&DF_REF_CHAIN (use), def);
716 }
717 DF_REF_CHAIN (def) = 0;
718
719 /* Unlink def from reg-def chain. */
720 df_ref_unlink (&df->regs[dregno].defs, def);
721
722 df->defs[DF_REF_ID (def)] = 0;
723 }
724
725
726 /* Unlink USE from def-use and reg-use chains.  */
727 static void
728 df_use_unlink (struct df *df ATTRIBUTE_UNUSED, struct ref *use)
729 {
730 struct df_link *ud_link;
731 unsigned int uregno = DF_REF_REGNO (use);
732
733 /* Follow use-def chain to find all the defs of this use. */
734 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
735 {
736 struct ref *def = ud_link->ref;
737
738 /* Unlink this use from the def-use chain. */
739 df_ref_unlink (&DF_REF_CHAIN (def), use);
740 }
741 DF_REF_CHAIN (use) = 0;
742
743 /* Unlink use from reg-use chain. */
744 df_ref_unlink (&df->regs[uregno].uses, use);
745
746 df->uses[DF_REF_ID (use)] = 0;
747 }
748 \f
749 /* Local routines for recording refs. */
750
751
752 /* Create a new ref of type DF_REF_TYPE for register REG at address
753 LOC within INSN of BB. */
754 static struct ref *
755 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
756 enum df_ref_type ref_type, enum df_ref_flags ref_flags)
757 {
758 struct ref *this_ref;
759
760 this_ref = pool_alloc (df_ref_pool);
761 DF_REF_REG (this_ref) = reg;
762 DF_REF_LOC (this_ref) = loc;
763 DF_REF_INSN (this_ref) = insn;
764 DF_REF_CHAIN (this_ref) = 0;
765 DF_REF_TYPE (this_ref) = ref_type;
766 DF_REF_FLAGS (this_ref) = ref_flags;
767 DF_REF_DATA (this_ref) = NULL;
768
769 if (ref_type == DF_REF_REG_DEF)
770 {
771 if (df->def_id >= df->def_size)
772 {
773 /* Make table 25 percent larger. */
774 df->def_size += (df->def_size / 4);
775 df->defs = xrealloc (df->defs,
776 df->def_size * sizeof (*df->defs));
777 }
778 DF_REF_ID (this_ref) = df->def_id;
779 df->defs[df->def_id++] = this_ref;
780 }
781 else
782 {
783 if (df->use_id >= df->use_size)
784 {
785 /* Make table 25 percent larger. */
786 df->use_size += (df->use_size / 4);
787 df->uses = xrealloc (df->uses,
788 df->use_size * sizeof (*df->uses));
789 }
790 DF_REF_ID (this_ref) = df->use_id;
791 df->uses[df->use_id++] = this_ref;
792 }
793 return this_ref;
794 }
795
796
797 /* Create a new reference of type DF_REF_TYPE for a single register REG,
798 used inside the LOC rtx of INSN. */
799 static void
800 df_ref_record_1 (struct df *df, rtx reg, rtx *loc, rtx insn,
801 enum df_ref_type ref_type, enum df_ref_flags ref_flags)
802 {
803 df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
804 }
805
806
807 /* Create new references of type DF_REF_TYPE for each part of register REG
808 at address LOC within INSN of BB. */
809 static void
810 df_ref_record (struct df *df, rtx reg, rtx *loc, rtx insn,
811 enum df_ref_type ref_type, enum df_ref_flags ref_flags)
812 {
813 unsigned int regno;
814
815 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
816
817 /* For the reg allocator we are interested in some SUBREG rtx's, but not
818 all. Notably only those representing a word extraction from a multi-word
819    reg.  As written in the documentation those should have the form
820 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
821 XXX Is that true? We could also use the global word_mode variable. */
822 if (GET_CODE (reg) == SUBREG
823 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
824 || GET_MODE_SIZE (GET_MODE (reg))
825 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
826 {
827 loc = &SUBREG_REG (reg);
828 reg = *loc;
829 ref_flags |= DF_REF_STRIPPED;
830 }
831
832 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
833 if (regno < FIRST_PSEUDO_REGISTER)
834 {
835 int i;
836 int endregno;
837
838 if (! (df->flags & DF_HARD_REGS))
839 return;
840
841 /* GET_MODE (reg) is correct here. We do not want to go into a SUBREG
842 for the mode, because we only want to add references to regs, which
843 are really referenced. E.g., a (subreg:SI (reg:DI 0) 0) does _not_
844 reference the whole reg 0 in DI mode (which would also include
845 reg 1, at least, if 0 and 1 are SImode registers). */
846 endregno = hard_regno_nregs[regno][GET_MODE (reg)];
847 if (GET_CODE (reg) == SUBREG)
848 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
849 SUBREG_BYTE (reg), GET_MODE (reg));
850 endregno += regno;
851
852 for (i = regno; i < endregno; i++)
853 df_ref_record_1 (df, regno_reg_rtx[i],
854 loc, insn, ref_type, ref_flags);
855 }
856 else
857 {
858 df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
859 }
860 }
861
862
863 /* Return nonzero if a write to the SUBREG X is a read-modify-write
864    operation, i.e. it does not define the entire underlying register.  */
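/* For example, on a hypothetical 32-bit target (UNITS_PER_WORD == 4), a
   write to (subreg:HI (reg:DI 100) 0) is read-modify-write since it leaves
   the rest of the DImode register intact, whereas a write to the
   paradoxical (subreg:DI (reg:SI 101) 0) clobbers the whole register and
   so is not.  */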
865 bool
866 read_modify_subreg_p (rtx x)
867 {
868 unsigned int isize, osize;
869 if (GET_CODE (x) != SUBREG)
870 return false;
871 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
872 osize = GET_MODE_SIZE (GET_MODE (x));
873 /* Paradoxical subreg writes don't leave a trace of the old content. */
874 return (isize > osize && isize > UNITS_PER_WORD);
875 }
876
877
878 /* Process all the registers defined in the rtx, X. */
879 static void
880 df_def_record_1 (struct df *df, rtx x, basic_block bb, rtx insn)
881 {
882 rtx *loc;
883 rtx dst;
884 enum df_ref_flags flags = 0;
885
886   /* We may recursively call ourselves on EXPR_LIST when dealing with a
887      PARALLEL construct.  */
888 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
889 loc = &XEXP (x, 0);
890 else
891 loc = &SET_DEST (x);
892 dst = *loc;
893
894 /* Some targets place small structures in registers for
895 return values of functions. */
896 if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
897 {
898 int i;
899
900 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
901 {
902 rtx temp = XVECEXP (dst, 0, i);
903 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
904 || GET_CODE (temp) == SET)
905 df_def_record_1 (df, temp, bb, insn);
906 }
907 return;
908 }
909
910 /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
911 be handy for the reg allocator. */
912 while (GET_CODE (dst) == STRICT_LOW_PART
913 || GET_CODE (dst) == ZERO_EXTRACT
914 || GET_CODE (dst) == SIGN_EXTRACT
915 || ((df->flags & DF_FOR_REGALLOC) == 0
916 && read_modify_subreg_p (dst)))
917 {
918       /* A strict low part always contains a SUBREG, but we do not want to
919          expose it, as the whole register is always considered.  */
920 if (GET_CODE (dst) == STRICT_LOW_PART)
921 {
922 loc = &XEXP (dst, 0);
923 dst = *loc;
924 }
925 loc = &XEXP (dst, 0);
926 dst = *loc;
927 flags |= DF_REF_READ_WRITE;
928 }
929
930 if (REG_P (dst)
931 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
932 df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
933 }
934
935
936 /* Process all the registers defined in the pattern rtx, X. */
937 static void
938 df_defs_record (struct df *df, rtx x, basic_block bb, rtx insn)
939 {
940 RTX_CODE code = GET_CODE (x);
941
942 if (code == SET || code == CLOBBER)
943 {
944 /* Mark the single def within the pattern. */
945 df_def_record_1 (df, x, bb, insn);
946 }
947 else if (code == PARALLEL)
948 {
949 int i;
950
951 /* Mark the multiple defs within the pattern. */
952 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
953 {
954 code = GET_CODE (XVECEXP (x, 0, i));
955 if (code == SET || code == CLOBBER)
956 df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
957 }
958 }
959 }
960
961
962 /* Process all the registers used in the rtx at address LOC. */
963 static void
964 df_uses_record (struct df *df, rtx *loc, enum df_ref_type ref_type,
965 basic_block bb, rtx insn, enum df_ref_flags flags)
966 {
967 RTX_CODE code;
968 rtx x;
969 retry:
970 x = *loc;
971 if (!x)
972 return;
973 code = GET_CODE (x);
974 switch (code)
975 {
976 case LABEL_REF:
977 case SYMBOL_REF:
978 case CONST_INT:
979 case CONST:
980 case CONST_DOUBLE:
981 case CONST_VECTOR:
982 case PC:
983 case CC0:
984 case ADDR_VEC:
985 case ADDR_DIFF_VEC:
986 return;
987
988 case CLOBBER:
989 /* If we are clobbering a MEM, mark any registers inside the address
990 as being used. */
991 if (MEM_P (XEXP (x, 0)))
992 df_uses_record (df, &XEXP (XEXP (x, 0), 0),
993 DF_REF_REG_MEM_STORE, bb, insn, flags);
994
995 /* If we're clobbering a REG then we have a def so ignore. */
996 return;
997
998 case MEM:
999 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, 0);
1000 return;
1001
1002 case SUBREG:
1003 /* While we're here, optimize this case. */
1004
1005 /* In case the SUBREG is not of a REG, do not optimize. */
1006 if (!REG_P (SUBREG_REG (x)))
1007 {
1008 loc = &SUBREG_REG (x);
1009 df_uses_record (df, loc, ref_type, bb, insn, flags);
1010 return;
1011 }
1012 /* ... Fall through ... */
1013
1014 case REG:
1015 df_ref_record (df, x, loc, insn, ref_type, flags);
1016 return;
1017
1018 case SET:
1019 {
1020 rtx dst = SET_DEST (x);
1021
1022 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1023
1024 switch (GET_CODE (dst))
1025 {
1026 case SUBREG:
1027 if ((df->flags & DF_FOR_REGALLOC) == 0
1028 && read_modify_subreg_p (dst))
1029 {
1030 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1031 insn, DF_REF_READ_WRITE);
1032 break;
1033 }
1034 /* Fall through. */
1035 case REG:
1036 case PARALLEL:
1037 case PC:
1038 case CC0:
1039 break;
1040 case MEM:
1041 df_uses_record (df, &XEXP (dst, 0),
1042 DF_REF_REG_MEM_STORE,
1043 bb, insn, 0);
1044 break;
1045 case STRICT_LOW_PART:
1046 /* A strict_low_part uses the whole REG and not just the
1047 SUBREG. */
1048 dst = XEXP (dst, 0);
1049 gcc_assert (GET_CODE (dst) == SUBREG);
1050 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1051 insn, DF_REF_READ_WRITE);
1052 break;
1053 case ZERO_EXTRACT:
1054 case SIGN_EXTRACT:
1055 df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1056 DF_REF_READ_WRITE);
1057 df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1058 df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1059 dst = XEXP (dst, 0);
1060 break;
1061 default:
1062 gcc_unreachable ();
1063 }
1064 return;
1065 }
1066
1067 case RETURN:
1068 break;
1069
1070 case ASM_OPERANDS:
1071 case UNSPEC_VOLATILE:
1072 case TRAP_IF:
1073 case ASM_INPUT:
1074 {
1075 /* Traditional and volatile asm instructions must be considered to use
1076 and clobber all hard registers, all pseudo-registers and all of
1077 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1078
1079 Consider for instance a volatile asm that changes the fpu rounding
1080 mode. An insn should not be moved across this even if it only uses
1081 pseudo-regs because it might give an incorrectly rounded result.
1082
1083 For now, just mark any regs we can find in ASM_OPERANDS as
1084 used. */
1085
1086 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1087           We cannot just fall through here since we would then be confused
1088           by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
1089           a traditional asm unlike its normal usage.  */
1090 if (code == ASM_OPERANDS)
1091 {
1092 int j;
1093
1094 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1095 df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1096 DF_REF_REG_USE, bb, insn, 0);
1097 return;
1098 }
1099 break;
1100 }
1101
1102 case PRE_DEC:
1103 case POST_DEC:
1104 case PRE_INC:
1105 case POST_INC:
1106 case PRE_MODIFY:
1107 case POST_MODIFY:
1108 /* Catch the def of the register being modified. */
1109 df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1110
1111 /* ... Fall through to handle uses ... */
1112
1113 default:
1114 break;
1115 }
1116
1117 /* Recursively scan the operands of this expression. */
1118 {
1119 const char *fmt = GET_RTX_FORMAT (code);
1120 int i;
1121
1122 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1123 {
1124 if (fmt[i] == 'e')
1125 {
1126 /* Tail recursive case: save a function call level. */
1127 if (i == 0)
1128 {
1129 loc = &XEXP (x, 0);
1130 goto retry;
1131 }
1132 df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1133 }
1134 else if (fmt[i] == 'E')
1135 {
1136 int j;
1137 for (j = 0; j < XVECLEN (x, i); j++)
1138 df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1139 bb, insn, flags);
1140 }
1141 }
1142 }
1143 }
1144
1145
1146 /* Record all the refs within INSN of basic block BB.  */
1147 static void
1148 df_insn_refs_record (struct df *df, basic_block bb, rtx insn)
1149 {
1150 int i;
1151
1152 if (INSN_P (insn))
1153 {
1154 rtx note;
1155
1156 /* Record register defs. */
1157 df_defs_record (df, PATTERN (insn), bb, insn);
1158
1159 if (df->flags & DF_EQUIV_NOTES)
1160 for (note = REG_NOTES (insn); note;
1161 note = XEXP (note, 1))
1162 {
1163 switch (REG_NOTE_KIND (note))
1164 {
1165 case REG_EQUIV:
1166 case REG_EQUAL:
1167 df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1168 bb, insn, 0);
1169 default:
1170 break;
1171 }
1172 }
1173
1174 if (CALL_P (insn))
1175 {
1176 rtx note;
1177 rtx x;
1178
1179 /* Record the registers used to pass arguments. */
1180 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1181 note = XEXP (note, 1))
1182 {
1183 if (GET_CODE (XEXP (note, 0)) == USE)
1184 df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
1185 bb, insn, 0);
1186 }
1187
1188 /* The stack ptr is used (honorarily) by a CALL insn. */
1189 x = df_reg_use_gen (STACK_POINTER_REGNUM);
1190 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);
1191
1192 if (df->flags & DF_HARD_REGS)
1193 {
1194 /* Calls may also reference any of the global registers,
1195 so they are recorded as used. */
1196 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1197 if (global_regs[i])
1198 {
1199 x = df_reg_use_gen (i);
1200 df_uses_record (df, &SET_DEST (x),
1201 DF_REF_REG_USE, bb, insn, 0);
1202 }
1203 }
1204 }
1205
1206 /* Record the register uses. */
1207 df_uses_record (df, &PATTERN (insn),
1208 DF_REF_REG_USE, bb, insn, 0);
1209
1210 if (CALL_P (insn))
1211 {
1212 rtx note;
1213
1214 /* We do not record hard registers clobbered by the call,
1215 since there are awfully many of them and "defs" created
1216 through them are not interesting (since no use can be legally
1217 reached by them). So we must just make sure we include them when
1218 computing kill bitmaps. */
1219
1220 /* There may be extra registers to be clobbered. */
1221 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1222 note;
1223 note = XEXP (note, 1))
1224 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1225 df_defs_record (df, XEXP (note, 0), bb, insn);
1226 }
1227 }
1228 }
1229
1230
1231 /* Record all the refs within the basic block BB. */
1232 static void
1233 df_bb_refs_record (struct df *df, basic_block bb)
1234 {
1235 rtx insn;
1236
1237 /* Scan the block an insn at a time from beginning to end. */
1238 FOR_BB_INSNS (bb, insn)
1239 {
1240 if (INSN_P (insn))
1241 {
1242 /* Record defs within INSN. */
1243 df_insn_refs_record (df, bb, insn);
1244 }
1245 }
1246 }
1247
1248
1249 /* Record all the refs in the basic blocks specified by BLOCKS. */
1250 static void
1251 df_refs_record (struct df *df, bitmap blocks)
1252 {
1253 basic_block bb;
1254
1255 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1256 {
1257 df_bb_refs_record (df, bb);
1258 });
1259 }
1260 \f
1261 /* Dataflow analysis routines. */
1262
1263 /* Create reg-def chains for basic block BB. These are a list of
1264 definitions for each register. */
1265
1266 static void
1267 df_bb_reg_def_chain_create (struct df *df, basic_block bb)
1268 {
1269 rtx insn;
1270
1271 /* Perhaps the defs should be sorted using a depth first search
1272 of the CFG (or possibly a breadth first search). */
1273
1274 FOR_BB_INSNS_REVERSE (bb, insn)
1275 {
1276 struct df_link *link;
1277 unsigned int uid = INSN_UID (insn);
1278
1279 if (! INSN_P (insn))
1280 continue;
1281
1282 for (link = df->insns[uid].defs; link; link = link->next)
1283 {
1284 struct ref *def = link->ref;
1285 unsigned int dregno = DF_REF_REGNO (def);
1286
1287           /* Do not add refs to the chain twice, i.e., only add new
1288 refs. XXX the same could be done by testing if the
1289 current insn is a modified (or a new) one. This would be
1290 faster. */
1291 if (DF_REF_ID (def) < df->def_id_save)
1292 continue;
1293
1294 df->regs[dregno].defs = df_link_create (def, df->regs[dregno].defs);
1295 }
1296 }
1297 }
1298
1299
1300 /* Create reg-def chains for each basic block within BLOCKS. These
1301 are a list of definitions for each register. If REDO is true, add
1302 all defs, otherwise just add the new defs. */
1303
1304 static void
1305 df_reg_def_chain_create (struct df *df, bitmap blocks, bool redo)
1306 {
1307 basic_block bb;
1308 #ifdef ENABLE_CHECKING
1309 unsigned regno;
1310 #endif
1311 unsigned old_def_id_save = df->def_id_save;
1312
1313 if (redo)
1314 {
1315 #ifdef ENABLE_CHECKING
1316 for (regno = 0; regno < df->n_regs; regno++)
1317 gcc_assert (!df->regs[regno].defs);
1318 #endif
1319
1320 /* Pretend that all defs are new. */
1321 df->def_id_save = 0;
1322 }
1323
1324 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1325 {
1326 df_bb_reg_def_chain_create (df, bb);
1327 });
1328
1329 df->def_id_save = old_def_id_save;
1330 }
1331
1332 /* Remove all reg-def chains stored in the dataflow object DF. */
1333
1334 static void
1335 df_reg_def_chain_clean (struct df *df)
1336 {
1337 unsigned regno;
1338
1339 for (regno = 0; regno < df->n_regs; regno++)
1340 free_reg_ref_chain (&df->regs[regno].defs);
1341 }
1342
1343 /* Create reg-use chains for basic block BB. These are a list of uses
1344 for each register. */
1345
1346 static void
1347 df_bb_reg_use_chain_create (struct df *df, basic_block bb)
1348 {
1349 rtx insn;
1350
1351 /* Scan in forward order so that the last uses appear at the start
1352 of the chain. */
1353
1354 FOR_BB_INSNS (bb, insn)
1355 {
1356 struct df_link *link;
1357 unsigned int uid = INSN_UID (insn);
1358
1359 if (! INSN_P (insn))
1360 continue;
1361
1362 for (link = df->insns[uid].uses; link; link = link->next)
1363 {
1364 struct ref *use = link->ref;
1365 unsigned int uregno = DF_REF_REGNO (use);
1366
1367           /* Do not add refs to the chain twice, i.e., only add new
1368 refs. XXX the same could be done by testing if the
1369 current insn is a modified (or a new) one. This would be
1370 faster. */
1371 if (DF_REF_ID (use) < df->use_id_save)
1372 continue;
1373
1374 df->regs[uregno].uses
1375 = df_link_create (use, df->regs[uregno].uses);
1376 }
1377 }
1378 }
1379
1380
1381 /* Create reg-use chains for each basic block within BLOCKS. These
1382 are a list of uses for each register. If REDO is true, remove the
1383 old reg-use chains first, otherwise just add new uses to them. */
1384
1385 static void
1386 df_reg_use_chain_create (struct df *df, bitmap blocks, bool redo)
1387 {
1388 basic_block bb;
1389 #ifdef ENABLE_CHECKING
1390 unsigned regno;
1391 #endif
1392 unsigned old_use_id_save = df->use_id_save;
1393
1394 if (redo)
1395 {
1396 #ifdef ENABLE_CHECKING
1397 for (regno = 0; regno < df->n_regs; regno++)
1398 gcc_assert (!df->regs[regno].uses);
1399 #endif
1400
1401 /* Pretend that all uses are new. */
1402 df->use_id_save = 0;
1403 }
1404
1405 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1406 {
1407 df_bb_reg_use_chain_create (df, bb);
1408 });
1409
1410 df->use_id_save = old_use_id_save;
1411 }
1412
1413 /* Remove all reg-use chains stored in the dataflow object DF. */
1414
1415 static void
1416 df_reg_use_chain_clean (struct df *df)
1417 {
1418 unsigned regno;
1419
1420 for (regno = 0; regno < df->n_regs; regno++)
1421 free_reg_ref_chain (&df->regs[regno].uses);
1422 }
1423
1424 /* Create def-use chains from reaching use bitmaps for basic block BB. */
1425 static void
1426 df_bb_du_chain_create (struct df *df, basic_block bb, bitmap ru)
1427 {
1428 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1429 rtx insn;
1430
1431 bitmap_copy (ru, bb_info->ru_out);
1432
1433 /* For each def in BB create a linked list (chain) of uses
1434 reached from the def. */
1435 FOR_BB_INSNS_REVERSE (bb, insn)
1436 {
1437 struct df_link *def_link;
1438 struct df_link *use_link;
1439 unsigned int uid = INSN_UID (insn);
1440
1441 if (! INSN_P (insn))
1442 continue;
1443
1444 /* For each def in insn... */
1445 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1446 {
1447 struct ref *def = def_link->ref;
1448 unsigned int dregno = DF_REF_REGNO (def);
1449
1450 DF_REF_CHAIN (def) = 0;
1451
1452 /* While the reg-use chains are not essential, it
1453 is _much_ faster to search these short lists rather
1454 than all the reaching uses, especially for large functions. */
1455 for (use_link = df->regs[dregno].uses; use_link;
1456 use_link = use_link->next)
1457 {
1458 struct ref *use = use_link->ref;
1459
1460 if (bitmap_bit_p (ru, DF_REF_ID (use)))
1461 {
1462 DF_REF_CHAIN (def)
1463 = df_link_create (use, DF_REF_CHAIN (def));
1464
1465 bitmap_clear_bit (ru, DF_REF_ID (use));
1466 }
1467 }
1468 }
1469
1470 /* For each use in insn... */
1471 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1472 {
1473 struct ref *use = use_link->ref;
1474 bitmap_set_bit (ru, DF_REF_ID (use));
1475 }
1476 }
1477 }
1478
1479
1480 /* Create def-use chains from reaching use bitmaps for basic blocks
1481 in BLOCKS. */
1482 static void
1483 df_du_chain_create (struct df *df, bitmap blocks)
1484 {
1485 bitmap ru;
1486 basic_block bb;
1487
1488 ru = BITMAP_XMALLOC ();
1489
1490 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1491 {
1492 df_bb_du_chain_create (df, bb, ru);
1493 });
1494
1495 BITMAP_XFREE (ru);
1496 }
1497
1498
1499 /* Create use-def chains from reaching def bitmaps for basic block BB. */
1500 static void
1501 df_bb_ud_chain_create (struct df *df, basic_block bb)
1502 {
1503 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1504 struct ref **reg_def_last = df->reg_def_last;
1505 rtx insn;
1506
1507 memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1508
1509 /* For each use in BB create a linked list (chain) of defs
1510 that reach the use. */
1511 FOR_BB_INSNS (bb, insn)
1512 {
1513 unsigned int uid = INSN_UID (insn);
1514 struct df_link *use_link;
1515 struct df_link *def_link;
1516
1517 if (! INSN_P (insn))
1518 continue;
1519
1520 /* For each use in insn... */
1521 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1522 {
1523 struct ref *use = use_link->ref;
1524 unsigned int regno = DF_REF_REGNO (use);
1525
1526 DF_REF_CHAIN (use) = 0;
1527
1528 /* Has regno been defined in this BB yet? If so, use
1529 the last def as the single entry for the use-def
1530 chain for this use. Otherwise, we need to add all
1531 the defs using this regno that reach the start of
1532 this BB. */
1533 if (reg_def_last[regno])
1534 {
1535 DF_REF_CHAIN (use)
1536 = df_link_create (reg_def_last[regno], 0);
1537 }
1538 else
1539 {
1540 /* While the reg-def chains are not essential, it is
1541 _much_ faster to search these short lists rather than
1542 all the reaching defs, especially for large
1543 functions. */
1544 for (def_link = df->regs[regno].defs; def_link;
1545 def_link = def_link->next)
1546 {
1547 struct ref *def = def_link->ref;
1548
1549 if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1550 {
1551 DF_REF_CHAIN (use)
1552 = df_link_create (def, DF_REF_CHAIN (use));
1553 }
1554 }
1555 }
1556 }
1557
1558
1559 /* For each def in insn... record the last def of each reg. */
1560 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1561 {
1562 struct ref *def = def_link->ref;
1563 int dregno = DF_REF_REGNO (def);
1564
1565 reg_def_last[dregno] = def;
1566 }
1567 }
1568 }
1569
1570
1571 /* Create use-def chains from reaching def bitmaps for basic blocks
1572 within BLOCKS. */
1573 static void
1574 df_ud_chain_create (struct df *df, bitmap blocks)
1575 {
1576 basic_block bb;
1577
1578 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1579 {
1580 df_bb_ud_chain_create (df, bb);
1581 });
1582 }
1583 \f
1584
1585
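/* Transfer function for reaching definitions:
   OUT = GEN | (IN & ~KILL).  *CHANGED is set if OUT changes.  */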
1586 static void
1587 df_rd_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1588 void *out, void *gen, void *kill,
1589 void *data ATTRIBUTE_UNUSED)
1590 {
1591 *changed = bitmap_union_of_diff (out, gen, in, kill);
1592 }
1593
1594
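/* Transfer function for upward exposed uses, solved backwards:
   IN = GEN | (OUT & ~KILL).  *CHANGED is set if IN changes.  */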
1595 static void
1596 df_ru_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1597 void *out, void *gen, void *kill,
1598 void *data ATTRIBUTE_UNUSED)
1599 {
1600 *changed = bitmap_union_of_diff (in, gen, out, kill);
1601 }
1602
1603
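/* Transfer function for live registers, solved backwards:
   IN = USE | (OUT & ~DEF).  *CHANGED is set if IN changes.  */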
1604 static void
1605 df_lr_transfer_function (int bb ATTRIBUTE_UNUSED, int *changed, void *in,
1606 void *out, void *use, void *def,
1607 void *data ATTRIBUTE_UNUSED)
1608 {
1609 *changed = bitmap_union_of_diff (in, use, out, def);
1610 }
1611
1612
1613 /* Compute local reaching def info for basic block BB. */
1614 static void
1615 df_bb_rd_local_compute (struct df *df, basic_block bb, bitmap call_killed_defs)
1616 {
1617 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1618 rtx insn;
1619 bitmap seen = BITMAP_XMALLOC ();
1620 bool call_seen = false;
1621
1622 FOR_BB_INSNS_REVERSE (bb, insn)
1623 {
1624 unsigned int uid = INSN_UID (insn);
1625 struct df_link *def_link;
1626
1627 if (! INSN_P (insn))
1628 continue;
1629
1630 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1631 {
1632 struct ref *def = def_link->ref;
1633 unsigned int regno = DF_REF_REGNO (def);
1634 struct df_link *def2_link;
1635
1636 if (bitmap_bit_p (seen, regno)
1637 || (call_seen
1638 && regno < FIRST_PSEUDO_REGISTER
1639 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)))
1640 continue;
1641
1642 for (def2_link = df->regs[regno].defs; def2_link;
1643 def2_link = def2_link->next)
1644 {
1645 struct ref *def2 = def2_link->ref;
1646
1647 /* Add all defs of this reg to the set of kills. This
1648 is greedy since many of these defs will not actually
1649 be killed by this BB but it keeps things a lot
1650 simpler. */
1651 bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1652 }
1653
1654 bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1655 bitmap_set_bit (seen, regno);
1656 }
1657
1658 if (CALL_P (insn) && (df->flags & DF_HARD_REGS))
1659 {
1660 bitmap_operation (bb_info->rd_kill, bb_info->rd_kill,
1661 call_killed_defs, BITMAP_IOR);
1662 call_seen = 1;
1663 }
1664 }
1665
1666 BITMAP_XFREE (seen);
1667 }
1668
1669
1670 /* Compute local reaching def info for each basic block within BLOCKS. */
1671 static void
1672 df_rd_local_compute (struct df *df, bitmap blocks)
1673 {
1674 basic_block bb;
1675 bitmap killed_by_call = NULL;
1676 unsigned regno;
1677 struct df_link *def_link;
1678
1679 if (df->flags & DF_HARD_REGS)
1680 {
1681 killed_by_call = BITMAP_XMALLOC ();
1682 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683 {
1684 if (!TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1685 continue;
1686
1687 for (def_link = df->regs[regno].defs;
1688 def_link;
1689 def_link = def_link->next)
1690 bitmap_set_bit (killed_by_call, DF_REF_ID (def_link->ref));
1691 }
1692 }
1693
1694 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1695 {
1696 df_bb_rd_local_compute (df, bb, killed_by_call);
1697 });
1698
1699 if (df->flags & DF_HARD_REGS)
1700 BITMAP_XFREE (killed_by_call);
1701 }
1702
1703
1704 /* Compute local reaching use (upward exposed use) info for basic
1705 block BB. */
1706 static void
1707 df_bb_ru_local_compute (struct df *df, basic_block bb)
1708 {
1709 /* This is much more tricky than computing reaching defs. With
1710 reaching defs, defs get killed by other defs. With upwards
1711 exposed uses, these get killed by defs with the same regno. */
1712
1713 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1714 rtx insn;
1715
1716
1717 FOR_BB_INSNS_REVERSE (bb, insn)
1718 {
1719 unsigned int uid = INSN_UID (insn);
1720 struct df_link *def_link;
1721 struct df_link *use_link;
1722
1723 if (! INSN_P (insn))
1724 continue;
1725
1726 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1727 {
1728 struct ref *def = def_link->ref;
1729 unsigned int dregno = DF_REF_REGNO (def);
1730
1731 for (use_link = df->regs[dregno].uses; use_link;
1732 use_link = use_link->next)
1733 {
1734 struct ref *use = use_link->ref;
1735
1736 /* Add all uses of this reg to the set of kills. This
1737 is greedy since many of these uses will not actually
1738 be killed by this BB but it keeps things a lot
1739 simpler. */
1740 bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1741
1742 /* Zap from the set of gens for this BB. */
1743 bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1744 }
1745 }
1746
1747 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1748 {
1749 struct ref *use = use_link->ref;
1750 /* Add use to set of gens in this BB. */
1751 bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1752 }
1753 }
1754 }
1755
1756
1757 /* Compute local reaching use (upward exposed use) info for each basic
1758 block within BLOCKS. */
1759 static void
1760 df_ru_local_compute (struct df *df, bitmap blocks)
1761 {
1762 basic_block bb;
1763
1764 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1765 {
1766 df_bb_ru_local_compute (df, bb);
1767 });
1768 }
1769
1770
1771 /* Compute local live variable info for basic block BB. */
1772 static void
1773 df_bb_lr_local_compute (struct df *df, basic_block bb)
1774 {
1775 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1776 rtx insn;
1777
1778 FOR_BB_INSNS_REVERSE (bb, insn)
1779 {
1780 unsigned int uid = INSN_UID (insn);
1781 struct df_link *link;
1782
1783 if (! INSN_P (insn))
1784 continue;
1785
1786 for (link = df->insns[uid].defs; link; link = link->next)
1787 {
1788 struct ref *def = link->ref;
1789 unsigned int dregno = DF_REF_REGNO (def);
1790
1791 /* Add def to set of defs in this BB. */
1792 bitmap_set_bit (bb_info->lr_def, dregno);
1793
1794 bitmap_clear_bit (bb_info->lr_use, dregno);
1795 }
1796
1797 for (link = df->insns[uid].uses; link; link = link->next)
1798 {
1799 struct ref *use = link->ref;
1800 /* Add use to set of uses in this BB. */
1801 bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1802 }
1803 }
1804 }
1805
1806
1807 /* Compute local live variable info for each basic block within BLOCKS. */
1808 static void
1809 df_lr_local_compute (struct df *df, bitmap blocks)
1810 {
1811 basic_block bb;
1812
1813 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1814 {
1815 df_bb_lr_local_compute (df, bb);
1816 });
1817 }
1818
1819
1820 /* Compute register info: lifetime, bb, and number of defs and uses
1821 for basic block BB. */
1822 static void
1823 df_bb_reg_info_compute (struct df *df, basic_block bb, bitmap live)
1824 {
1825 struct reg_info *reg_info = df->regs;
1826 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1827 rtx insn;
1828
1829 bitmap_copy (live, bb_info->lr_out);
1830
1831 FOR_BB_INSNS_REVERSE (bb, insn)
1832 {
1833 unsigned int uid = INSN_UID (insn);
1834 unsigned int regno;
1835 struct df_link *link;
1836 bitmap_iterator bi;
1837
1838 if (! INSN_P (insn))
1839 continue;
1840
1841 for (link = df->insns[uid].defs; link; link = link->next)
1842 {
1843 struct ref *def = link->ref;
1844 unsigned int dregno = DF_REF_REGNO (def);
1845
1846 /* Kill this register. */
1847 bitmap_clear_bit (live, dregno);
1848 reg_info[dregno].n_defs++;
1849 }
1850
1851 for (link = df->insns[uid].uses; link; link = link->next)
1852 {
1853 struct ref *use = link->ref;
1854 unsigned int uregno = DF_REF_REGNO (use);
1855
1856 /* This register is now live. */
1857 bitmap_set_bit (live, uregno);
1858 reg_info[uregno].n_uses++;
1859 }
1860
1861 /* Increment lifetimes of all live registers. */
1862 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
1863 {
1864 reg_info[regno].lifetime++;
1865 }
1866 }
1867 }
1868
1869
1870 /* Compute register info: lifetime, bb, and number of defs and uses. */
1871 static void
1872 df_reg_info_compute (struct df *df, bitmap blocks)
1873 {
1874 basic_block bb;
1875 bitmap live;
1876
1877 live = BITMAP_XMALLOC ();
1878
1879 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1880 {
1881 df_bb_reg_info_compute (df, bb, live);
1882 });
1883
1884 BITMAP_XFREE (live);
1885 }
1886
1887
1888 /* Assign LUIDs for BB. */
1889 static int
1890 df_bb_luids_set (struct df *df, basic_block bb)
1891 {
1892 rtx insn;
1893 int luid = 0;
1894
1895 /* The LUIDs are monotonically increasing for each basic block. */
1896
1897 FOR_BB_INSNS (bb, insn)
1898 {
1899 if (INSN_P (insn))
1900 DF_INSN_LUID (df, insn) = luid++;
1901 DF_INSN_LUID (df, insn) = luid;
1902 }
1903 return luid;
1904 }
1905
1906
1907 /* Assign LUIDs for each basic block within BLOCKS. */
1908 static int
1909 df_luids_set (struct df *df, bitmap blocks)
1910 {
1911 basic_block bb;
1912 int total = 0;
1913
1914 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1915 {
1916 total += df_bb_luids_set (df, bb);
1917 });
1918 return total;
1919 }
1920
1921
1922 /* Perform dataflow analysis using existing DF structure for blocks
1923 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
1924 static void
1925 df_analyze_1 (struct df *df, bitmap blocks, int flags, int update)
1926 {
1927 int aflags;
1928 int dflags;
1929 int i;
1930 basic_block bb;
1931 struct dataflow dflow;
1932
1933 dflags = 0;
1934 aflags = flags;
1935 if (flags & DF_UD_CHAIN)
1936 aflags |= DF_RD | DF_RD_CHAIN;
1937
1938 if (flags & DF_DU_CHAIN)
1939 aflags |= DF_RU;
1940
1941 if (flags & DF_RU)
1942 aflags |= DF_RU_CHAIN;
1943
1944 if (flags & DF_REG_INFO)
1945 aflags |= DF_LR;
1946
1947 if (! blocks)
1948 blocks = df->all_blocks;
1949
1950 df->flags = flags;
1951 if (update)
1952 {
1953 df_refs_update (df, NULL);
1954 /* More fine grained incremental dataflow analysis would be
1955 nice. For now recompute the whole shebang for the
1956 modified blocks. */
1957 #if 0
1958 df_refs_unlink (df, blocks);
1959 #endif
1960 /* All the def-use, use-def chains can be potentially
1961 modified by changes in one block. The size of the
1962 bitmaps can also change. */
1963 }
1964 else
1965 {
1966 /* Scan the function for all register defs and uses. */
1967 df_refs_queue (df);
1968 df_refs_record (df, blocks);
1969
1970 /* Link all the new defs and uses to the insns. */
1971 df_refs_process (df);
1972 }
1973
1974   /* Allocate the bitmaps now that the total number of defs and uses is
1975      known.  If the number of defs or uses has changed, then
1976      these bitmaps need to be reallocated.  */
1977 df_bitmaps_alloc (df, NULL, aflags);
1978
1979 /* Set the LUIDs for each specified basic block. */
1980 df_luids_set (df, blocks);
1981
1982 /* Recreate reg-def and reg-use chains from scratch so that first
1983 def is at the head of the reg-def chain and the last use is at
1984 the head of the reg-use chain. This is only important for
1985 regs local to a basic block as it speeds up searching. */
1986 if (aflags & DF_RD_CHAIN)
1987 {
1988 df_reg_def_chain_create (df, blocks, false);
1989 }
1990
1991 if (aflags & DF_RU_CHAIN)
1992 {
1993 df_reg_use_chain_create (df, blocks, false);
1994 }
1995
1996 df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
1997 df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
1998 df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
1999 df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2000 df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2001 df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2002
2003 flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2004 flow_reverse_top_sort_order_compute (df->rts_order);
2005 for (i = 0; i < n_basic_blocks; i++)
2006 {
2007 df->inverse_dfs_map[df->dfs_order[i]] = i;
2008 df->inverse_rc_map[df->rc_order[i]] = i;
2009 df->inverse_rts_map[df->rts_order[i]] = i;
2010 }
2011 if (aflags & DF_RD)
2012 {
2013 /* Compute the sets of gens and kills for the defs of each bb. */
2014 dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2015 dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2016 dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2017 dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2018
2019 df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2020 FOR_EACH_BB (bb)
2021 {
2022 dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2023 dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2024 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2025 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2026 }
2027
2028 dflow.repr = SR_BITMAP;
2029 dflow.dir = DF_FORWARD;
2030 dflow.conf_op = DF_UNION;
2031 dflow.transfun = df_rd_transfer_function;
2032 dflow.n_blocks = n_basic_blocks;
2033 dflow.order = df->rc_order;
2034 dflow.data = NULL;
2035
2036 iterative_dataflow (&dflow);
2037 free (dflow.in);
2038 free (dflow.out);
2039 free (dflow.gen);
2040 free (dflow.kill);
2041 }
2042
2043 if (aflags & DF_UD_CHAIN)
2044 {
2045 /* Create use-def chains. */
2046 df_ud_chain_create (df, df->all_blocks);
2047
2048 if (! (flags & DF_RD))
2049 dflags |= DF_RD;
2050 }
2051
2052 if (aflags & DF_RU)
2053 {
2054 /* Compute the sets of gens and kills for the upwards exposed
2055 uses in each bb. */
2056 dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2057 dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2058 dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2059 dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2060
2061 df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2062
2063 FOR_EACH_BB (bb)
2064 {
2065 dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2066 dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2067 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2068 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2069 }
2070
2071 dflow.repr = SR_BITMAP;
2072 dflow.dir = DF_BACKWARD;
2073 dflow.conf_op = DF_UNION;
2074 dflow.transfun = df_ru_transfer_function;
2075 dflow.n_blocks = n_basic_blocks;
2076 dflow.order = df->rts_order;
2077 dflow.data = NULL;
2078
2079 iterative_dataflow (&dflow);
2080 free (dflow.in);
2081 free (dflow.out);
2082 free (dflow.gen);
2083 free (dflow.kill);
2084 }
2085
2086 if (aflags & DF_DU_CHAIN)
2087 {
2088 /* Create def-use chains. */
2089 df_du_chain_create (df, df->all_blocks);
2090
2091 if (! (flags & DF_RU))
2092 dflags |= DF_RU;
2093 }
2094
2095 /* Free up bitmaps that are no longer required. */
2096 if (dflags)
2097 df_bitmaps_free (df, dflags);
2098
2099 if (aflags & DF_LR)
2100 {
2101 /* Compute the sets of defs and uses of live variables. */
2102 dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2103 dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2104 dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2105 dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2106
2107 df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2108
2109 FOR_EACH_BB (bb)
2110 {
2111 dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2112 dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2113 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2114 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2115 }
2116
2117 dflow.repr = SR_BITMAP;
2118 dflow.dir = DF_BACKWARD;
2119 dflow.conf_op = DF_UNION;
2120 dflow.transfun = df_lr_transfer_function;
2121 dflow.n_blocks = n_basic_blocks;
2122 dflow.order = df->rts_order;
2123 dflow.data = NULL;
2124
2125 iterative_dataflow (&dflow);
2126 free (dflow.in);
2127 free (dflow.out);
2128 free (dflow.gen);
2129 free (dflow.kill);
2130 }
2131
2132 if (aflags & DF_REG_INFO)
2133 {
2134 df_reg_info_compute (df, df->all_blocks);
2135 }
2136
2137 free (df->dfs_order);
2138 free (df->rc_order);
2139 free (df->rts_order);
2140 free (df->inverse_rc_map);
2141 free (df->inverse_dfs_map);
2142 free (df->inverse_rts_map);
2143 }
2144
2145
2146 /* Initialize dataflow analysis. */
2147 struct df *
2148 df_init (void)
2149 {
2150 struct df *df;
2151
2152 df = xcalloc (1, sizeof (struct df));
2153
2154 /* Squirrel away a global for debugging. */
2155 ddf = df;
2156
2157 return df;
2158 }
2159
2160
2161 /* Start queuing refs. */
2162 static int
2163 df_refs_queue (struct df *df)
2164 {
2165 df->def_id_save = df->def_id;
2166 df->use_id_save = df->use_id;
2167 /* ???? Perhaps we should save current obstack state so that we can
2168 unwind it. */
2169 return 0;
2170 }
2171
2172
2173 /* Process queued refs. */
2174 static int
2175 df_refs_process (struct df *df)
2176 {
2177 unsigned int i;
2178
2179 /* Build new insn-def chains. */
2180 for (i = df->def_id_save; i != df->def_id; i++)
2181 {
2182 struct ref *def = df->defs[i];
2183 unsigned int uid = DF_REF_INSN_UID (def);
2184
2185 /* Add def to head of def list for INSN. */
2186 df->insns[uid].defs
2187 = df_link_create (def, df->insns[uid].defs);
2188 }
2189
2190 /* Build new insn-use chains. */
2191 for (i = df->use_id_save; i != df->use_id; i++)
2192 {
2193 struct ref *use = df->uses[i];
2194 unsigned int uid = DF_REF_INSN_UID (use);
2195
2196 /* Add use to head of use list for INSN. */
2197 df->insns[uid].uses
2198 = df_link_create (use, df->insns[uid].uses);
2199 }
2200 return 0;
2201 }
2202
2203
2204 /* Update refs for basic block BB. */
2205 static int
2206 df_bb_refs_update (struct df *df, basic_block bb)
2207 {
2208 rtx insn;
2209 int count = 0;
2210
2211 /* While we have to scan the chain of insns for this BB, we do not
2212 need to allocate and queue a long chain of BB/INSN pairs. Using
2213 a bitmap for insns_modified saves memory and avoids queuing
2214 duplicates. */
2215
2216 FOR_BB_INSNS (bb, insn)
2217 {
2218 unsigned int uid;
2219
2220 uid = INSN_UID (insn);
2221
2222 if (bitmap_bit_p (df->insns_modified, uid))
2223 {
2224 /* Delete any allocated refs of this insn. MPH, FIXME. */
2225 df_insn_refs_unlink (df, bb, insn);
2226
2227 /* Scan the insn for refs. */
2228 df_insn_refs_record (df, bb, insn);
2229
2230 count++;
2231 }
2232 }
2233 return count;
2234 }
2235
2236
2237 /* Process all the modified/deleted insns that were queued. */
2238 static int
2239 df_refs_update (struct df *df, bitmap blocks)
2240 {
2241 basic_block bb;
2242 int count = 0, bbno;
2243
2244 df->n_regs = max_reg_num ();
2245 if (df->n_regs >= df->reg_size)
2246 df_reg_table_realloc (df, 0);
2247
2248 df_refs_queue (df);
2249
2250 if (!blocks)
2251 {
2252 FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2253 {
2254 count += df_bb_refs_update (df, bb);
2255 });
2256 }
2257 else
2258 {
2259 bitmap_iterator bi;
2260
2261 EXECUTE_IF_AND_IN_BITMAP (df->bbs_modified, blocks, 0, bbno, bi)
2262 {
2263 count += df_bb_refs_update (df, BASIC_BLOCK (bbno));
2264 }
2265 }
2266
2267 df_refs_process (df);
2268 return count;
2269 }
2270
2271
2272 /* Return nonzero if any of the requested blocks in the bitmap
2273 BLOCKS have been modified. */
2274 static int
2275 df_modified_p (struct df *df, bitmap blocks)
2276 {
2277 int update = 0;
2278 basic_block bb;
2279
2280 if (!df->n_bbs)
2281 return 0;
2282
2283 FOR_EACH_BB (bb)
2284 if (bitmap_bit_p (df->bbs_modified, bb->index)
2285 && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2286 {
2287 update = 1;
2288 break;
2289 }
2290
2291 return update;
2292 }
2293
2294 /* Analyze dataflow info for the basic blocks specified by the bitmap
2295 BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2296 modified blocks if BLOCKS is -1. */
2297
2298 int
2299 df_analyze (struct df *df, bitmap blocks, int flags)
2300 {
2301 int update;
2302
2303 /* We could deal with additional basic blocks being created by
2304 rescanning everything again. */
2305 gcc_assert (!df->n_bbs || df->n_bbs == (unsigned int) last_basic_block);
2306
2307 update = df_modified_p (df, blocks);
2308 if (update || (flags != df->flags))
2309 {
2310 if (! blocks)
2311 {
2312 if (df->n_bbs)
2313 {
2314 /* Recompute everything from scratch. */
2315 df_free (df);
2316 }
2317 /* Allocate and initialize data structures. */
2318 df_alloc (df, max_reg_num ());
2319 df_analyze_1 (df, 0, flags, 0);
2320 update = 1;
2321 }
2322 else
2323 {
2324 if (blocks == (bitmap) -1)
2325 blocks = df->bbs_modified;
2326
2327 gcc_assert (df->n_bbs);
2328
2329 df_analyze_1 (df, blocks, flags, 1);
2330 bitmap_zero (df->bbs_modified);
2331 bitmap_zero (df->insns_modified);
2332 }
2333 }
2334 return update;
2335 }
2336
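/* A minimal usage sketch (hypothetical caller, not part of this file):
   queue the changed insns with df_insn_modify, then re-analyze only the
   modified blocks by passing -1 as BLOCKS.

       df_insn_modify (df, bb, insn);
       ...
       if (df_analyze (df, (bitmap) -1, DF_UD_CHAIN | DF_DU_CHAIN))
         df_dump (df, DF_UD_CHAIN | DF_DU_CHAIN, stderr); */
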
2337 /* Remove the entries not in BLOCKS from the LIST of length LEN, preserving
2338 the order of the remaining entries. Returns the length of the resulting
2339 list. */
2340
2341 static unsigned
2342 prune_to_subcfg (int list[], unsigned len, bitmap blocks)
2343 {
2344 unsigned act, last;
2345
2346 for (act = 0, last = 0; act < len; act++)
2347 if (bitmap_bit_p (blocks, list[act]))
2348 list[last++] = list[act];
2349
2350 return last;
2351 }
2352
2353 /* Alternative entry point to the analysis. Analyze just the part of the
2354 CFG induced by BLOCKS.
2355
2356 TODO: It is not clear how to avoid code duplication with df_analyze_1
2357 here without making it even more convoluted. We behave
2358 slightly differently in some details, especially in handling modified
2359 insns. */
2360
2361 void
2362 df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
2363 {
2364 rtx insn;
2365 basic_block bb;
2366 struct dataflow dflow;
2367 unsigned n_blocks;
2368
2369 if (flags & DF_UD_CHAIN)
2370 flags |= DF_RD | DF_RD_CHAIN;
2371 if (flags & DF_DU_CHAIN)
2372 flags |= DF_RU;
2373 if (flags & DF_RU)
2374 flags |= DF_RU_CHAIN;
2375 if (flags & DF_REG_INFO)
2376 flags |= DF_LR;
2377
2378 if (!df->n_bbs)
2379 {
2380 df_alloc (df, max_reg_num ());
2381
2382 /* Mark all insns as modified. */
2383
2384 FOR_EACH_BB (bb)
2385 {
2386 FOR_BB_INSNS (bb, insn)
2387 {
2388 df_insn_modify (df, bb, insn);
2389 }
2390 }
2391 }
2392
2393 df->flags = flags;
2394
2395 df_reg_def_chain_clean (df);
2396 df_reg_use_chain_clean (df);
2397
2398 df_refs_update (df, blocks);
2399
2400 /* Clear the updated stuff from ``modified'' bitmaps. */
2401 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2402 {
2403 if (bitmap_bit_p (df->bbs_modified, bb->index))
2404 {
2405 FOR_BB_INSNS (bb, insn)
2406 {
2407 bitmap_clear_bit (df->insns_modified, INSN_UID (insn));
2408 }
2409
2410 bitmap_clear_bit (df->bbs_modified, bb->index);
2411 }
2412 });
2413
2414 /* Allocate the bitmaps now that the total number of defs and uses is
2415 known. If the number of defs or uses has changed, then
2416 these bitmaps need to be reallocated. */
2417 df_bitmaps_alloc (df, blocks, flags);
2418
2419 /* Set the LUIDs for each specified basic block. */
2420 df_luids_set (df, blocks);
2421
2422 /* Recreate reg-def and reg-use chains from scratch so that first
2423 def is at the head of the reg-def chain and the last use is at
2424 the head of the reg-use chain. This is only important for
2425 regs local to a basic block as it speeds up searching. */
2426 if (flags & DF_RD_CHAIN)
2427 {
2428 df_reg_def_chain_create (df, blocks, true);
2429 }
2430
2431 if (flags & DF_RU_CHAIN)
2432 {
2433 df_reg_use_chain_create (df, blocks, true);
2434 }
2435
2436 df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2437 df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2438 df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2439
2440 flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2441 flow_reverse_top_sort_order_compute (df->rts_order);
2442
2443 n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks, blocks);
2444 prune_to_subcfg (df->rc_order, n_basic_blocks, blocks);
2445 prune_to_subcfg (df->rts_order, n_basic_blocks, blocks);
2446
2447 dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
2448 dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
2449 dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
2450 dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
2451
2452 if (flags & DF_RD)
2453 {
2454 /* Compute the sets of gens and kills for the defs of each bb. */
2455 df_rd_local_compute (df, blocks);
2456
2457 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2458 {
2459 dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2460 dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2461 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2462 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2463 });
2464
2465 dflow.repr = SR_BITMAP;
2466 dflow.dir = DF_FORWARD;
2467 dflow.conf_op = DF_UNION;
2468 dflow.transfun = df_rd_transfer_function;
2469 dflow.n_blocks = n_blocks;
2470 dflow.order = df->rc_order;
2471 dflow.data = NULL;
2472
2473 iterative_dataflow (&dflow);
2474 }
2475
2476 if (flags & DF_UD_CHAIN)
2477 {
2478 /* Create use-def chains. */
2479 df_ud_chain_create (df, blocks);
2480 }
2481
2482 if (flags & DF_RU)
2483 {
2484 /* Compute the sets of gens and kills for the upwards exposed
2485 uses in each bb. */
2486 df_ru_local_compute (df, blocks);
2487
2488 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2489 {
2490 dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2491 dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2492 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2493 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2494 });
2495
2496 dflow.repr = SR_BITMAP;
2497 dflow.dir = DF_BACKWARD;
2498 dflow.conf_op = DF_UNION;
2499 dflow.transfun = df_ru_transfer_function;
2500 dflow.n_blocks = n_blocks;
2501 dflow.order = df->rts_order;
2502 dflow.data = NULL;
2503
2504 iterative_dataflow (&dflow);
2505 }
2506
2507 if (flags & DF_DU_CHAIN)
2508 {
2509 /* Create def-use chains. */
2510 df_du_chain_create (df, blocks);
2511 }
2512
2513 if (flags & DF_LR)
2514 {
2515 /* Compute the sets of defs and uses of live variables. */
2516 df_lr_local_compute (df, blocks);
2517
2518 FOR_EACH_BB (bb)
2519 {
2520 dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2521 dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2522 dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2523 dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2524 }
2525
2526 dflow.repr = SR_BITMAP;
2527 dflow.dir = DF_BACKWARD;
2528 dflow.conf_op = DF_UNION;
2529 dflow.transfun = df_lr_transfer_function;
2530 dflow.n_blocks = n_blocks;
2531 dflow.order = df->rts_order;
2532 dflow.data = NULL;
2533
2534 iterative_dataflow (&dflow);
2535 }
2536
2537 if (flags & DF_REG_INFO)
2538 {
2539 df_reg_info_compute (df, blocks);
2540 }
2541
2542 free (dflow.in);
2543 free (dflow.out);
2544 free (dflow.gen);
2545 free (dflow.kill);
2546
2547 free (df->dfs_order);
2548 free (df->rc_order);
2549 free (df->rts_order);
2550 }
2551
2552 /* Free all the dataflow info and the DF structure. */
2553 void
2554 df_finish (struct df *df)
2555 {
2556 df_free (df);
2557 free (df);
2558 }
2559
2560 /* Unlink INSN from its reference information. */
2561 static void
2562 df_insn_refs_unlink (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
2563 {
2564 struct df_link *link;
2565 unsigned int uid;
2566
2567 uid = INSN_UID (insn);
2568
2569 /* Unlink all refs defined by this insn. */
2570 for (link = df->insns[uid].defs; link; link = link->next)
2571 df_def_unlink (df, link->ref);
2572
2573 /* Unlink all refs used by this insn. */
2574 for (link = df->insns[uid].uses; link; link = link->next)
2575 df_use_unlink (df, link->ref);
2576
2577 df->insns[uid].defs = 0;
2578 df->insns[uid].uses = 0;
2579 }
2580
2581
2582 #if 0
2583 /* Unlink all the insns within BB from their reference information. */
2584 static void
2585 df_bb_refs_unlink (struct df *df, basic_block bb)
2586 {
2587 rtx insn;
2588
2589 /* Scan the block an insn at a time from beginning to end. */
2590 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
2591 {
2592 if (INSN_P (insn))
2593 {
2594 /* Unlink refs for INSN. */
2595 df_insn_refs_unlink (df, bb, insn);
2596 }
2597 if (insn == BB_END (bb))
2598 break;
2599 }
2600 }
2601
2602
2603 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2604 Not currently used. */
2605 static void
2606 df_refs_unlink (struct df *df, bitmap blocks)
2607 {
2608 basic_block bb;
2609
2610 if (blocks)
2611 {
2612 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2613 {
2614 df_bb_refs_unlink (df, bb);
2615 });
2616 }
2617 else
2618 {
2619 FOR_EACH_BB (bb)
2620 df_bb_refs_unlink (df, bb);
2621 }
2622 }
2623 #endif
2624 \f
2625 /* Functions to modify insns. */
2626
2627
2628 /* Delete INSN and all its reference information. */
2629 rtx
2630 df_insn_delete (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
2631 {
2632 /* If the insn is a jump, we should perhaps call delete_insn to
2633 handle the JUMP_LABEL? */
2634
2635 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2636 gcc_assert (insn != BB_HEAD (bb));
2637
2638 /* Delete the insn. */
2639 delete_insn (insn);
2640
2641 df_insn_modify (df, bb, insn);
2642
2643 return NEXT_INSN (insn);
2644 }
2645
2646 /* Mark that basic block BB was modified. */
2647
2648 static void
2649 df_bb_modify (struct df *df, basic_block bb)
2650 {
2651 if ((unsigned) bb->index >= df->n_bbs)
2652 df_bb_table_realloc (df, df->n_bbs);
2653
2654 bitmap_set_bit (df->bbs_modified, bb->index);
2655 }
2656
2657 /* Mark that INSN within BB may have changed (created/modified/deleted).
2658 This may be called multiple times for the same insn. There is no
2659 harm in calling this function if the insn wasn't changed; it will just
2660 slow down the rescanning of refs. */
2661 void
2662 df_insn_modify (struct df *df, basic_block bb, rtx insn)
2663 {
2664 unsigned int uid;
2665
2666 uid = INSN_UID (insn);
2667 if (uid >= df->insn_size)
2668 df_insn_table_realloc (df, uid);
2669
2670 df_bb_modify (df, bb);
2671 bitmap_set_bit (df->insns_modified, uid);
2672
2673 /* For incremental updating on the fly, perhaps we could make a copy
2674 of all the refs of the original insn and turn them into
2675 anti-refs. When df_refs_update finds these anti-refs, it annihilates
2676 the original refs. If validate_change fails then these anti-refs
2677 will just get ignored. */
2678 }
2679
2680 typedef struct replace_args
2681 {
2682 rtx match;
2683 rtx replacement;
2684 rtx insn;
2685 int modified;
2686 } replace_args;
2687
2688
2689 /* Replace mem pointed to by PX with its associated pseudo register.
2690 DATA is actually a pointer to a structure describing the
2691 instruction currently being scanned and the MEM we are currently
2692 replacing. */
2693 static int
2694 df_rtx_mem_replace (rtx *px, void *data)
2695 {
2696 replace_args *args = (replace_args *) data;
2697 rtx mem = *px;
2698
2699 if (mem == NULL_RTX)
2700 return 0;
2701
2702 switch (GET_CODE (mem))
2703 {
2704 case MEM:
2705 break;
2706
2707 case CONST_DOUBLE:
2708 /* We're not interested in the MEM associated with a
2709 CONST_DOUBLE, so there's no need to traverse into one. */
2710 return -1;
2711
2712 default:
2713 /* This is not a MEM. */
2714 return 0;
2715 }
2716
2717 if (!rtx_equal_p (args->match, mem))
2718 /* This is not the MEM we are currently replacing. */
2719 return 0;
2720
2721 /* Actually replace the MEM. */
2722 validate_change (args->insn, px, args->replacement, 1);
2723 args->modified++;
2724
2725 return 0;
2726 }
2727
2728
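/* Replace every occurrence of MEM in INSN with register REG, queuing the
   changes with validate_change. If anything was replaced, mark INSN as
   modified. Return the number of replacements made. */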
2729 int
2730 df_insn_mem_replace (struct df *df, basic_block bb, rtx insn, rtx mem, rtx reg)
2731 {
2732 replace_args args;
2733
2734 args.insn = insn;
2735 args.match = mem;
2736 args.replacement = reg;
2737 args.modified = 0;
2738
2739 /* Search and replace all matching mems within insn. */
2740 for_each_rtx (&insn, df_rtx_mem_replace, &args);
2741
2742 if (args.modified)
2743 df_insn_modify (df, bb, insn);
2744
2745 /* ???? FIXME. We may have a new def or one or more new uses of REG
2746 in INSN. REG should be a new pseudo so it won't affect the
2747 dataflow information that we currently have. We should add
2748 the new uses and defs to INSN and then recreate the chains
2749 when df_analyze is called. */
2750 return args.modified;
2751 }
2752
2753
2754 /* Replace one register with another. Called through for_each_rtx; PX
2755 points to the rtx being scanned. DATA is actually a pointer to a
2756 structure of arguments. */
2757 static int
2758 df_rtx_reg_replace (rtx *px, void *data)
2759 {
2760 rtx x = *px;
2761 replace_args *args = (replace_args *) data;
2762
2763 if (x == NULL_RTX)
2764 return 0;
2765
2766 if (x == args->match)
2767 {
2768 validate_change (args->insn, px, args->replacement, 1);
2769 args->modified++;
2770 }
2771
2772 return 0;
2773 }
2774
2775
2776 /* Replace the reg within every ref on CHAIN that is within the set
2777 BLOCKS of basic blocks with NEWREG. Also update the regs within
2778 REG_NOTES. */
2779 void
2780 df_refs_reg_replace (struct df *df, bitmap blocks, struct df_link *chain, rtx oldreg, rtx newreg)
2781 {
2782 struct df_link *link;
2783 replace_args args;
2784
2785 if (! blocks)
2786 blocks = df->all_blocks;
2787
2788 args.match = oldreg;
2789 args.replacement = newreg;
2790 args.modified = 0;
2791
2792 for (link = chain; link; link = link->next)
2793 {
2794 struct ref *ref = link->ref;
2795 rtx insn = DF_REF_INSN (ref);
2796
2797 if (! INSN_P (insn))
2798 continue;
2799
2800 gcc_assert (bitmap_bit_p (blocks, DF_REF_BBNO (ref)));
2801
2802 df_ref_reg_replace (df, ref, oldreg, newreg);
2803
2804 /* Replace occurrences of the reg within the REG_NOTES. */
2805 if ((! link->next || DF_REF_INSN (ref)
2806 != DF_REF_INSN (link->next->ref))
2807 && REG_NOTES (insn))
2808 {
2809 args.insn = insn;
2810 for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2811 }
2812 }
2813 }
2814
2815
2816 /* Replace all occurrences of register OLDREG with register NEWREG in
2817 blocks defined by bitmap BLOCKS. This also replaces occurrences of
2818 OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2819 routine expects the reg-use and reg-def chains to be valid. */
2820 int
2821 df_reg_replace (struct df *df, bitmap blocks, rtx oldreg, rtx newreg)
2822 {
2823 unsigned int oldregno = REGNO (oldreg);
2824
2825 df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2826 df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2827 return 1;
2828 }
2829
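/* A hypothetical renaming sketch (not part of this file): give OLDREG a
   fresh pseudo everywhere, commit the queued changes, and rescan.

       rtx newreg = gen_reg_rtx (GET_MODE (oldreg));
       df_reg_replace (df, NULL, oldreg, newreg);
       if (apply_change_group ())
         df_analyze (df, (bitmap) -1, df->flags);

   The replacements are only queued (validate_change with in_group set),
   so apply_change_group decides whether they take effect. */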
2830
2831 /* Try replacing the reg within REF with NEWREG. Do not modify
2832 def-use/use-def chains. */
2833 int
2834 df_ref_reg_replace (struct df *df, struct ref *ref, rtx oldreg, rtx newreg)
2835 {
2836 /* Check whether the insn was deleted by being converted into a NOTE.
2837 If so, ignore this insn. */
2838 if (! INSN_P (DF_REF_INSN (ref)))
2839 return 0;
2840
2841 gcc_assert (!oldreg || oldreg == DF_REF_REG (ref));
2842
2843 if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2844 return 0;
2845
2846 df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
2847 return 1;
2848 }
2849
2850
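/* Remove the last use of REGNO in USE_INSN and transfer the first def of
   REGNO in DEF_INSN onto USE_INSN, reusing the def's chain link. Return
   the moved def, or zero if either ref cannot be found. */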
2851 struct ref*
2852 df_bb_def_use_swap (struct df *df, basic_block bb, rtx def_insn, rtx use_insn, unsigned int regno)
2853 {
2854 struct ref *def;
2855 struct ref *use;
2856 int def_uid;
2857 int use_uid;
2858 struct df_link *link;
2859
2860 def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2861 if (! def)
2862 return 0;
2863
2864 use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2865 if (! use)
2866 return 0;
2867
2868 /* The USE no longer exists. */
2869 use_uid = INSN_UID (use_insn);
2870 df_use_unlink (df, use);
2871 df_ref_unlink (&df->insns[use_uid].uses, use);
2872
2873 /* The DEF requires shifting so remove it from DEF_INSN
2874 and add it to USE_INSN by reusing LINK. */
2875 def_uid = INSN_UID (def_insn);
2876 link = df_ref_unlink (&df->insns[def_uid].defs, def);
2877 link->ref = def;
2878 link->next = df->insns[use_uid].defs;
2879 df->insns[use_uid].defs = link;
2880
2881 #if 0
2882 link = df_ref_unlink (&df->regs[regno].defs, def);
2883 link->ref = def;
2884 link->next = df->regs[regno].defs;
2885 df->insns[regno].defs = link;
2886 #endif
2887
2888 DF_REF_INSN (def) = use_insn;
2889 return def;
2890 }
2891
2892
2893 /* Mark as modified all insns between FIRST_INSN and LAST_INSN inclusive.
2894 All new insns must be processed by this routine. */
2895 static void
2896 df_insns_modify (struct df *df, basic_block bb, rtx first_insn, rtx last_insn)
2897 {
2898 rtx insn;
2899
2900 for (insn = first_insn; ; insn = NEXT_INSN (insn))
2901 {
2902 unsigned int uid;
2903
2904 /* A non-const call should not have slipped through the net. If
2905 it does, we need to create a new basic block. Ouch. The
2906 same applies for a label. */
2907 gcc_assert ((!CALL_P (insn) || CONST_OR_PURE_CALL_P (insn))
2908 && !LABEL_P (insn));
2909
2910 uid = INSN_UID (insn);
2911
2912 if (uid >= df->insn_size)
2913 df_insn_table_realloc (df, uid);
2914
2915 df_insn_modify (df, bb, insn);
2916
2917 if (insn == last_insn)
2918 break;
2919 }
2920 }
2921
2922
2923 /* Emit PATTERN before INSN within BB. */
2924 rtx
2925 df_pattern_emit_before (struct df *df, rtx pattern, basic_block bb, rtx insn)
2926 {
2927 rtx ret_insn;
2928 rtx prev_insn = PREV_INSN (insn);
2929
2930 /* We should not be inserting before the start of the block. */
2931 gcc_assert (insn != BB_HEAD (bb));
2932 ret_insn = emit_insn_before (pattern, insn);
2933 if (ret_insn == insn)
2934 return ret_insn;
2935
2936 df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2937 return ret_insn;
2938 }
2939
2940
2941 /* Emit PATTERN after INSN within BB. */
2942 rtx
2943 df_pattern_emit_after (struct df *df, rtx pattern, basic_block bb, rtx insn)
2944 {
2945 rtx ret_insn;
2946
2947 ret_insn = emit_insn_after (pattern, insn);
2948 if (ret_insn == insn)
2949 return ret_insn;
2950
2951 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2952 return ret_insn;
2953 }
2954
2955
2956 /* Emit jump PATTERN after INSN within BB. */
2957 rtx
2958 df_jump_pattern_emit_after (struct df *df, rtx pattern, basic_block bb, rtx insn)
2959 {
2960 rtx ret_insn;
2961
2962 ret_insn = emit_jump_insn_after (pattern, insn);
2963 if (ret_insn == insn)
2964 return ret_insn;
2965
2966 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2967 return ret_insn;
2968 }
2969
2970
2971 /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2972
2973 This function should only be used to move loop invariant insns
2974 out of a loop where it has been proven that the def-use info
2975 will still be valid. */
2976 rtx
2977 df_insn_move_before (struct df *df, basic_block bb, rtx insn, basic_block before_bb, rtx before_insn)
2978 {
2979 struct df_link *link;
2980 unsigned int uid;
2981
2982 if (! bb)
2983 return df_pattern_emit_before (df, insn, before_bb, before_insn);
2984
2985 uid = INSN_UID (insn);
2986
2987 /* Change bb for all df defined and used by this insn. */
2988 for (link = df->insns[uid].defs; link; link = link->next)
2989 DF_REF_BB (link->ref) = before_bb;
2990 for (link = df->insns[uid].uses; link; link = link->next)
2991 DF_REF_BB (link->ref) = before_bb;
2992
2993 /* The lifetimes of the registers used in this insn will be reduced
2994 while the lifetimes of the registers defined in this insn
2995 are likely to be increased. */
2996
2997 /* ???? Perhaps all the insns moved should be stored on a list
2998 which df_analyze removes when it recalculates data flow. */
2999
3000 return emit_insn_before (insn, before_insn);
3001 }
3002 \f
3003 /* Functions to query dataflow information. */
3004
3005
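/* Return nonzero if INSN contains a def of REGNO. */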
3006 int
3007 df_insn_regno_def_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3008 rtx insn, unsigned int regno)
3009 {
3010 unsigned int uid;
3011 struct df_link *link;
3012
3013 uid = INSN_UID (insn);
3014
3015 for (link = df->insns[uid].defs; link; link = link->next)
3016 {
3017 struct ref *def = link->ref;
3018
3019 if (DF_REF_REGNO (def) == regno)
3020 return 1;
3021 }
3022
3023 return 0;
3024 }
3025
3026 /* Finds the reference corresponding to the definition of REG in INSN.
3027 DF is the dataflow object. */
3028
3029 struct ref *
3030 df_find_def (struct df *df, rtx insn, rtx reg)
3031 {
3032 struct df_link *defs;
3033
3034 for (defs = DF_INSN_DEFS (df, insn); defs; defs = defs->next)
3035 if (rtx_equal_p (DF_REF_REG (defs->ref), reg))
3036 return defs->ref;
3037
3038 return NULL;
3039 }
3040
3041 /* Return 1 if REG is referenced in INSN, zero otherwise. */
3042
3043 int
3044 df_reg_used (struct df *df, rtx insn, rtx reg)
3045 {
3046 struct df_link *uses;
3047
3048 for (uses = DF_INSN_USES (df, insn); uses; uses = uses->next)
3049 if (rtx_equal_p (DF_REF_REG (uses->ref), reg))
3050 return 1;
3051
3052 return 0;
3053 }
3054
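/* A small query sketch (hypothetical caller, not part of this file):

       struct ref *def = df_find_def (df, insn, reg);
       int used_p = df_reg_used (df, insn, reg);

   If DF_DU_CHAIN was requested from df_analyze, DF_REF_CHAIN (def) then
   walks the uses reached by that def. */

/* Return nonzero if DEF is the only def that reaches each of its uses,
   i.e. no use on DEF's def-use chain has another def on its use-def
   chain. */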
3055 static int
3056 df_def_dominates_all_uses_p (struct df *df ATTRIBUTE_UNUSED, struct ref *def)
3057 {
3058 struct df_link *du_link;
3059
3060 /* Follow def-use chain to find all the uses of this def. */
3061 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3062 {
3063 struct ref *use = du_link->ref;
3064 struct df_link *ud_link;
3065
3066 /* Follow use-def chain to check all the defs for this use. */
3067 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3068 if (ud_link->ref != def)
3069 return 0;
3070 }
3071 return 1;
3072 }
3073
3074
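/* Return nonzero if every def in INSN dominates all of its uses, in the
   sense checked by df_def_dominates_all_uses_p above. */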
3075 int
3076 df_insn_dominates_all_uses_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3077 rtx insn)
3078 {
3079 unsigned int uid;
3080 struct df_link *link;
3081
3082 uid = INSN_UID (insn);
3083
3084 for (link = df->insns[uid].defs; link; link = link->next)
3085 {
3086 struct ref *def = link->ref;
3087
3088 if (! df_def_dominates_all_uses_p (df, def))
3089 return 0;
3090 }
3091
3092 return 1;
3093 }
3094
3095
3096 /* Return nonzero if DEF dominates all the uses within the bitmap
3097 BLOCKS. */
3098 static int
3099 df_def_dominates_uses_p (struct df *df ATTRIBUTE_UNUSED, struct ref *def,
3100 bitmap blocks)
3101 {
3102 struct df_link *du_link;
3103
3104 /* Follow def-use chain to find all the uses of this def. */
3105 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3106 {
3107 struct ref *use = du_link->ref;
3108 struct df_link *ud_link;
3109
3110 /* Only worry about the uses within BLOCKS. For example,
3111 consider a register defined within a loop that is live at the
3112 loop exits. */
3113 if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
3114 {
3115 /* Follow use-def chain to check all the defs for this use. */
3116 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3117 if (ud_link->ref != def)
3118 return 0;
3119 }
3120 }
3121 return 1;
3122 }
3123
3124
3125 /* Return nonzero if all the defs of INSN within BB dominate
3126 all the corresponding uses. */
3127 int
3128 df_insn_dominates_uses_p (struct df *df, basic_block bb ATTRIBUTE_UNUSED,
3129 rtx insn, bitmap blocks)
3130 {
3131 unsigned int uid;
3132 struct df_link *link;
3133
3134 uid = INSN_UID (insn);
3135
3136 for (link = df->insns[uid].defs; link; link = link->next)
3137 {
3138 struct ref *def = link->ref;
3139
3140 /* Only consider the defs within BLOCKS. */
3141 if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3142 && ! df_def_dominates_uses_p (df, def, blocks))
3143 return 0;
3144 }
3145 return 1;
3146 }
3147
3148
3149 /* Return the basic block that REG is referenced in, or NULL if it is
3150 referenced in multiple basic blocks. */
3151 basic_block
3152 df_regno_bb (struct df *df, unsigned int regno)
3153 {
3154 struct df_link *defs = df->regs[regno].defs;
3155 struct df_link *uses = df->regs[regno].uses;
3156 struct ref *def = defs ? defs->ref : 0;
3157 struct ref *use = uses ? uses->ref : 0;
3158 basic_block bb_def = def ? DF_REF_BB (def) : 0;
3159 basic_block bb_use = use ? DF_REF_BB (use) : 0;
3160
3161 /* Compare blocks of first def and last use. ???? FIXME. What if
3162 the reg-def and reg-use lists are not correctly ordered. */
3163 return bb_def == bb_use ? bb_def : 0;
3164 }
3165
3166
3167 /* Return nonzero if REG is used in multiple basic blocks. */
3168 int
3169 df_reg_global_p (struct df *df, rtx reg)
3170 {
3171 return df_regno_bb (df, REGNO (reg)) != 0;
3172 }
3173
3174
3175 /* Return total lifetime (in insns) of REG. */
3176 int
3177 df_reg_lifetime (struct df *df, rtx reg)
3178 {
3179 return df->regs[REGNO (reg)].lifetime;
3180 }
3181
3182
3183 /* Return nonzero if REG is live at the start of BB. */
3184 int
3185 df_bb_reg_live_start_p (struct df *df, basic_block bb, rtx reg)
3186 {
3187 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3188
3189 gcc_assert (bb_info->lr_in);
3190
3191 return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3192 }
3193
3194
3195 /* Return nonzero if REG is live at the end of BB. */
3196 int
3197 df_bb_reg_live_end_p (struct df *df, basic_block bb, rtx reg)
3198 {
3199 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3200
3201 gcc_assert (bb_info->lr_out);
3202
3203 return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3204 }
3205
3206
3207 /* Return -1 if the life of REG1 is before the life of REG2, 1 if the
3208 life of REG1 is after the life of REG2, or 0 if the lives overlap. */
3209 int
3210 df_bb_regs_lives_compare (struct df *df, basic_block bb, rtx reg1, rtx reg2)
3211 {
3212 unsigned int regno1 = REGNO (reg1);
3213 unsigned int regno2 = REGNO (reg2);
3214 struct ref *def1;
3215 struct ref *use1;
3216 struct ref *def2;
3217 struct ref *use2;
3218
3219
3220 /* The regs must be local to BB. */
3221 gcc_assert (df_regno_bb (df, regno1) == bb
3222 && df_regno_bb (df, regno2) == bb);
3223
3224 def2 = df_bb_regno_first_def_find (df, bb, regno2);
3225 use1 = df_bb_regno_last_use_find (df, bb, regno1);
3226
3227 if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3228 > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3229 return -1;
3230
3231 def1 = df_bb_regno_first_def_find (df, bb, regno1);
3232 use2 = df_bb_regno_last_use_find (df, bb, regno2);
3233
3234 if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3235 > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3236 return 1;
3237
3238 return 0;
3239 }
3240
3241
3242 /* Return last use of REGNO within BB. */
3243 struct ref *
3244 df_bb_regno_last_use_find (struct df *df, basic_block bb, unsigned int regno)
3245 {
3246 struct df_link *link;
3247
3248 /* This assumes that the reg-use list is ordered such that for any
3249 BB, the last use is found first. However, since the BBs are not
3250 ordered, the first use in the chain is not necessarily the last
3251 use in the function. */
3252 for (link = df->regs[regno].uses; link; link = link->next)
3253 {
3254 struct ref *use = link->ref;
3255
3256 if (DF_REF_BB (use) == bb)
3257 return use;
3258 }
3259 return 0;
3260 }
3261
3262
3263 /* Return first def of REGNO within BB. */
3264 struct ref *
3265 df_bb_regno_first_def_find (struct df *df, basic_block bb, unsigned int regno)
3266 {
3267 struct df_link *link;
3268
3269 /* This assumes that the reg-def list is ordered such that for any
3270 BB, the first def is found first. However, since the BBs are not
3271 ordered, the first def in the chain is not necessarily the first
3272 def in the function. */
3273 for (link = df->regs[regno].defs; link; link = link->next)
3274 {
3275 struct ref *def = link->ref;
3276
3277 if (DF_REF_BB (def) == bb)
3278 return def;
3279 }
3280 return 0;
3281 }
3282
3283 /* Return last def of REGNO within BB. */
3284 struct ref *
3285 df_bb_regno_last_def_find (struct df *df, basic_block bb, unsigned int regno)
3286 {
3287 struct df_link *link;
3288 struct ref *last_def = NULL;
3289 int in_bb = 0;
3290
3291 /* This assumes that the reg-def list is ordered such that for any
3292 BB, the first def is found first. However, since the BBs are not
3293 ordered, the first def in the chain is not necessarily the first
3294 def in the function. */
3295 for (link = df->regs[regno].defs; link; link = link->next)
3296 {
3297 struct ref *def = link->ref;
3298 /* The first time in the desired block. */
3299 if (DF_REF_BB (def) == bb)
3300 in_bb = 1;
3301 /* The last def in the desired block. */
3302 else if (in_bb)
3303 return last_def;
3304 last_def = def;
3305 }
3306 return last_def;
3307 }
3308
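/* A sketch of how these lookups combine (hypothetical caller): check
   whether the first def of REGNO in BB precedes its last use, using the
   LUIDs assigned by df_bb_luids_set.

       struct ref *def = df_bb_regno_first_def_find (df, bb, regno);
       struct ref *use = df_bb_regno_last_use_find (df, bb, regno);
       if (def && use
           && DF_INSN_LUID (df, DF_REF_INSN (def))
              < DF_INSN_LUID (df, DF_REF_INSN (use)))
         ... */
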
3309 /* Return the last use of REGNO inside INSN within BB. */
3310 static struct ref *
3311 df_bb_insn_regno_last_use_find (struct df *df,
3312 basic_block bb ATTRIBUTE_UNUSED, rtx insn,
3313 unsigned int regno)
3314 {
3315 unsigned int uid;
3316 struct df_link *link;
3317
3318 uid = INSN_UID (insn);
3319
3320 for (link = df->insns[uid].uses; link; link = link->next)
3321 {
3322 struct ref *use = link->ref;
3323
3324 if (DF_REF_REGNO (use) == regno)
3325 return use;
3326 }
3327
3328 return 0;
3329 }
3330
3331
3332 /* Return first def of REGNO inside INSN within BB. */
3333 static struct ref *
3334 df_bb_insn_regno_first_def_find (struct df *df,
3335 basic_block bb ATTRIBUTE_UNUSED, rtx insn,
3336 unsigned int regno)
3337 {
3338 unsigned int uid;
3339 struct df_link *link;
3340
3341 uid = INSN_UID (insn);
3342
3343 for (link = df->insns[uid].defs; link; link = link->next)
3344 {
3345 struct ref *def = link->ref;
3346
3347 if (DF_REF_REGNO (def) == regno)
3348 return def;
3349 }
3350
3351 return 0;
3352 }
3353
3354
3355 /* Return insn using REG if the BB contains only a single
3356 use and def of REG. */
3357 rtx
3358 df_bb_single_def_use_insn_find (struct df *df, basic_block bb, rtx insn, rtx reg)
3359 {
3360 struct ref *def;
3361 struct ref *use;
3362 struct df_link *du_link;
3363
3364 def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3365
3366 gcc_assert (def);
3367
3368 du_link = DF_REF_CHAIN (def);
3369
3370 if (! du_link)
3371 return NULL_RTX;
3372
3373 use = du_link->ref;
3374
3375 /* Check if def is dead. */
3376 if (! use)
3377 return NULL_RTX;
3378
3379 /* Check for multiple uses. */
3380 if (du_link->next)
3381 return NULL_RTX;
3382
3383 return DF_REF_INSN (use);
3384 }
3385 \f
3386 /* Functions for debugging/dumping dataflow information. */
3387
3388
3389 /* Dump a def-use or use-def chain for REF to FILE. */
3390 static void
3391 df_chain_dump (struct df_link *link, FILE *file)
3392 {
3393 fprintf (file, "{ ");
3394 for (; link; link = link->next)
3395 {
3396 fprintf (file, "%c%d ",
3397 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3398 DF_REF_ID (link->ref));
3399 }
3400 fprintf (file, "}");
3401 }
3402
3403
3404 /* Dump a chain of refs with the associated regno. */
3405 static void
3406 df_chain_dump_regno (struct df_link *link, FILE *file)
3407 {
3408 fprintf (file, "{ ");
3409 for (; link; link = link->next)
3410 {
3411 fprintf (file, "%c%d(%d) ",
3412 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3413 DF_REF_ID (link->ref),
3414 DF_REF_REGNO (link->ref));
3415 }
3416 fprintf (file, "}");
3417 }
3418
3419
3420 /* Dump dataflow info. */
3421 void
3422 df_dump (struct df *df, int flags, FILE *file)
3423 {
3424 unsigned int j;
3425 basic_block bb;
3426
3427 if (! df || ! file)
3428 return;
3429
3430 fprintf (file, "\nDataflow summary:\n");
3431 fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3432 df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3433
3434 if (flags & DF_RD)
3435 {
3436 basic_block bb;
3437
3438 fprintf (file, "Reaching defs:\n");
3439 FOR_EACH_BB (bb)
3440 {
3441 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3442
3443 if (! bb_info->rd_in)
3444 continue;
3445
3446 fprintf (file, "bb %d in \t", bb->index);
3447 dump_bitmap (file, bb_info->rd_in);
3448 fprintf (file, "bb %d gen \t", bb->index);
3449 dump_bitmap (file, bb_info->rd_gen);
3450 fprintf (file, "bb %d kill\t", bb->index);
3451 dump_bitmap (file, bb_info->rd_kill);
3452 fprintf (file, "bb %d out \t", bb->index);
3453 dump_bitmap (file, bb_info->rd_out);
3454 }
3455 }
3456
3457 if (flags & DF_UD_CHAIN)
3458 {
3459 fprintf (file, "Use-def chains:\n");
3460 for (j = 0; j < df->n_defs; j++)
3461 {
3462 if (df->defs[j])
3463 {
3464 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3465 j, DF_REF_BBNO (df->defs[j]),
3466 DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3467 DF_REF_INSN_UID (df->defs[j]),
3468 DF_REF_REGNO (df->defs[j]));
3469 if (df->defs[j]->flags & DF_REF_READ_WRITE)
3470 fprintf (file, "read/write ");
3471 df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3472 fprintf (file, "\n");
3473 }
3474 }
3475 }
3476
3477 if (flags & DF_RU)
3478 {
3479 fprintf (file, "Reaching uses:\n");
3480 FOR_EACH_BB (bb)
3481 {
3482 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3483
3484 if (! bb_info->ru_in)
3485 continue;
3486
3487 fprintf (file, "bb %d in \t", bb->index);
3488 dump_bitmap (file, bb_info->ru_in);
3489 fprintf (file, "bb %d gen \t", bb->index);
3490 dump_bitmap (file, bb_info->ru_gen);
3491 fprintf (file, "bb %d kill\t", bb->index);
3492 dump_bitmap (file, bb_info->ru_kill);
3493 fprintf (file, "bb %d out \t", bb->index);
3494 dump_bitmap (file, bb_info->ru_out);
3495 }
3496 }
3497
3498 if (flags & DF_DU_CHAIN)
3499 {
3500 fprintf (file, "Def-use chains:\n");
3501 for (j = 0; j < df->n_uses; j++)
3502 {
3503 if (df->uses[j])
3504 {
3505 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3506 j, DF_REF_BBNO (df->uses[j]),
3507 DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3508 DF_REF_INSN_UID (df->uses[j]),
3509 DF_REF_REGNO (df->uses[j]));
3510 if (df->uses[j]->flags & DF_REF_READ_WRITE)
3511 fprintf (file, "read/write ");
3512 df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3513 fprintf (file, "\n");
3514 }
3515 }
3516 }
3517
3518 if (flags & DF_LR)
3519 {
3520 fprintf (file, "Live regs:\n");
3521 FOR_EACH_BB (bb)
3522 {
3523 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3524
3525 if (! bb_info->lr_in)
3526 continue;
3527
3528 fprintf (file, "bb %d in \t", bb->index);
3529 dump_bitmap (file, bb_info->lr_in);
3530 fprintf (file, "bb %d use \t", bb->index);
3531 dump_bitmap (file, bb_info->lr_use);
3532 fprintf (file, "bb %d def \t", bb->index);
3533 dump_bitmap (file, bb_info->lr_def);
3534 fprintf (file, "bb %d out \t", bb->index);
3535 dump_bitmap (file, bb_info->lr_out);
3536 }
3537 }
3538
3539 if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3540 {
3541 struct reg_info *reg_info = df->regs;
3542
3543 fprintf (file, "Register info:\n");
3544 for (j = 0; j < df->n_regs; j++)
3545 {
3546 if (((flags & DF_REG_INFO)
3547 && (reg_info[j].n_uses || reg_info[j].n_defs))
3548 || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3549 || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3550 {
3551 fprintf (file, "reg %d", j);
3552 if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3553 {
3554 basic_block bb = df_regno_bb (df, j);
3555
3556 if (bb)
3557 fprintf (file, " bb %d", bb->index);
3558 else
3559 fprintf (file, " bb ?");
3560 }
3561 if (flags & DF_REG_INFO)
3562 {
3563 fprintf (file, " life %d", reg_info[j].lifetime);
3564 }
3565
3566 if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3567 {
3568 fprintf (file, " defs ");
3569 if (flags & DF_REG_INFO)
3570 fprintf (file, "%d ", reg_info[j].n_defs);
3571 if (flags & DF_RD_CHAIN)
3572 df_chain_dump (reg_info[j].defs, file);
3573 }
3574
3575 if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3576 {
3577 fprintf (file, " uses ");
3578 if (flags & DF_REG_INFO)
3579 fprintf (file, "%d ", reg_info[j].n_uses);
3580 if (flags & DF_RU_CHAIN)
3581 df_chain_dump (reg_info[j].uses, file);
3582 }
3583
3584 fprintf (file, "\n");
3585 }
3586 }
3587 }
3588 fprintf (file, "\n");
3589 }
3590
3591
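/* Dump the def and use chains of INSN to FILE. */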
3592 void
3593 df_insn_debug (struct df *df, rtx insn, FILE *file)
3594 {
3595 unsigned int uid;
3596 int bbi;
3597
3598 uid = INSN_UID (insn);
3599 if (uid >= df->insn_size)
3600 return;
3601
3602 if (df->insns[uid].defs)
3603 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3604 else if (df->insns[uid].uses)
3605 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3606 else
3607 bbi = -1;
3608
3609 fprintf (file, "insn %d bb %d luid %d defs ",
3610 uid, bbi, DF_INSN_LUID (df, insn));
3611 df_chain_dump (df->insns[uid].defs, file);
3612 fprintf (file, " uses ");
3613 df_chain_dump (df->insns[uid].uses, file);
3614 fprintf (file, "\n");
3615 }
3616
3617
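/* As df_insn_debug, but also print the regno of each ref. */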
3618 void
3619 df_insn_debug_regno (struct df *df, rtx insn, FILE *file)
3620 {
3621 unsigned int uid;
3622 int bbi;
3623
3624 uid = INSN_UID (insn);
3625 if (uid >= df->insn_size)
3626 return;
3627
3628 if (df->insns[uid].defs)
3629 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3630 else if (df->insns[uid].uses)
3631 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3632 else
3633 bbi = -1;
3634
3635 fprintf (file, "insn %d bb %d luid %d defs ",
3636 uid, bbi, DF_INSN_LUID (df, insn));
3637 df_chain_dump_regno (df->insns[uid].defs, file);
3638 fprintf (file, " uses ");
3639 df_chain_dump_regno (df->insns[uid].uses, file);
3640 fprintf (file, "\n");
3641 }
3642
3643
3644 static void
3645 df_regno_debug (struct df *df, unsigned int regno, FILE *file)
3646 {
3647 if (regno >= df->reg_size)
3648 return;
3649
3650 fprintf (file, "reg %d life %d defs ",
3651 regno, df->regs[regno].lifetime);
3652 df_chain_dump (df->regs[regno].defs, file);
3653 fprintf (file, " uses ");
3654 df_chain_dump (df->regs[regno].uses, file);
3655 fprintf (file, "\n");
3656 }
3657
3658
3659 static void
3660 df_ref_debug (struct df *df, struct ref *ref, FILE *file)
3661 {
3662 fprintf (file, "%c%d ",
3663 DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3664 DF_REF_ID (ref));
3665 fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3666 DF_REF_REGNO (ref),
3667 DF_REF_BBNO (ref),
3668 DF_INSN_LUID (df, DF_REF_INSN (ref)),
3669 INSN_UID (DF_REF_INSN (ref)));
3670 df_chain_dump (DF_REF_CHAIN (ref), file);
3671 fprintf (file, "\n");
3672 }
3673 \f
3674 /* Functions for debugging from GDB. */
3675
3676 void
3677 debug_df_insn (rtx insn)
3678 {
3679 df_insn_debug (ddf, insn, stderr);
3680 debug_rtx (insn);
3681 }
3682
3683
3684 void
3685 debug_df_reg (rtx reg)
3686 {
3687 df_regno_debug (ddf, REGNO (reg), stderr);
3688 }
3689
3690
3691 void
3692 debug_df_regno (unsigned int regno)
3693 {
3694 df_regno_debug (ddf, regno, stderr);
3695 }
3696
3697
3698 void
3699 debug_df_ref (struct ref *ref)
3700 {
3701 df_ref_debug (ddf, ref, stderr);
3702 }
3703
3704
3705 void
3706 debug_df_defno (unsigned int defno)
3707 {
3708 df_ref_debug (ddf, ddf->defs[defno], stderr);
3709 }
3710
3711
3712 void
3713 debug_df_useno (unsigned int defno)
3714 {
3715 df_ref_debug (ddf, ddf->uses[defno], stderr);
3716 }
3717
3718
3719 void
3720 debug_df_chain (struct df_link *link)
3721 {
3722 df_chain_dump (link, stderr);
3723 fputc ('\n', stderr);
3724 }
3725 \f
3726
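/* Set RSLT to OP1 combined with OP2 under the confluence operation OP,
   where all three sets use representation REPR. */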
3727 static void
3728 dataflow_set_a_op_b (enum set_representation repr,
3729 enum df_confluence_op op,
3730 void *rslt, void *op1, void *op2)
3731 {
3732 switch (repr)
3733 {
3734 case SR_SBITMAP:
3735 switch (op)
3736 {
3737 case DF_UNION:
3738 sbitmap_a_or_b (rslt, op1, op2);
3739 break;
3740
3741 case DF_INTERSECTION:
3742 sbitmap_a_and_b (rslt, op1, op2);
3743 break;
3744
3745 default:
3746 gcc_unreachable ();
3747 }
3748 break;
3749
3750 case SR_BITMAP:
3751 switch (op)
3752 {
3753 case DF_UNION:
3754 bitmap_a_or_b (rslt, op1, op2);
3755 break;
3756
3757 case DF_INTERSECTION:
3758 bitmap_a_and_b (rslt, op1, op2);
3759 break;
3760
3761 default:
3762 gcc_unreachable ();
3763 }
3764 break;
3765
3766 default:
3767 gcc_unreachable ();
3768 }
3769 }
3770
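/* Copy the set SRC into DEST; both use representation REPR. */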
3771 static void
3772 dataflow_set_copy (enum set_representation repr, void *dest, void *src)
3773 {
3774 switch (repr)
3775 {
3776 case SR_SBITMAP:
3777 sbitmap_copy (dest, src);
3778 break;
3779
3780 case SR_BITMAP:
3781 bitmap_copy (dest, src);
3782 break;
3783
3784 default:
3785 gcc_unreachable ();
3786 }
3787 }
3788
3789 /* Hybrid search algorithm from "Implementation Techniques for
3790 Efficient Data-Flow Analysis of Large Programs". */
3791
3792 static void
3793 hybrid_search (basic_block bb, struct dataflow *dataflow,
3794 sbitmap visited, sbitmap pending, sbitmap considered)
3795 {
3796 int changed;
3797 int i = bb->index;
3798 edge e;
3799 edge_iterator ei;
3800
3801 SET_BIT (visited, bb->index);
3802 gcc_assert (TEST_BIT (pending, bb->index));
3803 RESET_BIT (pending, i);
3804
3805 #define HS(E_ANTI, E_ANTI_BB, E_ANTI_START_BB, IN_SET, \
3806 E, E_BB, E_START_BB, OUT_SET) \
3807 do \
3808 { \
3809 /* Calculate <conf_op> of predecessor_outs. */ \
3810 bitmap_zero (IN_SET[i]); \
3811 FOR_EACH_EDGE (e, ei, bb->E_ANTI) \
3812 { \
3813 if (e->E_ANTI_BB == E_ANTI_START_BB) \
3814 continue; \
3815 if (!TEST_BIT (considered, e->E_ANTI_BB->index)) \
3816 continue; \
3817 \
3818 dataflow_set_a_op_b (dataflow->repr, dataflow->conf_op, \
3819 IN_SET[i], IN_SET[i], \
3820 OUT_SET[e->E_ANTI_BB->index]); \
3821 } \
3822 \
3823 (*dataflow->transfun)(i, &changed, \
3824 dataflow->in[i], dataflow->out[i], \
3825 dataflow->gen[i], dataflow->kill[i], \
3826 dataflow->data); \
3827 \
3828 if (!changed) \
3829 break; \
3830 \
3831 FOR_EACH_EDGE (e, ei, bb->E) \
3832 { \
3833 if (e->E_BB == E_START_BB || e->E_BB->index == i) \
3834 continue; \
3835 \
3836 if (!TEST_BIT (considered, e->E_BB->index)) \
3837 continue; \
3838 \
3839 SET_BIT (pending, e->E_BB->index); \
3840 } \
3841 \
3842 FOR_EACH_EDGE (e, ei, bb->E) \
3843 { \
3844 if (e->E_BB == E_START_BB || e->E_BB->index == i) \
3845 continue; \
3846 \
3847 if (!TEST_BIT (considered, e->E_BB->index)) \
3848 continue; \
3849 \
3850 if (!TEST_BIT (visited, e->E_BB->index)) \
3851 hybrid_search (e->E_BB, dataflow, visited, pending, considered); \
3852 } \
3853 } while (0)
3854
3855 if (dataflow->dir == DF_FORWARD)
3856 HS (preds, src, ENTRY_BLOCK_PTR, dataflow->in,
3857 succs, dest, EXIT_BLOCK_PTR, dataflow->out);
3858 else
3859 HS (succs, dest, EXIT_BLOCK_PTR, dataflow->out,
3860 preds, src, ENTRY_BLOCK_PTR, dataflow->in);
3861 }
3862
3863 /* This function will perform iterative bitvector dataflow described by
3864 DATAFLOW, producing the in and out sets. Only the part of the cfg
3865 induced by blocks in DATAFLOW->order is taken into account.
3866
3867 For forward problems, you probably want to process the blocks in
3868 reverse completion order, e.g. pass df->rc_order as DATAFLOW->order. */
3869
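/* A condensed setup sketch (hypothetical values, mirroring df_analyze_1
   above): the in/out/gen/kill arrays hold one bitmap per basic block,
   indexed by bb->index.

       struct dataflow dflow;
       dflow.in = ...; dflow.out = ...; dflow.gen = ...; dflow.kill = ...;
       dflow.repr = SR_BITMAP;
       dflow.dir = DF_FORWARD;
       dflow.conf_op = DF_UNION;
       dflow.transfun = df_rd_transfer_function;
       dflow.n_blocks = n_basic_blocks;
       dflow.order = df->rc_order;
       dflow.data = NULL;
       iterative_dataflow (&dflow); */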
3870 void
3871 iterative_dataflow (struct dataflow *dataflow)
3872 {
3873 unsigned i, idx;
3874 sbitmap visited, pending, considered;
3875
3876 pending = sbitmap_alloc (last_basic_block);
3877 visited = sbitmap_alloc (last_basic_block);
3878 considered = sbitmap_alloc (last_basic_block);
3879 sbitmap_zero (pending);
3880 sbitmap_zero (visited);
3881 sbitmap_zero (considered);
3882
3883 for (i = 0; i < dataflow->n_blocks; i++)
3884 {
3885 idx = dataflow->order[i];
3886 SET_BIT (pending, idx);
3887 SET_BIT (considered, idx);
3888 if (dataflow->dir == DF_FORWARD)
3889 dataflow_set_copy (dataflow->repr,
3890 dataflow->out[idx], dataflow->gen[idx]);
3891 else
3892 dataflow_set_copy (dataflow->repr,
3893 dataflow->in[idx], dataflow->gen[idx]);
3894 };
3895
3896 while (1)
3897 {
3898 for (i = 0; i < dataflow->n_blocks; i++)
3899 {
3900 idx = dataflow->order[i];
3901
3902 if (TEST_BIT (pending, idx) && !TEST_BIT (visited, idx))
3903 hybrid_search (BASIC_BLOCK (idx), dataflow,
3904 visited, pending, considered);
3905 }
3906
3907 if (sbitmap_first_set_bit (pending) == -1)
3908 break;
3909
3910 sbitmap_zero (visited);
3911 }
3912
3913 sbitmap_free (pending);
3914 sbitmap_free (visited);
3915 sbitmap_free (considered);
3916 }