alias.c (mark_constant_function): Use INSN_P.
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
26
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
30
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
34
35 ** find_basic_blocks **
36
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
41
42 find_basic_blocks also finds any unreachable loops and deletes them.
43
44 ** life_analysis **
45
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
49
50 ** live-register info **
51
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
59
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
66
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
76 REG_DEAD notes.
77
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
84
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
88
89 ** Other actions of life_analysis **
90
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
93
94 life_analysis deletes insns whose only effect is to store a value
95 that is never used.
96
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
102
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
105
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
112
113 /* TODO:
114
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
118 - log links creation
119 - pre/post modify transformation
120 */
121 \f
122 #include "config.h"
123 #include "system.h"
124 #include "tree.h"
125 #include "rtl.h"
126 #include "tm_p.h"
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "insn-config.h"
130 #include "regs.h"
131 #include "flags.h"
132 #include "output.h"
133 #include "function.h"
134 #include "except.h"
135 #include "toplev.h"
136 #include "recog.h"
137 #include "insn-flags.h"
138 #include "expr.h"
139 #include "ssa.h"
140
141 #include "obstack.h"
142 #include "splay-tree.h"
143
144 #define obstack_chunk_alloc xmalloc
145 #define obstack_chunk_free free
146
147
148 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
149 the stack pointer does not matter. The value is tested only in
150 functions that have frame pointers.
151 No definition is equivalent to always zero. */
152 #ifndef EXIT_IGNORE_STACK
153 #define EXIT_IGNORE_STACK 0
154 #endif
155
156 #ifndef HAVE_epilogue
157 #define HAVE_epilogue 0
158 #endif
159 #ifndef HAVE_prologue
160 #define HAVE_prologue 0
161 #endif
162 #ifndef HAVE_sibcall_epilogue
163 #define HAVE_sibcall_epilogue 0
164 #endif
165
166 /* The contents of the current function definition are allocated
167 in this obstack, and all are freed at the end of the function.
168 For top-level functions, this is temporary_obstack.
169 Separate obstacks are made for nested functions. */
170
171 extern struct obstack *function_obstack;
172
173 /* Number of basic blocks in the current function. */
174
175 int n_basic_blocks;
176
177 /* Number of edges in the current function. */
178
179 int n_edges;
180
181 /* The basic block array. */
182
183 varray_type basic_block_info;
184
185 /* The special entry and exit blocks. */
186
187 struct basic_block_def entry_exit_blocks[2]
188 = {{NULL, /* head */
189 NULL, /* end */
190 NULL, /* pred */
191 NULL, /* succ */
192 NULL, /* local_set */
193 NULL, /* global_live_at_start */
194 NULL, /* global_live_at_end */
195 NULL, /* aux */
196 ENTRY_BLOCK, /* index */
197 0, /* loop_depth */
198 -1, -1, /* eh_beg, eh_end */
199 0 /* count */
200 },
201 {
202 NULL, /* head */
203 NULL, /* end */
204 NULL, /* pred */
205 NULL, /* succ */
206 NULL, /* local_set */
207 NULL, /* global_live_at_start */
208 NULL, /* global_live_at_end */
209 NULL, /* aux */
210 EXIT_BLOCK, /* index */
211 0, /* loop_depth */
212 -1, -1, /* eh_beg, eh_end */
213 0 /* count */
214 }
215 };
216
217 /* Nonzero if the second flow pass has completed. */
218 int flow2_completed;
219
220 /* Maximum register number used in this function, plus one. */
221
222 int max_regno;
223
224 /* Indexed by n, giving various register information */
225
226 varray_type reg_n_info;
227
228 /* Size of a regset for the current function,
229 in (1) bytes and (2) elements. */
230
231 int regset_bytes;
232 int regset_size;
233
234 /* Regset of regs live when calls to `setjmp'-like functions happen. */
235 /* ??? Does this exist only for the setjmp-clobbered warning message? */
236
237 regset regs_live_at_setjmp;
238
239 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
240 that have to go in the same hard reg.
241 The first two regs in the list are a pair, and the next two
242 are another pair, etc. */
243 rtx regs_may_share;
244
245 /* Set of registers that may be eliminable. These are handled specially
246 in updating regs_ever_live. */
247
248 static HARD_REG_SET elim_reg_set;
249
250 /* The basic block structure for every insn, indexed by uid. */
251
252 varray_type basic_block_for_insn;
253
254 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
255 /* ??? Should probably be using LABEL_NUSES instead. It would take a
256 bit of surgery to be able to use or co-opt the routines in jump. */
257
258 static rtx label_value_list;
259 static rtx tail_recursion_label_list;
260
261 /* Holds information for tracking conditional register life information. */
262 struct reg_cond_life_info
263 {
264 /* An EXPR_LIST of conditions under which a register is dead. */
265 rtx condition;
266
267 /* ??? Could store mask of bytes that are dead, so that we could finally
268 track lifetimes of multi-word registers accessed via subregs. */
269 };
270
271 /* For use in communicating between propagate_block and its subroutines.
272 Holds all information needed to compute life and def-use information. */
273
274 struct propagate_block_info
275 {
276 /* The basic block we're considering. */
277 basic_block bb;
278
279 /* Bit N is set if register N is conditionally or unconditionally live. */
280 regset reg_live;
281
282 /* Bit N is set if register N is set this insn. */
283 regset new_set;
284
285 /* Element N is the next insn that uses (hard or pseudo) register N
286 within the current basic block; or zero, if there is no such insn. */
287 rtx *reg_next_use;
288
289 /* Contains a list of all the MEMs we are tracking for dead store
290 elimination. */
291 rtx mem_set_list;
292
293 /* If non-null, record the set of registers set in the basic block. */
294 regset local_set;
295
296 #ifdef HAVE_conditional_execution
297 /* Indexed by register number, holds a reg_cond_life_info for each
298 register that is not unconditionally live or dead. */
299 splay_tree reg_cond_dead;
300
301 /* Bit N is set if register N is in an expression in reg_cond_dead. */
302 regset reg_cond_reg;
303 #endif
304
305 /* Non-zero if the value of CC0 is live. */
306 int cc0_live;
307
308 /* Flags controling the set of information propagate_block collects. */
309 int flags;
310 };
311
312 /* Store the data structures necessary for depth-first search. */
313 struct depth_first_search_dsS {
314 /* stack for backtracking during the algorithm */
315 basic_block *stack;
316
317 /* number of edges in the stack. That is, positions 0, ..., sp-1
318 have edges. */
319 unsigned int sp;
320
321 /* record of basic blocks already seen by depth-first search */
322 sbitmap visited_blocks;
323 };
324 typedef struct depth_first_search_dsS *depth_first_search_ds;
325
326 /* Forward declarations */
327 static int count_basic_blocks PARAMS ((rtx));
328 static void find_basic_blocks_1 PARAMS ((rtx));
329 static rtx find_label_refs PARAMS ((rtx, rtx));
330 static void clear_edges PARAMS ((void));
331 static void make_edges PARAMS ((rtx));
332 static void make_label_edge PARAMS ((sbitmap *, basic_block,
333 rtx, int));
334 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
335 basic_block, rtx, int));
336 static void mark_critical_edges PARAMS ((void));
337 static void move_stray_eh_region_notes PARAMS ((void));
338 static void record_active_eh_regions PARAMS ((rtx));
339
340 static void commit_one_edge_insertion PARAMS ((edge));
341
342 static void delete_unreachable_blocks PARAMS ((void));
343 static void delete_eh_regions PARAMS ((void));
344 static int can_delete_note_p PARAMS ((rtx));
345 static void expunge_block PARAMS ((basic_block));
346 static int can_delete_label_p PARAMS ((rtx));
347 static int tail_recursion_label_p PARAMS ((rtx));
348 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
349 basic_block));
350 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
351 basic_block));
352 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
353 static void try_merge_blocks PARAMS ((void));
354 static void tidy_fallthru_edges PARAMS ((void));
355 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
356 static void verify_wide_reg PARAMS ((int, rtx, rtx));
357 static void verify_local_live_at_start PARAMS ((regset, basic_block));
358 static int set_noop_p PARAMS ((rtx));
359 static int noop_move_p PARAMS ((rtx));
360 static void delete_noop_moves PARAMS ((rtx));
361 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
362 static void notice_stack_pointer_modification PARAMS ((rtx));
363 static void mark_reg PARAMS ((rtx, void *));
364 static void mark_regs_live_at_end PARAMS ((regset));
365 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
366 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
367 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
368 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
369 static int insn_dead_p PARAMS ((struct propagate_block_info *,
370 rtx, int, rtx));
371 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
372 rtx, rtx));
373 static void mark_set_regs PARAMS ((struct propagate_block_info *,
374 rtx, rtx));
375 static void mark_set_1 PARAMS ((struct propagate_block_info *,
376 enum rtx_code, rtx, rtx,
377 rtx, int));
378 #ifdef HAVE_conditional_execution
379 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
380 int, rtx));
381 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
382 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
383 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
384 int));
385 static rtx ior_reg_cond PARAMS ((rtx, rtx));
386 static rtx not_reg_cond PARAMS ((rtx));
387 static rtx nand_reg_cond PARAMS ((rtx, rtx));
388 #endif
389 #ifdef AUTO_INC_DEC
390 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
391 rtx, rtx, rtx, rtx, rtx));
392 static void find_auto_inc PARAMS ((struct propagate_block_info *,
393 rtx, rtx));
394 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
395 rtx));
396 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
397 #endif
398 static void mark_used_reg PARAMS ((struct propagate_block_info *,
399 rtx, rtx, rtx));
400 static void mark_used_regs PARAMS ((struct propagate_block_info *,
401 rtx, rtx, rtx));
402 void dump_flow_info PARAMS ((FILE *));
403 void debug_flow_info PARAMS ((void));
404 static void dump_edge_info PARAMS ((FILE *, edge, int));
405
406 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
407 rtx));
408 static void remove_fake_successors PARAMS ((basic_block));
409 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
410 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
411 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
412 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
413 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
414 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
415 static int flow_depth_first_order_compute PARAMS ((int *, int *));
416 static void flow_dfs_compute_reverse_init
417 PARAMS ((depth_first_search_ds));
418 static void flow_dfs_compute_reverse_add_bb
419 PARAMS ((depth_first_search_ds, basic_block));
420 static basic_block flow_dfs_compute_reverse_execute
421 PARAMS ((depth_first_search_ds));
422 static void flow_dfs_compute_reverse_finish
423 PARAMS ((depth_first_search_ds));
424 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
425 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
426 static void flow_loops_tree_build PARAMS ((struct loops *));
427 static int flow_loop_level_compute PARAMS ((struct loop *, int));
428 static int flow_loops_level_compute PARAMS ((struct loops *));
429 \f
430 /* Find basic blocks of the current function.
431 F is the first insn of the function and NREGS the number of register
432 numbers in use. */
433
434 void
435 find_basic_blocks (f, nregs, file)
436 rtx f;
437 int nregs ATTRIBUTE_UNUSED;
438 FILE *file ATTRIBUTE_UNUSED;
439 {
440 int max_uid;
441
442 /* Flush out existing data. */
443 if (basic_block_info != NULL)
444 {
445 int i;
446
447 clear_edges ();
448
449 /* Clear bb->aux on all extant basic blocks. We'll use this as a
450 tag for reuse during create_basic_block, just in case some pass
451 copies around basic block notes improperly. */
452 for (i = 0; i < n_basic_blocks; ++i)
453 BASIC_BLOCK (i)->aux = NULL;
454
455 VARRAY_FREE (basic_block_info);
456 }
457
458 n_basic_blocks = count_basic_blocks (f);
459
460 /* Size the basic block table. The actual structures will be allocated
461 by find_basic_blocks_1, since we want to keep the structure pointers
462 stable across calls to find_basic_blocks. */
463 /* ??? This whole issue would be much simpler if we called find_basic_blocks
464 exactly once, and thereafter we don't have a single long chain of
465 instructions at all until close to the end of compilation when we
466 actually lay them out. */
467
468 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
469
470 find_basic_blocks_1 (f);
471
472 /* Record the block to which an insn belongs. */
473 /* ??? This should be done another way, by which (perhaps) a label is
474 tagged directly with the basic block that it starts. It is used for
475 more than that currently, but IMO that is the only valid use. */
476
477 max_uid = get_max_uid ();
478 #ifdef AUTO_INC_DEC
479 /* Leave space for insns life_analysis makes in some cases for auto-inc.
480 These cases are rare, so we don't need too much space. */
481 max_uid += max_uid / 10;
482 #endif
483
484 compute_bb_for_insn (max_uid);
485
486 /* Discover the edges of our cfg. */
487 record_active_eh_regions (f);
488 make_edges (label_value_list);
489
490 /* Do very simple cleanup now, for the benefit of code that runs between
491 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
492 tidy_fallthru_edges ();
493
494 mark_critical_edges ();
495
496 #ifdef ENABLE_CHECKING
497 verify_flow_info ();
498 #endif
499 }
500
501 /* Count the basic blocks of the function. */
502
503 static int
504 count_basic_blocks (f)
505 rtx f;
506 {
507 register rtx insn;
508 register RTX_CODE prev_code;
509 register int count = 0;
510 int eh_region = 0;
511 int call_had_abnormal_edge = 0;
512
513 prev_code = JUMP_INSN;
514 for (insn = f; insn; insn = NEXT_INSN (insn))
515 {
516 register RTX_CODE code = GET_CODE (insn);
517
518 if (code == CODE_LABEL
519 || (GET_RTX_CLASS (code) == 'i'
520 && (prev_code == JUMP_INSN
521 || prev_code == BARRIER
522 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
523 count++;
524
525 /* Record whether this call created an edge. */
526 if (code == CALL_INSN)
527 {
528 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
529 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
530
531 call_had_abnormal_edge = 0;
532
533 /* If there is an EH region or rethrow, we have an edge. */
534 if ((eh_region && region > 0)
535 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
536 call_had_abnormal_edge = 1;
537 else if (nonlocal_goto_handler_labels && region >= 0)
538 /* If there is a nonlocal goto label and the specified
539 region number isn't -1, we have an edge. (0 means
540 no throw, but might have a nonlocal goto). */
541 call_had_abnormal_edge = 1;
542 }
543
544 if (code != NOTE)
545 prev_code = code;
546 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
547 ++eh_region;
548 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
549 --eh_region;
550 }
551
552 /* The rest of the compiler works a bit smoother when we don't have to
553 check for the edge case of do-nothing functions with no basic blocks. */
554 if (count == 0)
555 {
556 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
557 count = 1;
558 }
559
560 return count;
561 }
562
563 /* Scan a list of insns for labels referred to other than by jumps.
564 This is used to scan the alternatives of a call placeholder. */
565 static rtx
566 find_label_refs (f, lvl)
567 rtx f;
568 rtx lvl;
569 {
570 rtx insn;
571
572 for (insn = f; insn; insn = NEXT_INSN (insn))
573 if (INSN_P (insn))
574 {
575 rtx note;
576
577 /* Make a list of all labels referred to other than by jumps
578 (which just don't have the REG_LABEL notes).
579
580 Make a special exception for labels followed by an ADDR*VEC,
581 as this would be a part of the tablejump setup code.
582
583 Make a special exception for the eh_return_stub_label, which
584 we know isn't part of any otherwise visible control flow. */
585
586 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
587 if (REG_NOTE_KIND (note) == REG_LABEL)
588 {
589 rtx lab = XEXP (note, 0), next;
590
591 if (lab == eh_return_stub_label)
592 ;
593 else if ((next = next_nonnote_insn (lab)) != NULL
594 && GET_CODE (next) == JUMP_INSN
595 && (GET_CODE (PATTERN (next)) == ADDR_VEC
596 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
597 ;
598 else if (GET_CODE (lab) == NOTE)
599 ;
600 else
601 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
602 }
603 }
604
605 return lvl;
606 }
607
608 /* Find all basic blocks of the function whose first insn is F.
609
610 Collect and return a list of labels whose addresses are taken. This
611 will be used in make_edges for use with computed gotos. */
612
613 static void
614 find_basic_blocks_1 (f)
615 rtx f;
616 {
617 register rtx insn, next;
618 int i = 0;
619 rtx bb_note = NULL_RTX;
620 rtx eh_list = NULL_RTX;
621 rtx lvl = NULL_RTX;
622 rtx trll = NULL_RTX;
623 rtx head = NULL_RTX;
624 rtx end = NULL_RTX;
625
626 /* We process the instructions in a slightly different way than we did
627 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
628 closed out the previous block, so that it gets attached at the proper
629 place. Since this form should be equivalent to the previous,
630 count_basic_blocks continues to use the old form as a check. */
631
632 for (insn = f; insn; insn = next)
633 {
634 enum rtx_code code = GET_CODE (insn);
635
636 next = NEXT_INSN (insn);
637
638 switch (code)
639 {
640 case NOTE:
641 {
642 int kind = NOTE_LINE_NUMBER (insn);
643
644 /* Keep a LIFO list of the currently active exception notes. */
645 if (kind == NOTE_INSN_EH_REGION_BEG)
646 eh_list = alloc_INSN_LIST (insn, eh_list);
647 else if (kind == NOTE_INSN_EH_REGION_END)
648 {
649 rtx t = eh_list;
650
651 eh_list = XEXP (eh_list, 1);
652 free_INSN_LIST_node (t);
653 }
654
655 /* Look for basic block notes with which to keep the
656 basic_block_info pointers stable. Unthread the note now;
657 we'll put it back at the right place in create_basic_block.
658 Or not at all if we've already found a note in this block. */
659 else if (kind == NOTE_INSN_BASIC_BLOCK)
660 {
661 if (bb_note == NULL_RTX)
662 bb_note = insn;
663 else
664 next = flow_delete_insn (insn);
665 }
666 break;
667 }
668
669 case CODE_LABEL:
670 /* A basic block starts at a label. If we've closed one off due
671 to a barrier or some such, no need to do it again. */
672 if (head != NULL_RTX)
673 {
674 /* While we now have edge lists with which other portions of
675 the compiler might determine a call ending a basic block
676 does not imply an abnormal edge, it will be a bit before
677 everything can be updated. So continue to emit a noop at
678 the end of such a block. */
679 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
680 {
681 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
682 end = emit_insn_after (nop, end);
683 }
684
685 create_basic_block (i++, head, end, bb_note);
686 bb_note = NULL_RTX;
687 }
688
689 head = end = insn;
690 break;
691
692 case JUMP_INSN:
693 /* A basic block ends at a jump. */
694 if (head == NULL_RTX)
695 head = insn;
696 else
697 {
698 /* ??? Make a special check for table jumps. The way this
699 happens is truly and amazingly gross. We are about to
700 create a basic block that contains just a code label and
701 an addr*vec jump insn. Worse, an addr_diff_vec creates
702 its own natural loop.
703
704 Prevent this bit of brain damage, pasting things together
705 correctly in make_edges.
706
707 The correct solution involves emitting the table directly
708 on the tablejump instruction as a note, or JUMP_LABEL. */
709
710 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
711 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
712 {
713 head = end = NULL;
714 n_basic_blocks--;
715 break;
716 }
717 }
718 end = insn;
719 goto new_bb_inclusive;
720
721 case BARRIER:
722 /* A basic block ends at a barrier. It may be that an unconditional
723 jump already closed the basic block -- no need to do it again. */
724 if (head == NULL_RTX)
725 break;
726
727 /* While we now have edge lists with which other portions of the
728 compiler might determine a call ending a basic block does not
729 imply an abnormal edge, it will be a bit before everything can
730 be updated. So continue to emit a noop at the end of such a
731 block. */
732 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
733 {
734 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
735 end = emit_insn_after (nop, end);
736 }
737 goto new_bb_exclusive;
738
739 case CALL_INSN:
740 {
741 /* Record whether this call created an edge. */
742 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
743 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
744 int call_has_abnormal_edge = 0;
745
746 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
747 {
748 /* Scan each of the alternatives for label refs. */
749 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
750 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
751 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
752 /* Record its tail recursion label, if any. */
753 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
754 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
755 }
756
757 /* If there is an EH region or rethrow, we have an edge. */
758 if ((eh_list && region > 0)
759 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
760 call_has_abnormal_edge = 1;
761 else if (nonlocal_goto_handler_labels && region >= 0)
762 /* If there is a nonlocal goto label and the specified
763 region number isn't -1, we have an edge. (0 means
764 no throw, but might have a nonlocal goto). */
765 call_has_abnormal_edge = 1;
766
767 /* A basic block ends at a call that can either throw or
768 do a non-local goto. */
769 if (call_has_abnormal_edge)
770 {
771 new_bb_inclusive:
772 if (head == NULL_RTX)
773 head = insn;
774 end = insn;
775
776 new_bb_exclusive:
777 create_basic_block (i++, head, end, bb_note);
778 head = end = NULL_RTX;
779 bb_note = NULL_RTX;
780 break;
781 }
782 }
783 /* FALLTHRU */
784
785 default:
786 if (GET_RTX_CLASS (code) == 'i')
787 {
788 if (head == NULL_RTX)
789 head = insn;
790 end = insn;
791 }
792 break;
793 }
794
795 if (GET_RTX_CLASS (code) == 'i')
796 {
797 rtx note;
798
799 /* Make a list of all labels referred to other than by jumps
800 (which just don't have the REG_LABEL notes).
801
802 Make a special exception for labels followed by an ADDR*VEC,
803 as this would be a part of the tablejump setup code.
804
805 Make a special exception for the eh_return_stub_label, which
806 we know isn't part of any otherwise visible control flow. */
807
808 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
809 if (REG_NOTE_KIND (note) == REG_LABEL)
810 {
811 rtx lab = XEXP (note, 0), next;
812
813 if (lab == eh_return_stub_label)
814 ;
815 else if ((next = next_nonnote_insn (lab)) != NULL
816 && GET_CODE (next) == JUMP_INSN
817 && (GET_CODE (PATTERN (next)) == ADDR_VEC
818 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
819 ;
820 else if (GET_CODE (lab) == NOTE)
821 ;
822 else
823 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
824 }
825 }
826 }
827
828 if (head != NULL_RTX)
829 create_basic_block (i++, head, end, bb_note);
830 else if (bb_note)
831 flow_delete_insn (bb_note);
832
833 if (i != n_basic_blocks)
834 abort ();
835
836 label_value_list = lvl;
837 tail_recursion_label_list = trll;
838 }
839
840 /* Tidy the CFG by deleting unreachable code and whatnot. */
841
842 void
843 cleanup_cfg (f)
844 rtx f;
845 {
846 delete_unreachable_blocks ();
847 move_stray_eh_region_notes ();
848 record_active_eh_regions (f);
849 try_merge_blocks ();
850 mark_critical_edges ();
851
852 /* Kill the data we won't maintain. */
853 free_EXPR_LIST_list (&label_value_list);
854 free_EXPR_LIST_list (&tail_recursion_label_list);
855 }
856
857 /* Create a new basic block consisting of the instructions between
858 HEAD and END inclusive. Reuses the note and basic block struct
859 in BB_NOTE, if any. */
860
861 void
862 create_basic_block (index, head, end, bb_note)
863 int index;
864 rtx head, end, bb_note;
865 {
866 basic_block bb;
867
868 if (bb_note
869 && ! RTX_INTEGRATED_P (bb_note)
870 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
871 && bb->aux == NULL)
872 {
873 /* If we found an existing note, thread it back onto the chain. */
874
875 rtx after;
876
877 if (GET_CODE (head) == CODE_LABEL)
878 after = head;
879 else
880 {
881 after = PREV_INSN (head);
882 head = bb_note;
883 }
884
885 if (after != bb_note && NEXT_INSN (after) != bb_note)
886 reorder_insns (bb_note, bb_note, after);
887 }
888 else
889 {
890 /* Otherwise we must create a note and a basic block structure.
891 Since we allow basic block structs in rtl, give the struct
892 the same lifetime by allocating it off the function obstack
893 rather than using malloc. */
894
895 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
896 memset (bb, 0, sizeof (*bb));
897
898 if (GET_CODE (head) == CODE_LABEL)
899 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
900 else
901 {
902 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
903 head = bb_note;
904 }
905 NOTE_BASIC_BLOCK (bb_note) = bb;
906 }
907
908 /* Always include the bb note in the block. */
909 if (NEXT_INSN (end) == bb_note)
910 end = bb_note;
911
912 bb->head = head;
913 bb->end = end;
914 bb->index = index;
915 BASIC_BLOCK (index) = bb;
916
917 /* Tag the block so that we know it has been used when considering
918 other basic block notes. */
919 bb->aux = bb;
920 }
921 \f
922 /* Records the basic block struct in BB_FOR_INSN, for every instruction
923 indexed by INSN_UID. MAX is the size of the array. */
924
925 void
926 compute_bb_for_insn (max)
927 int max;
928 {
929 int i;
930
931 if (basic_block_for_insn)
932 VARRAY_FREE (basic_block_for_insn);
933 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
934
935 for (i = 0; i < n_basic_blocks; ++i)
936 {
937 basic_block bb = BASIC_BLOCK (i);
938 rtx insn, end;
939
940 end = bb->end;
941 insn = bb->head;
942 while (1)
943 {
944 int uid = INSN_UID (insn);
945 if (uid < max)
946 VARRAY_BB (basic_block_for_insn, uid) = bb;
947 if (insn == end)
948 break;
949 insn = NEXT_INSN (insn);
950 }
951 }
952 }
953
954 /* Free the memory associated with the edge structures. */
955
956 static void
957 clear_edges ()
958 {
959 int i;
960 edge n, e;
961
962 for (i = 0; i < n_basic_blocks; ++i)
963 {
964 basic_block bb = BASIC_BLOCK (i);
965
966 for (e = bb->succ; e ; e = n)
967 {
968 n = e->succ_next;
969 free (e);
970 }
971
972 bb->succ = 0;
973 bb->pred = 0;
974 }
975
976 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
977 {
978 n = e->succ_next;
979 free (e);
980 }
981
982 ENTRY_BLOCK_PTR->succ = 0;
983 EXIT_BLOCK_PTR->pred = 0;
984
985 n_edges = 0;
986 }
987
988 /* Identify the edges between basic blocks.
989
990 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
991 that are otherwise unreachable may be reachable with a non-local goto.
992
993 BB_EH_END is an array indexed by basic block number in which we record
994 the list of exception regions active at the end of the basic block. */
995
996 static void
997 make_edges (label_value_list)
998 rtx label_value_list;
999 {
1000 int i;
1001 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1002 sbitmap *edge_cache = NULL;
1003
1004 /* Assume no computed jump; revise as we create edges. */
1005 current_function_has_computed_jump = 0;
1006
1007 /* Heavy use of computed goto in machine-generated code can lead to
1008 nearly fully-connected CFGs. In that case we spend a significant
1009 amount of time searching the edge lists for duplicates. */
1010 if (forced_labels || label_value_list)
1011 {
1012 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1013 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1014 }
1015
1016 /* By nature of the way these get numbered, block 0 is always the entry. */
1017 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1018
1019 for (i = 0; i < n_basic_blocks; ++i)
1020 {
1021 basic_block bb = BASIC_BLOCK (i);
1022 rtx insn, x;
1023 enum rtx_code code;
1024 int force_fallthru = 0;
1025
1026 /* Examine the last instruction of the block, and discover the
1027 ways we can leave the block. */
1028
1029 insn = bb->end;
1030 code = GET_CODE (insn);
1031
1032 /* A branch. */
1033 if (code == JUMP_INSN)
1034 {
1035 rtx tmp;
1036
1037 /* ??? Recognize a tablejump and do the right thing. */
1038 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1039 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1040 && GET_CODE (tmp) == JUMP_INSN
1041 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1042 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1043 {
1044 rtvec vec;
1045 int j;
1046
1047 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1048 vec = XVEC (PATTERN (tmp), 0);
1049 else
1050 vec = XVEC (PATTERN (tmp), 1);
1051
1052 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1053 make_label_edge (edge_cache, bb,
1054 XEXP (RTVEC_ELT (vec, j), 0), 0);
1055
1056 /* Some targets (eg, ARM) emit a conditional jump that also
1057 contains the out-of-range target. Scan for these and
1058 add an edge if necessary. */
1059 if ((tmp = single_set (insn)) != NULL
1060 && SET_DEST (tmp) == pc_rtx
1061 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1062 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1063 make_label_edge (edge_cache, bb,
1064 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1065
1066 #ifdef CASE_DROPS_THROUGH
1067 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1068 us naturally detecting fallthru into the next block. */
1069 force_fallthru = 1;
1070 #endif
1071 }
1072
1073 /* If this is a computed jump, then mark it as reaching
1074 everything on the label_value_list and forced_labels list. */
1075 else if (computed_jump_p (insn))
1076 {
1077 current_function_has_computed_jump = 1;
1078
1079 for (x = label_value_list; x; x = XEXP (x, 1))
1080 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1081
1082 for (x = forced_labels; x; x = XEXP (x, 1))
1083 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1084 }
1085
1086 /* Returns create an exit out. */
1087 else if (returnjump_p (insn))
1088 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1089
1090 /* Otherwise, we have a plain conditional or unconditional jump. */
1091 else
1092 {
1093 if (! JUMP_LABEL (insn))
1094 abort ();
1095 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1096 }
1097 }
1098
1099 /* If this is a sibling call insn, then this is in effect a
1100 combined call and return, and so we need an edge to the
1101 exit block. No need to worry about EH edges, since we
1102 wouldn't have created the sibling call in the first place. */
1103
1104 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1105 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1106 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1107 else
1108
1109 /* If this is a CALL_INSN, then mark it as reaching the active EH
1110 handler for this CALL_INSN. If we're handling asynchronous
1111 exceptions then any insn can reach any of the active handlers.
1112
1113 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1114
1115 if (code == CALL_INSN || asynchronous_exceptions)
1116 {
1117 /* Add any appropriate EH edges. We do this unconditionally
1118 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1119 on the call, and this needn't be within an EH region. */
1120 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1121
1122 /* If we have asynchronous exceptions, do the same for *all*
1123 exception regions active in the block. */
1124 if (asynchronous_exceptions
1125 && bb->eh_beg != bb->eh_end)
1126 {
1127 if (bb->eh_beg >= 0)
1128 make_eh_edge (edge_cache, eh_nest_info, bb,
1129 NULL_RTX, bb->eh_beg);
1130
1131 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1132 if (GET_CODE (x) == NOTE
1133 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1134 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1135 {
1136 int region = NOTE_EH_HANDLER (x);
1137 make_eh_edge (edge_cache, eh_nest_info, bb,
1138 NULL_RTX, region);
1139 }
1140 }
1141
1142 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1143 {
1144 /* ??? This could be made smarter: in some cases it's possible
1145 to tell that certain calls will not do a nonlocal goto.
1146
1147 For example, if the nested functions that do the nonlocal
1148 gotos do not have their addresses taken, then only calls to
1149 those functions or to other nested functions that use them
1150 could possibly do nonlocal gotos. */
1151 /* We do know that a REG_EH_REGION note with a value less
1152 than 0 is guaranteed not to perform a non-local goto. */
1153 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1154 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1155 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1156 make_label_edge (edge_cache, bb, XEXP (x, 0),
1157 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1158 }
1159 }
1160
1161 /* We know something about the structure of the function __throw in
1162 libgcc2.c. It is the only function that ever contains eh_stub
1163 labels. It modifies its return address so that the last block
1164 returns to one of the eh_stub labels within it. So we have to
1165 make additional edges in the flow graph. */
1166 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1167 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1168
1169 /* Find out if we can drop through to the next block. */
1170 insn = next_nonnote_insn (insn);
1171 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1172 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1173 else if (i + 1 < n_basic_blocks)
1174 {
1175 rtx tmp = BLOCK_HEAD (i + 1);
1176 if (GET_CODE (tmp) == NOTE)
1177 tmp = next_nonnote_insn (tmp);
1178 if (force_fallthru || insn == tmp)
1179 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1180 }
1181 }
1182
1183 free_eh_nesting_info (eh_nest_info);
1184 if (edge_cache)
1185 sbitmap_vector_free (edge_cache);
1186 }
1187
1188 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1189 about the edge that is accumulated between calls. */
1190
1191 void
1192 make_edge (edge_cache, src, dst, flags)
1193 sbitmap *edge_cache;
1194 basic_block src, dst;
1195 int flags;
1196 {
1197 int use_edge_cache;
1198 edge e;
1199
1200 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1201 many edges to them, and we didn't allocate memory for it. */
1202 use_edge_cache = (edge_cache
1203 && src != ENTRY_BLOCK_PTR
1204 && dst != EXIT_BLOCK_PTR);
1205
1206 /* Make sure we don't add duplicate edges. */
1207 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1208 for (e = src->succ; e ; e = e->succ_next)
1209 if (e->dest == dst)
1210 {
1211 e->flags |= flags;
1212 return;
1213 }
1214
1215 e = (edge) xcalloc (1, sizeof (*e));
1216 n_edges++;
1217
1218 e->succ_next = src->succ;
1219 e->pred_next = dst->pred;
1220 e->src = src;
1221 e->dest = dst;
1222 e->flags = flags;
1223
1224 src->succ = e;
1225 dst->pred = e;
1226
1227 if (use_edge_cache)
1228 SET_BIT (edge_cache[src->index], dst->index);
1229 }
1230
1231 /* Create an edge from a basic block to a label. */
1232
1233 static void
1234 make_label_edge (edge_cache, src, label, flags)
1235 sbitmap *edge_cache;
1236 basic_block src;
1237 rtx label;
1238 int flags;
1239 {
1240 if (GET_CODE (label) != CODE_LABEL)
1241 abort ();
1242
1243 /* If the label was never emitted, this insn is junk, but avoid a
1244 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1245 as a result of a syntax error and a diagnostic has already been
1246 printed. */
1247
1248 if (INSN_UID (label) == 0)
1249 return;
1250
1251 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1252 }
1253
1254 /* Create the edges generated by INSN in REGION. */
1255
1256 static void
1257 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1258 sbitmap *edge_cache;
1259 eh_nesting_info *eh_nest_info;
1260 basic_block src;
1261 rtx insn;
1262 int region;
1263 {
1264 handler_info **handler_list;
1265 int num, is_call;
1266
1267 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1268 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1269 while (--num >= 0)
1270 {
1271 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1272 EDGE_ABNORMAL | EDGE_EH | is_call);
1273 }
1274 }
1275
1276 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1277 dangerous if we intend to move basic blocks around. Move such notes
1278 into the following block. */
1279
1280 static void
1281 move_stray_eh_region_notes ()
1282 {
1283 int i;
1284 basic_block b1, b2;
1285
1286 if (n_basic_blocks < 2)
1287 return;
1288
1289 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1290 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1291 {
1292 rtx insn, next, list = NULL_RTX;
1293
1294 b1 = BASIC_BLOCK (i);
1295 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1296 {
1297 next = NEXT_INSN (insn);
1298 if (GET_CODE (insn) == NOTE
1299 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1300 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1301 {
1302 /* Unlink from the insn chain. */
1303 NEXT_INSN (PREV_INSN (insn)) = next;
1304 PREV_INSN (next) = PREV_INSN (insn);
1305
1306 /* Queue it. */
1307 NEXT_INSN (insn) = list;
1308 list = insn;
1309 }
1310 }
1311
1312 if (list == NULL_RTX)
1313 continue;
1314
1315 /* Find where to insert these things. */
1316 insn = b2->head;
1317 if (GET_CODE (insn) == CODE_LABEL)
1318 insn = NEXT_INSN (insn);
1319
1320 while (list)
1321 {
1322 next = NEXT_INSN (list);
1323 add_insn_after (list, insn);
1324 list = next;
1325 }
1326 }
1327 }
1328
1329 /* Recompute eh_beg/eh_end for each basic block. */
1330
1331 static void
1332 record_active_eh_regions (f)
1333 rtx f;
1334 {
1335 rtx insn, eh_list = NULL_RTX;
1336 int i = 0;
1337 basic_block bb = BASIC_BLOCK (0);
1338
1339 for (insn = f; insn ; insn = NEXT_INSN (insn))
1340 {
1341 if (bb->head == insn)
1342 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1343
1344 if (GET_CODE (insn) == NOTE)
1345 {
1346 int kind = NOTE_LINE_NUMBER (insn);
1347 if (kind == NOTE_INSN_EH_REGION_BEG)
1348 eh_list = alloc_INSN_LIST (insn, eh_list);
1349 else if (kind == NOTE_INSN_EH_REGION_END)
1350 {
1351 rtx t = XEXP (eh_list, 1);
1352 free_INSN_LIST_node (eh_list);
1353 eh_list = t;
1354 }
1355 }
1356
1357 if (bb->end == insn)
1358 {
1359 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1360 i += 1;
1361 if (i == n_basic_blocks)
1362 break;
1363 bb = BASIC_BLOCK (i);
1364 }
1365 }
1366 }
1367
1368 /* Identify critical edges and set the bits appropriately. */
1369
1370 static void
1371 mark_critical_edges ()
1372 {
1373 int i, n = n_basic_blocks;
1374 basic_block bb;
1375
1376 /* We begin with the entry block. This is not terribly important now,
1377 but could be if a front end (Fortran) implemented alternate entry
1378 points. */
1379 bb = ENTRY_BLOCK_PTR;
1380 i = -1;
1381
1382 while (1)
1383 {
1384 edge e;
1385
1386 /* (1) Critical edges must have a source with multiple successors. */
1387 if (bb->succ && bb->succ->succ_next)
1388 {
1389 for (e = bb->succ; e ; e = e->succ_next)
1390 {
1391 /* (2) Critical edges must have a destination with multiple
1392 predecessors. Note that we know there is at least one
1393 predecessor -- the edge we followed to get here. */
1394 if (e->dest->pred->pred_next)
1395 e->flags |= EDGE_CRITICAL;
1396 else
1397 e->flags &= ~EDGE_CRITICAL;
1398 }
1399 }
1400 else
1401 {
1402 for (e = bb->succ; e ; e = e->succ_next)
1403 e->flags &= ~EDGE_CRITICAL;
1404 }
1405
1406 if (++i >= n)
1407 break;
1408 bb = BASIC_BLOCK (i);
1409 }
1410 }
1411 \f
1412 /* Split a (typically critical) edge. Return the new block.
1413 Abort on abnormal edges.
1414
1415 ??? The code generally expects to be called on critical edges.
1416 The case of a block ending in an unconditional jump to a
1417 block with multiple predecessors is not handled optimally. */
1418
1419 basic_block
1420 split_edge (edge_in)
1421 edge edge_in;
1422 {
1423 basic_block old_pred, bb, old_succ;
1424 edge edge_out;
1425 rtx bb_note;
1426 int i, j;
1427
1428 /* Abnormal edges cannot be split. */
1429 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1430 abort ();
1431
1432 old_pred = edge_in->src;
1433 old_succ = edge_in->dest;
1434
1435 /* Remove the existing edge from the destination's pred list. */
1436 {
1437 edge *pp;
1438 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1439 continue;
1440 *pp = edge_in->pred_next;
1441 edge_in->pred_next = NULL;
1442 }
1443
1444 /* Create the new structures. */
1445 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1446 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1447 n_edges++;
1448
1449 memset (bb, 0, sizeof (*bb));
1450
1451 /* ??? This info is likely going to be out of date very soon. */
1452 if (old_succ->global_live_at_start)
1453 {
1454 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1455 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1456 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1457 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1458 }
1459
1460 /* Wire them up. */
1461 bb->pred = edge_in;
1462 bb->succ = edge_out;
1463 bb->count = edge_in->count;
1464
1465 edge_in->dest = bb;
1466 edge_in->flags &= ~EDGE_CRITICAL;
1467
1468 edge_out->pred_next = old_succ->pred;
1469 edge_out->succ_next = NULL;
1470 edge_out->src = bb;
1471 edge_out->dest = old_succ;
1472 edge_out->flags = EDGE_FALLTHRU;
1473 edge_out->probability = REG_BR_PROB_BASE;
1474 edge_out->count = edge_in->count;
1475
1476 old_succ->pred = edge_out;
1477
1478 /* Tricky case -- if there existed a fallthru into the successor
1479 (and we're not it) we must add a new unconditional jump around
1480 the new block we're actually interested in.
1481
1482 Further, if that edge is critical, this means a second new basic
1483 block must be created to hold it. In order to simplify correct
1484 insn placement, do this before we touch the existing basic block
1485 ordering for the block we were really wanting. */
1486 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1487 {
1488 edge e;
1489 for (e = edge_out->pred_next; e ; e = e->pred_next)
1490 if (e->flags & EDGE_FALLTHRU)
1491 break;
1492
1493 if (e)
1494 {
1495 basic_block jump_block;
1496 rtx pos;
1497
1498 if ((e->flags & EDGE_CRITICAL) == 0
1499 && e->src != ENTRY_BLOCK_PTR)
1500 {
1501 /* Non critical -- we can simply add a jump to the end
1502 of the existing predecessor. */
1503 jump_block = e->src;
1504 }
1505 else
1506 {
1507 /* We need a new block to hold the jump. The simplest
1508 way to do the bulk of the work here is to recursively
1509 call ourselves. */
1510 jump_block = split_edge (e);
1511 e = jump_block->succ;
1512 }
1513
1514 /* Now add the jump insn ... */
1515 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1516 jump_block->end);
1517 jump_block->end = pos;
1518 if (basic_block_for_insn)
1519 set_block_for_insn (pos, jump_block);
1520 emit_barrier_after (pos);
1521
1522 /* ... let jump know that label is in use, ... */
1523 JUMP_LABEL (pos) = old_succ->head;
1524 ++LABEL_NUSES (old_succ->head);
1525
1526 /* ... and clear fallthru on the outgoing edge. */
1527 e->flags &= ~EDGE_FALLTHRU;
1528
1529 /* Continue splitting the interesting edge. */
1530 }
1531 }
1532
1533 /* Place the new block just in front of the successor. */
1534 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1535 if (old_succ == EXIT_BLOCK_PTR)
1536 j = n_basic_blocks - 1;
1537 else
1538 j = old_succ->index;
1539 for (i = n_basic_blocks - 1; i > j; --i)
1540 {
1541 basic_block tmp = BASIC_BLOCK (i - 1);
1542 BASIC_BLOCK (i) = tmp;
1543 tmp->index = i;
1544 }
1545 BASIC_BLOCK (i) = bb;
1546 bb->index = i;
1547
1548 /* Create the basic block note.
1549
1550 Where we place the note can have a noticable impact on the generated
1551 code. Consider this cfg:
1552
1553
1554 E
1555 |
1556 0
1557 / \
1558 +->1-->2--->E
1559 | |
1560 +--+
1561
1562 If we need to insert an insn on the edge from block 0 to block 1,
1563 we want to ensure the instructions we insert are outside of any
1564 loop notes that physically sit between block 0 and block 1. Otherwise
1565 we confuse the loop optimizer into thinking the loop is a phony. */
1566 if (old_succ != EXIT_BLOCK_PTR
1567 && PREV_INSN (old_succ->head)
1568 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1569 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1570 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1571 PREV_INSN (old_succ->head));
1572 else if (old_succ != EXIT_BLOCK_PTR)
1573 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1574 else
1575 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1576 NOTE_BASIC_BLOCK (bb_note) = bb;
1577 bb->head = bb->end = bb_note;
1578
1579 /* Not quite simple -- for non-fallthru edges, we must adjust the
1580 predecessor's jump instruction to target our new block. */
1581 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1582 {
1583 rtx tmp, insn = old_pred->end;
1584 rtx old_label = old_succ->head;
1585 rtx new_label = gen_label_rtx ();
1586
1587 if (GET_CODE (insn) != JUMP_INSN)
1588 abort ();
1589
1590 /* ??? Recognize a tablejump and adjust all matching cases. */
1591 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1592 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1593 && GET_CODE (tmp) == JUMP_INSN
1594 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1595 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1596 {
1597 rtvec vec;
1598 int j;
1599
1600 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1601 vec = XVEC (PATTERN (tmp), 0);
1602 else
1603 vec = XVEC (PATTERN (tmp), 1);
1604
1605 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1606 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1607 {
1608 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1609 --LABEL_NUSES (old_label);
1610 ++LABEL_NUSES (new_label);
1611 }
1612
1613 /* Handle casesi dispatch insns */
1614 if ((tmp = single_set (insn)) != NULL
1615 && SET_DEST (tmp) == pc_rtx
1616 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1617 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1618 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1619 {
1620 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1621 new_label);
1622 --LABEL_NUSES (old_label);
1623 ++LABEL_NUSES (new_label);
1624 }
1625 }
1626 else
1627 {
1628 /* This would have indicated an abnormal edge. */
1629 if (computed_jump_p (insn))
1630 abort ();
1631
1632 /* A return instruction can't be redirected. */
1633 if (returnjump_p (insn))
1634 abort ();
1635
1636 /* If the insn doesn't go where we think, we're confused. */
1637 if (JUMP_LABEL (insn) != old_label)
1638 abort ();
1639
1640 redirect_jump (insn, new_label, 0);
1641 }
1642
1643 emit_label_before (new_label, bb_note);
1644 bb->head = new_label;
1645 }
1646
1647 return bb;
1648 }
1649
1650 /* Queue instructions for insertion on an edge between two basic blocks.
1651 The new instructions and basic blocks (if any) will not appear in the
1652 CFG until commit_edge_insertions is called. */
1653
1654 void
1655 insert_insn_on_edge (pattern, e)
1656 rtx pattern;
1657 edge e;
1658 {
1659 /* We cannot insert instructions on an abnormal critical edge.
1660 It will be easier to find the culprit if we die now. */
1661 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1662 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1663 abort ();
1664
1665 if (e->insns == NULL_RTX)
1666 start_sequence ();
1667 else
1668 push_to_sequence (e->insns);
1669
1670 emit_insn (pattern);
1671
1672 e->insns = get_insns ();
1673 end_sequence();
1674 }
1675
1676 /* Update the CFG for the instructions queued on edge E. */
1677
1678 static void
1679 commit_one_edge_insertion (e)
1680 edge e;
1681 {
1682 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1683 basic_block bb;
1684
1685 /* Pull the insns off the edge now since the edge might go away. */
1686 insns = e->insns;
1687 e->insns = NULL_RTX;
1688
1689 /* Figure out where to put these things. If the destination has
1690 one predecessor, insert there. Except for the exit block. */
1691 if (e->dest->pred->pred_next == NULL
1692 && e->dest != EXIT_BLOCK_PTR)
1693 {
1694 bb = e->dest;
1695
1696 /* Get the location correct wrt a code label, and "nice" wrt
1697 a basic block note, and before everything else. */
1698 tmp = bb->head;
1699 if (GET_CODE (tmp) == CODE_LABEL)
1700 tmp = NEXT_INSN (tmp);
1701 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1702 tmp = NEXT_INSN (tmp);
1703 if (tmp == bb->head)
1704 before = tmp;
1705 else
1706 after = PREV_INSN (tmp);
1707 }
1708
1709 /* If the source has one successor and the edge is not abnormal,
1710 insert there. Except for the entry block. */
1711 else if ((e->flags & EDGE_ABNORMAL) == 0
1712 && e->src->succ->succ_next == NULL
1713 && e->src != ENTRY_BLOCK_PTR)
1714 {
1715 bb = e->src;
1716 /* It is possible to have a non-simple jump here. Consider a target
1717 where some forms of unconditional jumps clobber a register. This
1718 happens on the fr30 for example.
1719
1720 We know this block has a single successor, so we can just emit
1721 the queued insns before the jump. */
1722 if (GET_CODE (bb->end) == JUMP_INSN)
1723 {
1724 before = bb->end;
1725 }
1726 else
1727 {
1728 /* We'd better be fallthru, or we've lost track of what's what. */
1729 if ((e->flags & EDGE_FALLTHRU) == 0)
1730 abort ();
1731
1732 after = bb->end;
1733 }
1734 }
1735
1736 /* Otherwise we must split the edge. */
1737 else
1738 {
1739 bb = split_edge (e);
1740 after = bb->end;
1741 }
1742
1743 /* Now that we've found the spot, do the insertion. */
1744
1745 /* Set the new block number for these insns, if structure is allocated. */
1746 if (basic_block_for_insn)
1747 {
1748 rtx i;
1749 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1750 set_block_for_insn (i, bb);
1751 }
1752
1753 if (before)
1754 {
1755 emit_insns_before (insns, before);
1756 if (before == bb->head)
1757 bb->head = insns;
1758
1759 last = prev_nonnote_insn (before);
1760 }
1761 else
1762 {
1763 last = emit_insns_after (insns, after);
1764 if (after == bb->end)
1765 bb->end = last;
1766 }
1767
1768 if (returnjump_p (last))
1769 {
1770 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1771 This is not currently a problem because this only happens
1772 for the (single) epilogue, which already has a fallthru edge
1773 to EXIT. */
1774
1775 e = bb->succ;
1776 if (e->dest != EXIT_BLOCK_PTR
1777 || e->succ_next != NULL
1778 || (e->flags & EDGE_FALLTHRU) == 0)
1779 abort ();
1780 e->flags &= ~EDGE_FALLTHRU;
1781
1782 emit_barrier_after (last);
1783 bb->end = last;
1784
1785 if (before)
1786 flow_delete_insn (before);
1787 }
1788 else if (GET_CODE (last) == JUMP_INSN)
1789 abort ();
1790 }
1791
1792 /* Update the CFG for all queued instructions. */
1793
1794 void
1795 commit_edge_insertions ()
1796 {
1797 int i;
1798 basic_block bb;
1799
1800 #ifdef ENABLE_CHECKING
1801 verify_flow_info ();
1802 #endif
1803
1804 i = -1;
1805 bb = ENTRY_BLOCK_PTR;
1806 while (1)
1807 {
1808 edge e, next;
1809
1810 for (e = bb->succ; e ; e = next)
1811 {
1812 next = e->succ_next;
1813 if (e->insns)
1814 commit_one_edge_insertion (e);
1815 }
1816
1817 if (++i >= n_basic_blocks)
1818 break;
1819 bb = BASIC_BLOCK (i);
1820 }
1821 }
1822 \f
1823 /* Delete all unreachable basic blocks. */
1824
1825 static void
1826 delete_unreachable_blocks ()
1827 {
1828 basic_block *worklist, *tos;
1829 int deleted_handler;
1830 edge e;
1831 int i, n;
1832
1833 n = n_basic_blocks;
1834 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1835
1836 /* Use basic_block->aux as a marker. Clear them all. */
1837
1838 for (i = 0; i < n; ++i)
1839 BASIC_BLOCK (i)->aux = NULL;
1840
1841 /* Add our starting points to the worklist. Almost always there will
1842 be only one. It isn't inconcievable that we might one day directly
1843 support Fortran alternate entry points. */
1844
1845 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1846 {
1847 *tos++ = e->dest;
1848
1849 /* Mark the block with a handy non-null value. */
1850 e->dest->aux = e;
1851 }
1852
1853 /* Iterate: find everything reachable from what we've already seen. */
1854
1855 while (tos != worklist)
1856 {
1857 basic_block b = *--tos;
1858
1859 for (e = b->succ; e ; e = e->succ_next)
1860 if (!e->dest->aux)
1861 {
1862 *tos++ = e->dest;
1863 e->dest->aux = e;
1864 }
1865 }
1866
1867 /* Delete all unreachable basic blocks. Count down so that we don't
1868 interfere with the block renumbering that happens in flow_delete_block. */
1869
1870 deleted_handler = 0;
1871
1872 for (i = n - 1; i >= 0; --i)
1873 {
1874 basic_block b = BASIC_BLOCK (i);
1875
1876 if (b->aux != NULL)
1877 /* This block was found. Tidy up the mark. */
1878 b->aux = NULL;
1879 else
1880 deleted_handler |= flow_delete_block (b);
1881 }
1882
1883 tidy_fallthru_edges ();
1884
1885 /* If we deleted an exception handler, we may have EH region begin/end
1886 blocks to remove as well. */
1887 if (deleted_handler)
1888 delete_eh_regions ();
1889
1890 free (worklist);
1891 }
1892
1893 /* Find EH regions for which there is no longer a handler, and delete them. */
1894
1895 static void
1896 delete_eh_regions ()
1897 {
1898 rtx insn;
1899
1900 update_rethrow_references ();
1901
1902 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1903 if (GET_CODE (insn) == NOTE)
1904 {
1905 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1906 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1907 {
1908 int num = NOTE_EH_HANDLER (insn);
1909 /* A NULL handler indicates a region is no longer needed,
1910 as long as its rethrow label isn't used. */
1911 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1912 {
1913 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1914 NOTE_SOURCE_FILE (insn) = 0;
1915 }
1916 }
1917 }
1918 }
1919
1920 /* Return true if NOTE is not one of the ones that must be kept paired,
1921 so that we may simply delete them. */
1922
1923 static int
1924 can_delete_note_p (note)
1925 rtx note;
1926 {
1927 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1928 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1929 }
1930
1931 /* Unlink a chain of insns between START and FINISH, leaving notes
1932 that must be paired. */
1933
1934 void
1935 flow_delete_insn_chain (start, finish)
1936 rtx start, finish;
1937 {
1938 /* Unchain the insns one by one. It would be quicker to delete all
1939 of these with a single unchaining, rather than one at a time, but
1940 we need to keep the NOTE's. */
1941
1942 rtx next;
1943
1944 while (1)
1945 {
1946 next = NEXT_INSN (start);
1947 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1948 ;
1949 else if (GET_CODE (start) == CODE_LABEL
1950 && ! can_delete_label_p (start))
1951 {
1952 const char *name = LABEL_NAME (start);
1953 PUT_CODE (start, NOTE);
1954 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1955 NOTE_SOURCE_FILE (start) = name;
1956 }
1957 else
1958 next = flow_delete_insn (start);
1959
1960 if (start == finish)
1961 break;
1962 start = next;
1963 }
1964 }
1965
1966 /* Delete the insns in a (non-live) block. We physically delete every
1967 non-deleted-note insn, and update the flow graph appropriately.
1968
1969 Return nonzero if we deleted an exception handler. */
1970
1971 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1972 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1973
1974 int
1975 flow_delete_block (b)
1976 basic_block b;
1977 {
1978 int deleted_handler = 0;
1979 rtx insn, end, tmp;
1980
1981 /* If the head of this block is a CODE_LABEL, then it might be the
1982 label for an exception handler which can't be reached.
1983
1984 We need to remove the label from the exception_handler_label list
1985 and remove the associated NOTE_INSN_EH_REGION_BEG and
1986 NOTE_INSN_EH_REGION_END notes. */
1987
1988 insn = b->head;
1989
1990 never_reached_warning (insn);
1991
1992 if (GET_CODE (insn) == CODE_LABEL)
1993 {
1994 rtx x, *prev = &exception_handler_labels;
1995
1996 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1997 {
1998 if (XEXP (x, 0) == insn)
1999 {
2000 /* Found a match, splice this label out of the EH label list. */
2001 *prev = XEXP (x, 1);
2002 XEXP (x, 1) = NULL_RTX;
2003 XEXP (x, 0) = NULL_RTX;
2004
2005 /* Remove the handler from all regions */
2006 remove_handler (insn);
2007 deleted_handler = 1;
2008 break;
2009 }
2010 prev = &XEXP (x, 1);
2011 }
2012 }
2013
2014 /* Include any jump table following the basic block. */
2015 end = b->end;
2016 if (GET_CODE (end) == JUMP_INSN
2017 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2018 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2019 && GET_CODE (tmp) == JUMP_INSN
2020 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2021 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2022 end = tmp;
2023
2024 /* Include any barrier that may follow the basic block. */
2025 tmp = next_nonnote_insn (end);
2026 if (tmp && GET_CODE (tmp) == BARRIER)
2027 end = tmp;
2028
2029 /* Selectively delete the entire chain. */
2030 flow_delete_insn_chain (insn, end);
2031
2032 /* Remove the edges into and out of this block. Note that there may
2033 indeed be edges in, if we are removing an unreachable loop. */
2034 {
2035 edge e, next, *q;
2036
2037 for (e = b->pred; e ; e = next)
2038 {
2039 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2040 continue;
2041 *q = e->succ_next;
2042 next = e->pred_next;
2043 n_edges--;
2044 free (e);
2045 }
2046 for (e = b->succ; e ; e = next)
2047 {
2048 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2049 continue;
2050 *q = e->pred_next;
2051 next = e->succ_next;
2052 n_edges--;
2053 free (e);
2054 }
2055
2056 b->pred = NULL;
2057 b->succ = NULL;
2058 }
2059
2060 /* Remove the basic block from the array, and compact behind it. */
2061 expunge_block (b);
2062
2063 return deleted_handler;
2064 }
2065
2066 /* Remove block B from the basic block array and compact behind it. */
2067
2068 static void
2069 expunge_block (b)
2070 basic_block b;
2071 {
2072 int i, n = n_basic_blocks;
2073
2074 for (i = b->index; i + 1 < n; ++i)
2075 {
2076 basic_block x = BASIC_BLOCK (i + 1);
2077 BASIC_BLOCK (i) = x;
2078 x->index = i;
2079 }
2080
2081 basic_block_info->num_elements--;
2082 n_basic_blocks--;
2083 }
2084
2085 /* Delete INSN by patching it out. Return the next insn. */
2086
2087 rtx
2088 flow_delete_insn (insn)
2089 rtx insn;
2090 {
2091 rtx prev = PREV_INSN (insn);
2092 rtx next = NEXT_INSN (insn);
2093 rtx note;
2094
2095 PREV_INSN (insn) = NULL_RTX;
2096 NEXT_INSN (insn) = NULL_RTX;
2097 INSN_DELETED_P (insn) = 1;
2098
2099 if (prev)
2100 NEXT_INSN (prev) = next;
2101 if (next)
2102 PREV_INSN (next) = prev;
2103 else
2104 set_last_insn (prev);
2105
2106 if (GET_CODE (insn) == CODE_LABEL)
2107 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2108
2109 /* If deleting a jump, decrement the use count of the label. Deleting
2110 the label itself should happen in the normal course of block merging. */
2111 if (GET_CODE (insn) == JUMP_INSN
2112 && JUMP_LABEL (insn)
2113 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2114 LABEL_NUSES (JUMP_LABEL (insn))--;
2115
2116 /* Also if deleting an insn that references a label. */
2117 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2118 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2119 LABEL_NUSES (XEXP (note, 0))--;
2120
2121 return next;
2122 }
2123
2124 /* True if a given label can be deleted. */
2125
2126 static int
2127 can_delete_label_p (label)
2128 rtx label;
2129 {
2130 rtx x;
2131
2132 if (LABEL_PRESERVE_P (label))
2133 return 0;
2134
2135 for (x = forced_labels; x ; x = XEXP (x, 1))
2136 if (label == XEXP (x, 0))
2137 return 0;
2138 for (x = label_value_list; x ; x = XEXP (x, 1))
2139 if (label == XEXP (x, 0))
2140 return 0;
2141 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2142 if (label == XEXP (x, 0))
2143 return 0;
2144
2145 /* User declared labels must be preserved. */
2146 if (LABEL_NAME (label) != 0)
2147 return 0;
2148
2149 return 1;
2150 }
2151
2152 static int
2153 tail_recursion_label_p (label)
2154 rtx label;
2155 {
2156 rtx x;
2157
2158 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2159 if (label == XEXP (x, 0))
2160 return 1;
2161
2162 return 0;
2163 }
2164
2165 /* Blocks A and B are to be merged into a single block A. The insns
2166 are already contiguous, hence `nomove'. */
2167
2168 void
2169 merge_blocks_nomove (a, b)
2170 basic_block a, b;
2171 {
2172 edge e;
2173 rtx b_head, b_end, a_end;
2174 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2175 int b_empty = 0;
2176
2177 /* If there was a CODE_LABEL beginning B, delete it. */
2178 b_head = b->head;
2179 b_end = b->end;
2180 if (GET_CODE (b_head) == CODE_LABEL)
2181 {
2182 /* Detect basic blocks with nothing but a label. This can happen
2183 in particular at the end of a function. */
2184 if (b_head == b_end)
2185 b_empty = 1;
2186 del_first = del_last = b_head;
2187 b_head = NEXT_INSN (b_head);
2188 }
2189
2190 /* Delete the basic block note. */
2191 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2192 {
2193 if (b_head == b_end)
2194 b_empty = 1;
2195 if (! del_last)
2196 del_first = b_head;
2197 del_last = b_head;
2198 b_head = NEXT_INSN (b_head);
2199 }
2200
2201 /* If there was a jump out of A, delete it. */
2202 a_end = a->end;
2203 if (GET_CODE (a_end) == JUMP_INSN)
2204 {
2205 rtx prev;
2206
2207 prev = prev_nonnote_insn (a_end);
2208 if (!prev)
2209 prev = a->head;
2210
2211 del_first = a_end;
2212
2213 #ifdef HAVE_cc0
2214 /* If this was a conditional jump, we need to also delete
2215 the insn that set cc0. */
2216 if (prev && sets_cc0_p (prev))
2217 {
2218 rtx tmp = prev;
2219 prev = prev_nonnote_insn (prev);
2220 if (!prev)
2221 prev = a->head;
2222 del_first = tmp;
2223 }
2224 #endif
2225
2226 a_end = prev;
2227 }
2228 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2229 del_first = NEXT_INSN (a_end);
2230
2231 /* Delete everything marked above as well as crap that might be
2232 hanging out between the two blocks. */
2233 flow_delete_insn_chain (del_first, del_last);
2234
2235 /* Normally there should only be one successor of A and that is B, but
2236 partway though the merge of blocks for conditional_execution we'll
2237 be merging a TEST block with THEN and ELSE successors. Free the
2238 whole lot of them and hope the caller knows what they're doing. */
2239 while (a->succ)
2240 remove_edge (a->succ);
2241
2242 /* Adjust the edges out of B for the new owner. */
2243 for (e = b->succ; e ; e = e->succ_next)
2244 e->src = a;
2245 a->succ = b->succ;
2246
2247 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2248 b->pred = b->succ = NULL;
2249
2250 /* Reassociate the insns of B with A. */
2251 if (!b_empty)
2252 {
2253 if (basic_block_for_insn)
2254 {
2255 BLOCK_FOR_INSN (b_head) = a;
2256 while (b_head != b_end)
2257 {
2258 b_head = NEXT_INSN (b_head);
2259 BLOCK_FOR_INSN (b_head) = a;
2260 }
2261 }
2262 a_end = b_end;
2263 }
2264 a->end = a_end;
2265
2266 expunge_block (b);
2267 }
2268
2269 /* Blocks A and B are to be merged into a single block. A has no incoming
2270 fallthru edge, so it can be moved before B without adding or modifying
2271 any jumps (aside from the jump from A to B). */
2272
2273 static int
2274 merge_blocks_move_predecessor_nojumps (a, b)
2275 basic_block a, b;
2276 {
2277 rtx start, end, barrier;
2278 int index;
2279
2280 start = a->head;
2281 end = a->end;
2282
2283 barrier = next_nonnote_insn (end);
2284 if (GET_CODE (barrier) != BARRIER)
2285 abort ();
2286 flow_delete_insn (barrier);
2287
2288 /* Move block and loop notes out of the chain so that we do not
2289 disturb their order.
2290
2291 ??? A better solution would be to squeeze out all the non-nested notes
2292 and adjust the block trees appropriately. Even better would be to have
2293 a tighter connection between block trees and rtl so that this is not
2294 necessary. */
2295 start = squeeze_notes (start, end);
2296
2297 /* Scramble the insn chain. */
2298 if (end != PREV_INSN (b->head))
2299 reorder_insns (start, end, PREV_INSN (b->head));
2300
2301 if (rtl_dump_file)
2302 {
2303 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2304 a->index, b->index);
2305 }
2306
2307 /* Swap the records for the two blocks around. Although we are deleting B,
2308 A is now where B was and we want to compact the BB array from where
2309 A used to be. */
2310 BASIC_BLOCK(a->index) = b;
2311 BASIC_BLOCK(b->index) = a;
2312 index = a->index;
2313 a->index = b->index;
2314 b->index = index;
2315
2316 /* Now blocks A and B are contiguous. Merge them. */
2317 merge_blocks_nomove (a, b);
2318
2319 return 1;
2320 }
2321
2322 /* Blocks A and B are to be merged into a single block. B has no outgoing
2323 fallthru edge, so it can be moved after A without adding or modifying
2324 any jumps (aside from the jump from A to B). */
2325
2326 static int
2327 merge_blocks_move_successor_nojumps (a, b)
2328 basic_block a, b;
2329 {
2330 rtx start, end, barrier;
2331
2332 start = b->head;
2333 end = b->end;
2334 barrier = NEXT_INSN (end);
2335
2336 /* Recognize a jump table following block B. */
2337 if (GET_CODE (barrier) == CODE_LABEL
2338 && NEXT_INSN (barrier)
2339 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2340 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2341 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2342 {
2343 end = NEXT_INSN (barrier);
2344 barrier = NEXT_INSN (end);
2345 }
2346
2347 /* There had better have been a barrier there. Delete it. */
2348 if (GET_CODE (barrier) != BARRIER)
2349 abort ();
2350 flow_delete_insn (barrier);
2351
2352 /* Move block and loop notes out of the chain so that we do not
2353 disturb their order.
2354
2355 ??? A better solution would be to squeeze out all the non-nested notes
2356 and adjust the block trees appropriately. Even better would be to have
2357 a tighter connection between block trees and rtl so that this is not
2358 necessary. */
2359 start = squeeze_notes (start, end);
2360
2361 /* Scramble the insn chain. */
2362 reorder_insns (start, end, a->end);
2363
2364 /* Now blocks A and B are contiguous. Merge them. */
2365 merge_blocks_nomove (a, b);
2366
2367 if (rtl_dump_file)
2368 {
2369 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2370 b->index, a->index);
2371 }
2372
2373 return 1;
2374 }
2375
2376 /* Attempt to merge basic blocks that are potentially non-adjacent.
2377 Return true iff the attempt succeeded. */
2378
2379 static int
2380 merge_blocks (e, b, c)
2381 edge e;
2382 basic_block b, c;
2383 {
2384 /* If C has a tail recursion label, do not merge. There is no
2385 edge recorded from the call_placeholder back to this label, as
2386 that would make optimize_sibling_and_tail_recursive_calls more
2387 complex for no gain. */
2388 if (GET_CODE (c->head) == CODE_LABEL
2389 && tail_recursion_label_p (c->head))
2390 return 0;
2391
2392 /* If B has a fallthru edge to C, no need to move anything. */
2393 if (e->flags & EDGE_FALLTHRU)
2394 {
2395 merge_blocks_nomove (b, c);
2396
2397 if (rtl_dump_file)
2398 {
2399 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2400 b->index, c->index);
2401 }
2402
2403 return 1;
2404 }
2405 else
2406 {
2407 edge tmp_edge;
2408 basic_block d;
2409 int c_has_outgoing_fallthru;
2410 int b_has_incoming_fallthru;
2411
2412 /* We must make sure to not munge nesting of exception regions,
2413 lexical blocks, and loop notes.
2414
2415 The first is taken care of by requiring that the active eh
2416 region at the end of one block always matches the active eh
2417 region at the beginning of the next block.
2418
2419 The later two are taken care of by squeezing out all the notes. */
2420
2421 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2422 executed and we may want to treat blocks which have two out
2423 edges, one normal, one abnormal as only having one edge for
2424 block merging purposes. */
2425
2426 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2427 if (tmp_edge->flags & EDGE_FALLTHRU)
2428 break;
2429 c_has_outgoing_fallthru = (tmp_edge != NULL);
2430
2431 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2432 if (tmp_edge->flags & EDGE_FALLTHRU)
2433 break;
2434 b_has_incoming_fallthru = (tmp_edge != NULL);
2435
2436 /* If B does not have an incoming fallthru, and the exception regions
2437 match, then it can be moved immediately before C without introducing
2438 or modifying jumps.
2439
2440 C can not be the first block, so we do not have to worry about
2441 accessing a non-existent block. */
2442 d = BASIC_BLOCK (c->index - 1);
2443 if (! b_has_incoming_fallthru
2444 && d->eh_end == b->eh_beg
2445 && b->eh_end == c->eh_beg)
2446 return merge_blocks_move_predecessor_nojumps (b, c);
2447
2448 /* Otherwise, we're going to try to move C after B. Make sure the
2449 exception regions match.
2450
2451 If B is the last basic block, then we must not try to access the
2452 block structure for block B + 1. Luckily in that case we do not
2453 need to worry about matching exception regions. */
2454 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2455 if (b->eh_end == c->eh_beg
2456 && (d == NULL || c->eh_end == d->eh_beg))
2457 {
2458 /* If C does not have an outgoing fallthru, then it can be moved
2459 immediately after B without introducing or modifying jumps. */
2460 if (! c_has_outgoing_fallthru)
2461 return merge_blocks_move_successor_nojumps (b, c);
2462
2463 /* Otherwise, we'll need to insert an extra jump, and possibly
2464 a new block to contain it. */
2465 /* ??? Not implemented yet. */
2466 }
2467
2468 return 0;
2469 }
2470 }
2471
2472 /* Top level driver for merge_blocks. */
2473
2474 static void
2475 try_merge_blocks ()
2476 {
2477 int i;
2478
2479 /* Attempt to merge blocks as made possible by edge removal. If a block
2480 has only one successor, and the successor has only one predecessor,
2481 they may be combined. */
2482
2483 for (i = 0; i < n_basic_blocks; )
2484 {
2485 basic_block c, b = BASIC_BLOCK (i);
2486 edge s;
2487
2488 /* A loop because chains of blocks might be combineable. */
2489 while ((s = b->succ) != NULL
2490 && s->succ_next == NULL
2491 && (s->flags & EDGE_EH) == 0
2492 && (c = s->dest) != EXIT_BLOCK_PTR
2493 && c->pred->pred_next == NULL
2494 /* If the jump insn has side effects, we can't kill the edge. */
2495 && (GET_CODE (b->end) != JUMP_INSN
2496 || onlyjump_p (b->end))
2497 && merge_blocks (s, b, c))
2498 continue;
2499
2500 /* Don't get confused by the index shift caused by deleting blocks. */
2501 i = b->index + 1;
2502 }
2503 }
2504
2505 /* The given edge should potentially be a fallthru edge. If that is in
2506 fact true, delete the jump and barriers that are in the way. */
2507
2508 void
2509 tidy_fallthru_edge (e, b, c)
2510 edge e;
2511 basic_block b, c;
2512 {
2513 rtx q;
2514
2515 /* ??? In a late-running flow pass, other folks may have deleted basic
2516 blocks by nopping out blocks, leaving multiple BARRIERs between here
2517 and the target label. They ought to be chastized and fixed.
2518
2519 We can also wind up with a sequence of undeletable labels between
2520 one block and the next.
2521
2522 So search through a sequence of barriers, labels, and notes for
2523 the head of block C and assert that we really do fall through. */
2524
2525 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2526 return;
2527
2528 /* Remove what will soon cease being the jump insn from the source block.
2529 If block B consisted only of this single jump, turn it into a deleted
2530 note. */
2531 q = b->end;
2532 if (GET_CODE (q) == JUMP_INSN
2533 && onlyjump_p (q)
2534 && (any_uncondjump_p (q)
2535 || (b->succ == e && e->succ_next == NULL)))
2536 {
2537 #ifdef HAVE_cc0
2538 /* If this was a conditional jump, we need to also delete
2539 the insn that set cc0. */
2540 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2541 q = PREV_INSN (q);
2542 #endif
2543
2544 if (b->head == q)
2545 {
2546 PUT_CODE (q, NOTE);
2547 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2548 NOTE_SOURCE_FILE (q) = 0;
2549 }
2550 else
2551 b->end = q = PREV_INSN (q);
2552 }
2553
2554 /* Selectively unlink the sequence. */
2555 if (q != PREV_INSN (c->head))
2556 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2557
2558 e->flags |= EDGE_FALLTHRU;
2559 }
2560
2561 /* Fix up edges that now fall through, or rather should now fall through
2562 but previously required a jump around now deleted blocks. Simplify
2563 the search by only examining blocks numerically adjacent, since this
2564 is how find_basic_blocks created them. */
2565
2566 static void
2567 tidy_fallthru_edges ()
2568 {
2569 int i;
2570
2571 for (i = 1; i < n_basic_blocks; ++i)
2572 {
2573 basic_block b = BASIC_BLOCK (i - 1);
2574 basic_block c = BASIC_BLOCK (i);
2575 edge s;
2576
2577 /* We care about simple conditional or unconditional jumps with
2578 a single successor.
2579
2580 If we had a conditional branch to the next instruction when
2581 find_basic_blocks was called, then there will only be one
2582 out edge for the block which ended with the conditional
2583 branch (since we do not create duplicate edges).
2584
2585 Furthermore, the edge will be marked as a fallthru because we
2586 merge the flags for the duplicate edges. So we do not want to
2587 check that the edge is not a FALLTHRU edge. */
2588 if ((s = b->succ) != NULL
2589 && s->succ_next == NULL
2590 && s->dest == c
2591 /* If the jump insn has side effects, we can't tidy the edge. */
2592 && (GET_CODE (b->end) != JUMP_INSN
2593 || onlyjump_p (b->end)))
2594 tidy_fallthru_edge (s, b, c);
2595 }
2596 }
2597 \f
2598 /* Perform data flow analysis.
2599 F is the first insn of the function; FLAGS is a set of PROP_* flags
2600 to be used in accumulating flow info. */
2601
2602 void
2603 life_analysis (f, file, flags)
2604 rtx f;
2605 FILE *file;
2606 int flags;
2607 {
2608 #ifdef ELIMINABLE_REGS
2609 register int i;
2610 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2611 #endif
2612
2613 /* Record which registers will be eliminated. We use this in
2614 mark_used_regs. */
2615
2616 CLEAR_HARD_REG_SET (elim_reg_set);
2617
2618 #ifdef ELIMINABLE_REGS
2619 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2620 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2621 #else
2622 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2623 #endif
2624
2625 if (! optimize)
2626 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2627
2628 /* The post-reload life analysis have (on a global basis) the same
2629 registers live as was computed by reload itself. elimination
2630 Otherwise offsets and such may be incorrect.
2631
2632 Reload will make some registers as live even though they do not
2633 appear in the rtl.
2634
2635 We don't want to create new auto-incs after reload, since they
2636 are unlikely to be useful and can cause problems with shared
2637 stack slots. */
2638 if (reload_completed)
2639 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2640
2641 /* We want alias analysis information for local dead store elimination. */
2642 if (flags & PROP_SCAN_DEAD_CODE)
2643 init_alias_analysis ();
2644
2645 /* Always remove no-op moves. Do this before other processing so
2646 that we don't have to keep re-scanning them. */
2647 delete_noop_moves (f);
2648
2649 /* Some targets can emit simpler epilogues if they know that sp was
2650 not ever modified during the function. After reload, of course,
2651 we've already emitted the epilogue so there's no sense searching. */
2652 if (! reload_completed)
2653 notice_stack_pointer_modification (f);
2654
2655 /* Allocate and zero out data structures that will record the
2656 data from lifetime analysis. */
2657 allocate_reg_life_data ();
2658 allocate_bb_life_data ();
2659
2660 /* Find the set of registers live on function exit. */
2661 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2662
2663 /* "Update" life info from zero. It'd be nice to begin the
2664 relaxation with just the exit and noreturn blocks, but that set
2665 is not immediately handy. */
2666
2667 if (flags & PROP_REG_INFO)
2668 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2669 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2670
2671 /* Clean up. */
2672 if (flags & PROP_SCAN_DEAD_CODE)
2673 end_alias_analysis ();
2674
2675 if (file)
2676 dump_flow_info (file);
2677
2678 free_basic_block_vars (1);
2679 }
2680
2681 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2682 Search for REGNO. If found, abort if it is not wider than word_mode. */
2683
2684 static int
2685 verify_wide_reg_1 (px, pregno)
2686 rtx *px;
2687 void *pregno;
2688 {
2689 rtx x = *px;
2690 unsigned int regno = *(int *) pregno;
2691
2692 if (GET_CODE (x) == REG && REGNO (x) == regno)
2693 {
2694 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2695 abort ();
2696 return 1;
2697 }
2698 return 0;
2699 }
2700
2701 /* A subroutine of verify_local_live_at_start. Search through insns
2702 between HEAD and END looking for register REGNO. */
2703
2704 static void
2705 verify_wide_reg (regno, head, end)
2706 int regno;
2707 rtx head, end;
2708 {
2709 while (1)
2710 {
2711 if (INSN_P (head)
2712 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2713 return;
2714 if (head == end)
2715 break;
2716 head = NEXT_INSN (head);
2717 }
2718
2719 /* We didn't find the register at all. Something's way screwy. */
2720 abort ();
2721 }
2722
2723 /* A subroutine of update_life_info. Verify that there are no untoward
2724 changes in live_at_start during a local update. */
2725
2726 static void
2727 verify_local_live_at_start (new_live_at_start, bb)
2728 regset new_live_at_start;
2729 basic_block bb;
2730 {
2731 if (reload_completed)
2732 {
2733 /* After reload, there are no pseudos, nor subregs of multi-word
2734 registers. The regsets should exactly match. */
2735 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2736 abort ();
2737 }
2738 else
2739 {
2740 int i;
2741
2742 /* Find the set of changed registers. */
2743 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2744
2745 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2746 {
2747 /* No registers should die. */
2748 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2749 abort ();
2750 /* Verify that the now-live register is wider than word_mode. */
2751 verify_wide_reg (i, bb->head, bb->end);
2752 });
2753 }
2754 }
2755
2756 /* Updates life information starting with the basic blocks set in BLOCKS.
2757 If BLOCKS is null, consider it to be the universal set.
2758
2759 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2760 we are only expecting local modifications to basic blocks. If we find
2761 extra registers live at the beginning of a block, then we either killed
2762 useful data, or we have a broken split that wants data not provided.
2763 If we find registers removed from live_at_start, that means we have
2764 a broken peephole that is killing a register it shouldn't.
2765
2766 ??? This is not true in one situation -- when a pre-reload splitter
2767 generates subregs of a multi-word pseudo, current life analysis will
2768 lose the kill. So we _can_ have a pseudo go live. How irritating.
2769
2770 Including PROP_REG_INFO does not properly refresh regs_ever_live
2771 unless the caller resets it to zero. */
2772
2773 void
2774 update_life_info (blocks, extent, prop_flags)
2775 sbitmap blocks;
2776 enum update_life_extent extent;
2777 int prop_flags;
2778 {
2779 regset tmp;
2780 regset_head tmp_head;
2781 int i;
2782
2783 tmp = INITIALIZE_REG_SET (tmp_head);
2784
2785 /* For a global update, we go through the relaxation process again. */
2786 if (extent != UPDATE_LIFE_LOCAL)
2787 {
2788 calculate_global_regs_live (blocks, blocks,
2789 prop_flags & PROP_SCAN_DEAD_CODE);
2790
2791 /* If asked, remove notes from the blocks we'll update. */
2792 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2793 count_or_remove_death_notes (blocks, 1);
2794 }
2795
2796 if (blocks)
2797 {
2798 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2799 {
2800 basic_block bb = BASIC_BLOCK (i);
2801
2802 COPY_REG_SET (tmp, bb->global_live_at_end);
2803 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2804
2805 if (extent == UPDATE_LIFE_LOCAL)
2806 verify_local_live_at_start (tmp, bb);
2807 });
2808 }
2809 else
2810 {
2811 for (i = n_basic_blocks - 1; i >= 0; --i)
2812 {
2813 basic_block bb = BASIC_BLOCK (i);
2814
2815 COPY_REG_SET (tmp, bb->global_live_at_end);
2816 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2817
2818 if (extent == UPDATE_LIFE_LOCAL)
2819 verify_local_live_at_start (tmp, bb);
2820 }
2821 }
2822
2823 FREE_REG_SET (tmp);
2824
2825 if (prop_flags & PROP_REG_INFO)
2826 {
2827 /* The only pseudos that are live at the beginning of the function
2828 are those that were not set anywhere in the function. local-alloc
2829 doesn't know how to handle these correctly, so mark them as not
2830 local to any one basic block. */
2831 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2832 FIRST_PSEUDO_REGISTER, i,
2833 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2834
2835 /* We have a problem with any pseudoreg that lives across the setjmp.
2836 ANSI says that if a user variable does not change in value between
2837 the setjmp and the longjmp, then the longjmp preserves it. This
2838 includes longjmp from a place where the pseudo appears dead.
2839 (In principle, the value still exists if it is in scope.)
2840 If the pseudo goes in a hard reg, some other value may occupy
2841 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2842 Conclusion: such a pseudo must not go in a hard reg. */
2843 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2844 FIRST_PSEUDO_REGISTER, i,
2845 {
2846 if (regno_reg_rtx[i] != 0)
2847 {
2848 REG_LIVE_LENGTH (i) = -1;
2849 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2850 }
2851 });
2852 }
2853 }
2854
2855 /* Free the variables allocated by find_basic_blocks.
2856
2857 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2858
2859 void
2860 free_basic_block_vars (keep_head_end_p)
2861 int keep_head_end_p;
2862 {
2863 if (basic_block_for_insn)
2864 {
2865 VARRAY_FREE (basic_block_for_insn);
2866 basic_block_for_insn = NULL;
2867 }
2868
2869 if (! keep_head_end_p)
2870 {
2871 clear_edges ();
2872 VARRAY_FREE (basic_block_info);
2873 n_basic_blocks = 0;
2874
2875 ENTRY_BLOCK_PTR->aux = NULL;
2876 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2877 EXIT_BLOCK_PTR->aux = NULL;
2878 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2879 }
2880 }
2881
2882 /* Return nonzero if the destination of SET equals the source. */
2883 static int
2884 set_noop_p (set)
2885 rtx set;
2886 {
2887 rtx src = SET_SRC (set);
2888 rtx dst = SET_DEST (set);
2889
2890 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2891 {
2892 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2893 return 0;
2894 src = SUBREG_REG (src);
2895 dst = SUBREG_REG (dst);
2896 }
2897
2898 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2899 && REGNO (src) == REGNO (dst));
2900 }
2901
2902 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2903 value to itself. */
2904 static int
2905 noop_move_p (insn)
2906 rtx insn;
2907 {
2908 rtx pat = PATTERN (insn);
2909
2910 /* Insns carrying these notes are useful later on. */
2911 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2912 return 0;
2913
2914 if (GET_CODE (pat) == SET && set_noop_p (pat))
2915 return 1;
2916
2917 if (GET_CODE (pat) == PARALLEL)
2918 {
2919 int i;
2920 /* If nothing but SETs of registers to themselves,
2921 this insn can also be deleted. */
2922 for (i = 0; i < XVECLEN (pat, 0); i++)
2923 {
2924 rtx tem = XVECEXP (pat, 0, i);
2925
2926 if (GET_CODE (tem) == USE
2927 || GET_CODE (tem) == CLOBBER)
2928 continue;
2929
2930 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2931 return 0;
2932 }
2933
2934 return 1;
2935 }
2936 return 0;
2937 }
2938
2939 /* Delete any insns that copy a register to itself. */
2940
2941 static void
2942 delete_noop_moves (f)
2943 rtx f;
2944 {
2945 rtx insn;
2946 for (insn = f; insn; insn = NEXT_INSN (insn))
2947 {
2948 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2949 {
2950 PUT_CODE (insn, NOTE);
2951 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2952 NOTE_SOURCE_FILE (insn) = 0;
2953 }
2954 }
2955 }
2956
2957 /* Determine if the stack pointer is constant over the life of the function.
2958 Only useful before prologues have been emitted. */
2959
2960 static void
2961 notice_stack_pointer_modification_1 (x, pat, data)
2962 rtx x;
2963 rtx pat ATTRIBUTE_UNUSED;
2964 void *data ATTRIBUTE_UNUSED;
2965 {
2966 if (x == stack_pointer_rtx
2967 /* The stack pointer is only modified indirectly as the result
2968 of a push until later in flow. See the comments in rtl.texi
2969 regarding Embedded Side-Effects on Addresses. */
2970 || (GET_CODE (x) == MEM
2971 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2972 || GET_CODE (XEXP (x, 0)) == PRE_INC
2973 || GET_CODE (XEXP (x, 0)) == POST_DEC
2974 || GET_CODE (XEXP (x, 0)) == POST_INC)
2975 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2976 current_function_sp_is_unchanging = 0;
2977 }
2978
2979 static void
2980 notice_stack_pointer_modification (f)
2981 rtx f;
2982 {
2983 rtx insn;
2984
2985 /* Assume that the stack pointer is unchanging if alloca hasn't
2986 been used. */
2987 current_function_sp_is_unchanging = !current_function_calls_alloca;
2988 if (! current_function_sp_is_unchanging)
2989 return;
2990
2991 for (insn = f; insn; insn = NEXT_INSN (insn))
2992 {
2993 if (INSN_P (insn))
2994 {
2995 /* Check if insn modifies the stack pointer. */
2996 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2997 NULL);
2998 if (! current_function_sp_is_unchanging)
2999 return;
3000 }
3001 }
3002 }
3003
3004 /* Mark a register in SET. Hard registers in large modes get all
3005 of their component registers set as well. */
3006 static void
3007 mark_reg (reg, xset)
3008 rtx reg;
3009 void *xset;
3010 {
3011 regset set = (regset) xset;
3012 int regno = REGNO (reg);
3013
3014 if (GET_MODE (reg) == BLKmode)
3015 abort ();
3016
3017 SET_REGNO_REG_SET (set, regno);
3018 if (regno < FIRST_PSEUDO_REGISTER)
3019 {
3020 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3021 while (--n > 0)
3022 SET_REGNO_REG_SET (set, regno + n);
3023 }
3024 }
3025
3026 /* Mark those regs which are needed at the end of the function as live
3027 at the end of the last basic block. */
3028 static void
3029 mark_regs_live_at_end (set)
3030 regset set;
3031 {
3032 int i;
3033
3034 /* If exiting needs the right stack value, consider the stack pointer
3035 live at the end of the function. */
3036 if ((HAVE_epilogue && reload_completed)
3037 || ! EXIT_IGNORE_STACK
3038 || (! FRAME_POINTER_REQUIRED
3039 && ! current_function_calls_alloca
3040 && flag_omit_frame_pointer)
3041 || current_function_sp_is_unchanging)
3042 {
3043 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3044 }
3045
3046 /* Mark the frame pointer if needed at the end of the function. If
3047 we end up eliminating it, it will be removed from the live list
3048 of each basic block by reload. */
3049
3050 if (! reload_completed || frame_pointer_needed)
3051 {
3052 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3053 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3054 /* If they are different, also mark the hard frame pointer as live */
3055 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3056 #endif
3057 }
3058
3059 #ifdef PIC_OFFSET_TABLE_REGNUM
3060 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3061 /* Many architectures have a GP register even without flag_pic.
3062 Assume the pic register is not in use, or will be handled by
3063 other means, if it is not fixed. */
3064 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3065 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3066 #endif
3067 #endif
3068
3069 /* Mark all global registers, and all registers used by the epilogue
3070 as being live at the end of the function since they may be
3071 referenced by our caller. */
3072 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3073 if (global_regs[i]
3074 #ifdef EPILOGUE_USES
3075 || EPILOGUE_USES (i)
3076 #endif
3077 )
3078 SET_REGNO_REG_SET (set, i);
3079
3080 /* Mark all call-saved registers that we actaully used. */
3081 if (HAVE_epilogue && reload_completed)
3082 {
3083 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3084 if (! call_used_regs[i] && regs_ever_live[i])
3085 SET_REGNO_REG_SET (set, i);
3086 }
3087
3088 /* Mark function return value. */
3089 diddle_return_value (mark_reg, set);
3090 }
3091
3092 /* Callback function for for_each_successor_phi. DATA is a regset.
3093 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3094 INSN, in the regset. */
3095
3096 static int
3097 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3098 rtx insn ATTRIBUTE_UNUSED;
3099 int dest_regno ATTRIBUTE_UNUSED;
3100 int src_regno;
3101 void *data;
3102 {
3103 regset live = (regset) data;
3104 SET_REGNO_REG_SET (live, src_regno);
3105 return 0;
3106 }
3107
3108 /* Propagate global life info around the graph of basic blocks. Begin
3109 considering blocks with their corresponding bit set in BLOCKS_IN.
3110 If BLOCKS_IN is null, consider it the universal set.
3111
3112 BLOCKS_OUT is set for every block that was changed. */
3113
3114 static void
3115 calculate_global_regs_live (blocks_in, blocks_out, flags)
3116 sbitmap blocks_in, blocks_out;
3117 int flags;
3118 {
3119 basic_block *queue, *qhead, *qtail, *qend;
3120 regset tmp, new_live_at_end;
3121 regset_head tmp_head;
3122 regset_head new_live_at_end_head;
3123 int i;
3124
3125 tmp = INITIALIZE_REG_SET (tmp_head);
3126 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3127
3128 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3129 because the `head == tail' style test for an empty queue doesn't
3130 work with a full queue. */
3131 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3132 qtail = queue;
3133 qhead = qend = queue + n_basic_blocks + 2;
3134
3135 /* Clear out the garbage that might be hanging out in bb->aux. */
3136 for (i = n_basic_blocks - 1; i >= 0; --i)
3137 BASIC_BLOCK (i)->aux = NULL;
3138
3139 /* Queue the blocks set in the initial mask. Do this in reverse block
3140 number order so that we are more likely for the first round to do
3141 useful work. We use AUX non-null to flag that the block is queued. */
3142 if (blocks_in)
3143 {
3144 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3145 {
3146 basic_block bb = BASIC_BLOCK (i);
3147 *--qhead = bb;
3148 bb->aux = bb;
3149 });
3150 }
3151 else
3152 {
3153 for (i = 0; i < n_basic_blocks; ++i)
3154 {
3155 basic_block bb = BASIC_BLOCK (i);
3156 *--qhead = bb;
3157 bb->aux = bb;
3158 }
3159 }
3160
3161 if (blocks_out)
3162 sbitmap_zero (blocks_out);
3163
3164 while (qhead != qtail)
3165 {
3166 int rescan, changed;
3167 basic_block bb;
3168 edge e;
3169
3170 bb = *qhead++;
3171 if (qhead == qend)
3172 qhead = queue;
3173 bb->aux = NULL;
3174
3175 /* Begin by propogating live_at_start from the successor blocks. */
3176 CLEAR_REG_SET (new_live_at_end);
3177 for (e = bb->succ; e ; e = e->succ_next)
3178 {
3179 basic_block sb = e->dest;
3180 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3181 }
3182
3183 /* Force the stack pointer to be live -- which might not already be
3184 the case for blocks within infinite loops. */
3185 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3186
3187 /* Regs used in phi nodes are not included in
3188 global_live_at_start, since they are live only along a
3189 particular edge. Set those regs that are live because of a
3190 phi node alternative corresponding to this particular block. */
3191 if (in_ssa_form)
3192 for_each_successor_phi (bb, &set_phi_alternative_reg,
3193 new_live_at_end);
3194
3195 if (bb == ENTRY_BLOCK_PTR)
3196 {
3197 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3198 continue;
3199 }
3200
3201 /* On our first pass through this block, we'll go ahead and continue.
3202 Recognize first pass by local_set NULL. On subsequent passes, we
3203 get to skip out early if live_at_end wouldn't have changed. */
3204
3205 if (bb->local_set == NULL)
3206 {
3207 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3208 rescan = 1;
3209 }
3210 else
3211 {
3212 /* If any bits were removed from live_at_end, we'll have to
3213 rescan the block. This wouldn't be necessary if we had
3214 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3215 local_live is really dependent on live_at_end. */
3216 CLEAR_REG_SET (tmp);
3217 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3218 new_live_at_end, BITMAP_AND_COMPL);
3219
3220 if (! rescan)
3221 {
3222 /* Find the set of changed bits. Take this opportunity
3223 to notice that this set is empty and early out. */
3224 CLEAR_REG_SET (tmp);
3225 changed = bitmap_operation (tmp, bb->global_live_at_end,
3226 new_live_at_end, BITMAP_XOR);
3227 if (! changed)
3228 continue;
3229
3230 /* If any of the changed bits overlap with local_set,
3231 we'll have to rescan the block. Detect overlap by
3232 the AND with ~local_set turning off bits. */
3233 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3234 BITMAP_AND_COMPL);
3235 }
3236 }
3237
3238 /* Let our caller know that BB changed enough to require its
3239 death notes updated. */
3240 if (blocks_out)
3241 SET_BIT (blocks_out, bb->index);
3242
3243 if (! rescan)
3244 {
3245 /* Add to live_at_start the set of all registers in
3246 new_live_at_end that aren't in the old live_at_end. */
3247
3248 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3249 BITMAP_AND_COMPL);
3250 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3251
3252 changed = bitmap_operation (bb->global_live_at_start,
3253 bb->global_live_at_start,
3254 tmp, BITMAP_IOR);
3255 if (! changed)
3256 continue;
3257 }
3258 else
3259 {
3260 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3261
3262 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3263 into live_at_start. */
3264 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3265
3266 /* If live_at start didn't change, no need to go farther. */
3267 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3268 continue;
3269
3270 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3271 }
3272
3273 /* Queue all predecessors of BB so that we may re-examine
3274 their live_at_end. */
3275 for (e = bb->pred; e ; e = e->pred_next)
3276 {
3277 basic_block pb = e->src;
3278 if (pb->aux == NULL)
3279 {
3280 *qtail++ = pb;
3281 if (qtail == qend)
3282 qtail = queue;
3283 pb->aux = pb;
3284 }
3285 }
3286 }
3287
3288 FREE_REG_SET (tmp);
3289 FREE_REG_SET (new_live_at_end);
3290
3291 if (blocks_out)
3292 {
3293 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3294 {
3295 basic_block bb = BASIC_BLOCK (i);
3296 FREE_REG_SET (bb->local_set);
3297 });
3298 }
3299 else
3300 {
3301 for (i = n_basic_blocks - 1; i >= 0; --i)
3302 {
3303 basic_block bb = BASIC_BLOCK (i);
3304 FREE_REG_SET (bb->local_set);
3305 }
3306 }
3307
3308 free (queue);
3309 }
3310 \f
3311 /* Subroutines of life analysis. */
3312
3313 /* Allocate the permanent data structures that represent the results
3314 of life analysis. Not static since used also for stupid life analysis. */
3315
3316 void
3317 allocate_bb_life_data ()
3318 {
3319 register int i;
3320
3321 for (i = 0; i < n_basic_blocks; i++)
3322 {
3323 basic_block bb = BASIC_BLOCK (i);
3324
3325 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3326 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3327 }
3328
3329 ENTRY_BLOCK_PTR->global_live_at_end
3330 = OBSTACK_ALLOC_REG_SET (function_obstack);
3331 EXIT_BLOCK_PTR->global_live_at_start
3332 = OBSTACK_ALLOC_REG_SET (function_obstack);
3333
3334 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3335 }
3336
3337 void
3338 allocate_reg_life_data ()
3339 {
3340 int i;
3341
3342 max_regno = max_reg_num ();
3343
3344 /* Recalculate the register space, in case it has grown. Old style
3345 vector oriented regsets would set regset_{size,bytes} here also. */
3346 allocate_reg_info (max_regno, FALSE, FALSE);
3347
3348 /* Reset all the data we'll collect in propagate_block and its
3349 subroutines. */
3350 for (i = 0; i < max_regno; i++)
3351 {
3352 REG_N_SETS (i) = 0;
3353 REG_N_REFS (i) = 0;
3354 REG_N_DEATHS (i) = 0;
3355 REG_N_CALLS_CROSSED (i) = 0;
3356 REG_LIVE_LENGTH (i) = 0;
3357 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3358 }
3359 }
3360
3361 /* Delete dead instructions for propagate_block. */
3362
3363 static void
3364 propagate_block_delete_insn (bb, insn)
3365 basic_block bb;
3366 rtx insn;
3367 {
3368 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3369
3370 /* If the insn referred to a label, and that label was attached to
3371 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3372 pretty much mandatory to delete it, because the ADDR_VEC may be
3373 referencing labels that no longer exist. */
3374
3375 if (inote)
3376 {
3377 rtx label = XEXP (inote, 0);
3378 rtx next;
3379
3380 if (LABEL_NUSES (label) == 1
3381 && (next = next_nonnote_insn (label)) != NULL
3382 && GET_CODE (next) == JUMP_INSN
3383 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3384 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3385 {
3386 rtx pat = PATTERN (next);
3387 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3388 int len = XVECLEN (pat, diff_vec_p);
3389 int i;
3390
3391 for (i = 0; i < len; i++)
3392 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3393
3394 flow_delete_insn (next);
3395 }
3396 }
3397
3398 if (bb->end == insn)
3399 bb->end = PREV_INSN (insn);
3400 flow_delete_insn (insn);
3401 }
3402
3403 /* Delete dead libcalls for propagate_block. Return the insn
3404 before the libcall. */
3405
3406 static rtx
3407 propagate_block_delete_libcall (bb, insn, note)
3408 basic_block bb;
3409 rtx insn, note;
3410 {
3411 rtx first = XEXP (note, 0);
3412 rtx before = PREV_INSN (first);
3413
3414 if (insn == bb->end)
3415 bb->end = before;
3416
3417 flow_delete_insn_chain (first, insn);
3418 return before;
3419 }
3420
3421 /* Update the life-status of regs for one insn. Return the previous insn. */
3422
3423 rtx
3424 propagate_one_insn (pbi, insn)
3425 struct propagate_block_info *pbi;
3426 rtx insn;
3427 {
3428 rtx prev = PREV_INSN (insn);
3429 int flags = pbi->flags;
3430 int insn_is_dead = 0;
3431 int libcall_is_dead = 0;
3432 rtx note;
3433 int i;
3434
3435 if (! INSN_P (insn))
3436 return prev;
3437
3438 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3439 if (flags & PROP_SCAN_DEAD_CODE)
3440 {
3441 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3442 REG_NOTES (insn));
3443 libcall_is_dead = (insn_is_dead && note != 0
3444 && libcall_dead_p (pbi, note, insn));
3445 }
3446
3447 /* We almost certainly don't want to delete prologue or epilogue
3448 instructions. Warn about probable compiler losage. */
3449 if (insn_is_dead
3450 && reload_completed
3451 && (((HAVE_epilogue || HAVE_prologue)
3452 && prologue_epilogue_contains (insn))
3453 || (HAVE_sibcall_epilogue
3454 && sibcall_epilogue_contains (insn)))
3455 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3456 {
3457 if (flags & PROP_KILL_DEAD_CODE)
3458 {
3459 warning ("ICE: would have deleted prologue/epilogue insn");
3460 if (!inhibit_warnings)
3461 debug_rtx (insn);
3462 }
3463 libcall_is_dead = insn_is_dead = 0;
3464 }
3465
3466 /* If an instruction consists of just dead store(s) on final pass,
3467 delete it. */
3468 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3469 {
3470 /* Record sets. Do this even for dead instructions, since they
3471 would have killed the values if they hadn't been deleted. */
3472 mark_set_regs (pbi, PATTERN (insn), insn);
3473
3474 /* CC0 is now known to be dead. Either this insn used it,
3475 in which case it doesn't anymore, or clobbered it,
3476 so the next insn can't use it. */
3477 pbi->cc0_live = 0;
3478
3479 if (libcall_is_dead)
3480 {
3481 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3482 insn = NEXT_INSN (prev);
3483 }
3484 else
3485 propagate_block_delete_insn (pbi->bb, insn);
3486
3487 return prev;
3488 }
3489
3490 /* See if this is an increment or decrement that can be merged into
3491 a following memory address. */
3492 #ifdef AUTO_INC_DEC
3493 {
3494 register rtx x = single_set (insn);
3495
3496 /* Does this instruction increment or decrement a register? */
3497 if ((flags & PROP_AUTOINC)
3498 && x != 0
3499 && GET_CODE (SET_DEST (x)) == REG
3500 && (GET_CODE (SET_SRC (x)) == PLUS
3501 || GET_CODE (SET_SRC (x)) == MINUS)
3502 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3503 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3504 /* Ok, look for a following memory ref we can combine with.
3505 If one is found, change the memory ref to a PRE_INC
3506 or PRE_DEC, cancel this insn, and return 1.
3507 Return 0 if nothing has been done. */
3508 && try_pre_increment_1 (pbi, insn))
3509 return prev;
3510 }
3511 #endif /* AUTO_INC_DEC */
3512
3513 CLEAR_REG_SET (pbi->new_set);
3514
3515 /* If this is not the final pass, and this insn is copying the value of
3516 a library call and it's dead, don't scan the insns that perform the
3517 library call, so that the call's arguments are not marked live. */
3518 if (libcall_is_dead)
3519 {
3520 /* Record the death of the dest reg. */
3521 mark_set_regs (pbi, PATTERN (insn), insn);
3522
3523 insn = XEXP (note, 0);
3524 return PREV_INSN (insn);
3525 }
3526 else if (GET_CODE (PATTERN (insn)) == SET
3527 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3528 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3529 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3530 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3531 /* We have an insn to pop a constant amount off the stack.
3532 (Such insns use PLUS regardless of the direction of the stack,
3533 and any insn to adjust the stack by a constant is always a pop.)
3534 These insns, if not dead stores, have no effect on life. */
3535 ;
3536 else
3537 {
3538 /* Any regs live at the time of a call instruction must not go
3539 in a register clobbered by calls. Find all regs now live and
3540 record this for them. */
3541
3542 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3543 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3544 { REG_N_CALLS_CROSSED (i)++; });
3545
3546 /* Record sets. Do this even for dead instructions, since they
3547 would have killed the values if they hadn't been deleted. */
3548 mark_set_regs (pbi, PATTERN (insn), insn);
3549
3550 if (GET_CODE (insn) == CALL_INSN)
3551 {
3552 register int i;
3553 rtx note, cond;
3554
3555 cond = NULL_RTX;
3556 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3557 cond = COND_EXEC_TEST (PATTERN (insn));
3558
3559 /* Non-constant calls clobber memory. */
3560 if (! CONST_CALL_P (insn))
3561 free_EXPR_LIST_list (&pbi->mem_set_list);
3562
3563 /* There may be extra registers to be clobbered. */
3564 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3565 note;
3566 note = XEXP (note, 1))
3567 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3568 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3569 cond, insn, pbi->flags);
3570
3571 /* Calls change all call-used and global registers. */
3572 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3573 if (call_used_regs[i] && ! global_regs[i]
3574 && ! fixed_regs[i])
3575 {
3576 /* We do not want REG_UNUSED notes for these registers. */
3577 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3578 cond, insn,
3579 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3580 }
3581 }
3582
3583 /* If an insn doesn't use CC0, it becomes dead since we assume
3584 that every insn clobbers it. So show it dead here;
3585 mark_used_regs will set it live if it is referenced. */
3586 pbi->cc0_live = 0;
3587
3588 /* Record uses. */
3589 if (! insn_is_dead)
3590 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3591
3592 /* Sometimes we may have inserted something before INSN (such as a move)
3593 when we make an auto-inc. So ensure we will scan those insns. */
3594 #ifdef AUTO_INC_DEC
3595 prev = PREV_INSN (insn);
3596 #endif
3597
3598 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3599 {
3600 register int i;
3601 rtx note, cond;
3602
3603 cond = NULL_RTX;
3604 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3605 cond = COND_EXEC_TEST (PATTERN (insn));
3606
3607 /* Calls use their arguments. */
3608 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3609 note;
3610 note = XEXP (note, 1))
3611 if (GET_CODE (XEXP (note, 0)) == USE)
3612 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3613 cond, insn);
3614
3615 /* The stack ptr is used (honorarily) by a CALL insn. */
3616 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3617
3618 /* Calls may also reference any of the global registers,
3619 so they are made live. */
3620 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3621 if (global_regs[i])
3622 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3623 cond, insn);
3624 }
3625 }
3626
3627 /* On final pass, update counts of how many insns in which each reg
3628 is live. */
3629 if (flags & PROP_REG_INFO)
3630 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3631 { REG_LIVE_LENGTH (i)++; });
3632
3633 return prev;
3634 }
3635
3636 /* Initialize a propagate_block_info struct for public consumption.
3637 Note that the structure itself is opaque to this file, but that
3638 the user can use the regsets provided here. */
3639
3640 struct propagate_block_info *
3641 init_propagate_block_info (bb, live, local_set, flags)
3642 basic_block bb;
3643 regset live;
3644 regset local_set;
3645 int flags;
3646 {
3647 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3648
3649 pbi->bb = bb;
3650 pbi->reg_live = live;
3651 pbi->mem_set_list = NULL_RTX;
3652 pbi->local_set = local_set;
3653 pbi->cc0_live = 0;
3654 pbi->flags = flags;
3655
3656 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3657 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3658 else
3659 pbi->reg_next_use = NULL;
3660
3661 pbi->new_set = BITMAP_XMALLOC ();
3662
3663 #ifdef HAVE_conditional_execution
3664 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3665 free_reg_cond_life_info);
3666 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3667
3668 /* If this block ends in a conditional branch, for each register live
3669 from one side of the branch and not the other, record the register
3670 as conditionally dead. */
3671 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3672 && GET_CODE (bb->end) == JUMP_INSN
3673 && any_condjump_p (bb->end))
3674 {
3675 regset_head diff_head;
3676 regset diff = INITIALIZE_REG_SET (diff_head);
3677 basic_block bb_true, bb_false;
3678 rtx cond_true, cond_false, set_src;
3679 int i;
3680
3681 /* Identify the successor blocks. */
3682 bb_true = bb->succ->dest;
3683 if (bb->succ->succ_next != NULL)
3684 {
3685 bb_false = bb->succ->succ_next->dest;
3686
3687 if (bb->succ->flags & EDGE_FALLTHRU)
3688 {
3689 basic_block t = bb_false;
3690 bb_false = bb_true;
3691 bb_true = t;
3692 }
3693 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3694 abort ();
3695 }
3696 else
3697 {
3698 /* This can happen with a conditional jump to the next insn. */
3699 if (JUMP_LABEL (bb->end) != bb_true->head)
3700 abort ();
3701
3702 /* Simplest way to do nothing. */
3703 bb_false = bb_true;
3704 }
3705
3706 /* Extract the condition from the branch. */
3707 set_src = SET_SRC (pc_set (bb->end));
3708 cond_true = XEXP (set_src, 0);
3709 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3710 GET_MODE (cond_true), XEXP (cond_true, 0),
3711 XEXP (cond_true, 1));
3712 if (GET_CODE (XEXP (set_src, 1)) == PC)
3713 {
3714 rtx t = cond_false;
3715 cond_false = cond_true;
3716 cond_true = t;
3717 }
3718
3719 /* Compute which register lead different lives in the successors. */
3720 if (bitmap_operation (diff, bb_true->global_live_at_start,
3721 bb_false->global_live_at_start, BITMAP_XOR))
3722 {
3723 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3724 abort ();
3725 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3726
3727 /* For each such register, mark it conditionally dead. */
3728 EXECUTE_IF_SET_IN_REG_SET
3729 (diff, 0, i,
3730 {
3731 struct reg_cond_life_info *rcli;
3732 rtx cond;
3733
3734 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3735
3736 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3737 cond = cond_false;
3738 else
3739 cond = cond_true;
3740 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3741
3742 splay_tree_insert (pbi->reg_cond_dead, i,
3743 (splay_tree_value) rcli);
3744 });
3745 }
3746
3747 FREE_REG_SET (diff);
3748 }
3749 #endif
3750
3751 /* If this block has no successors, any stores to the frame that aren't
3752 used later in the block are dead. So make a pass over the block
3753 recording any such that are made and show them dead at the end. We do
3754 a very conservative and simple job here. */
3755 if ((flags & PROP_SCAN_DEAD_CODE)
3756 && (bb->succ == NULL
3757 || (bb->succ->succ_next == NULL
3758 && bb->succ->dest == EXIT_BLOCK_PTR)))
3759 {
3760 rtx insn;
3761 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3762 if (GET_CODE (insn) == INSN
3763 && GET_CODE (PATTERN (insn)) == SET
3764 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3765 {
3766 rtx mem = SET_DEST (PATTERN (insn));
3767
3768 if (XEXP (mem, 0) == frame_pointer_rtx
3769 || (GET_CODE (XEXP (mem, 0)) == PLUS
3770 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3771 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3772 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3773 }
3774 }
3775
3776 return pbi;
3777 }
3778
3779 /* Release a propagate_block_info struct. */
3780
3781 void
3782 free_propagate_block_info (pbi)
3783 struct propagate_block_info *pbi;
3784 {
3785 free_EXPR_LIST_list (&pbi->mem_set_list);
3786
3787 BITMAP_XFREE (pbi->new_set);
3788
3789 #ifdef HAVE_conditional_execution
3790 splay_tree_delete (pbi->reg_cond_dead);
3791 BITMAP_XFREE (pbi->reg_cond_reg);
3792 #endif
3793
3794 if (pbi->reg_next_use)
3795 free (pbi->reg_next_use);
3796
3797 free (pbi);
3798 }
3799
3800 /* Compute the registers live at the beginning of a basic block BB from
3801 those live at the end.
3802
3803 When called, REG_LIVE contains those live at the end. On return, it
3804 contains those live at the beginning.
3805
3806 LOCAL_SET, if non-null, will be set with all registers killed by
3807 this basic block. */
3808
3809 void
3810 propagate_block (bb, live, local_set, flags)
3811 basic_block bb;
3812 regset live;
3813 regset local_set;
3814 int flags;
3815 {
3816 struct propagate_block_info *pbi;
3817 rtx insn, prev;
3818
3819 pbi = init_propagate_block_info (bb, live, local_set, flags);
3820
3821 if (flags & PROP_REG_INFO)
3822 {
3823 register int i;
3824
3825 /* Process the regs live at the end of the block.
3826 Mark them as not local to any one basic block. */
3827 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3828 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3829 }
3830
3831 /* Scan the block an insn at a time from end to beginning. */
3832
3833 for (insn = bb->end; ; insn = prev)
3834 {
3835 /* If this is a call to `setjmp' et al, warn if any
3836 non-volatile datum is live. */
3837 if ((flags & PROP_REG_INFO)
3838 && GET_CODE (insn) == NOTE
3839 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3840 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3841
3842 prev = propagate_one_insn (pbi, insn);
3843
3844 if (insn == bb->head)
3845 break;
3846 }
3847
3848 free_propagate_block_info (pbi);
3849 }
3850 \f
3851 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3852 (SET expressions whose destinations are registers dead after the insn).
3853 NEEDED is the regset that says which regs are alive after the insn.
3854
3855 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3856
3857 If X is the entire body of an insn, NOTES contains the reg notes
3858 pertaining to the insn. */
3859
3860 static int
3861 insn_dead_p (pbi, x, call_ok, notes)
3862 struct propagate_block_info *pbi;
3863 rtx x;
3864 int call_ok;
3865 rtx notes ATTRIBUTE_UNUSED;
3866 {
3867 enum rtx_code code = GET_CODE (x);
3868
3869 #ifdef AUTO_INC_DEC
3870 /* If flow is invoked after reload, we must take existing AUTO_INC
3871 expresions into account. */
3872 if (reload_completed)
3873 {
3874 for ( ; notes; notes = XEXP (notes, 1))
3875 {
3876 if (REG_NOTE_KIND (notes) == REG_INC)
3877 {
3878 int regno = REGNO (XEXP (notes, 0));
3879
3880 /* Don't delete insns to set global regs. */
3881 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3882 || REGNO_REG_SET_P (pbi->reg_live, regno))
3883 return 0;
3884 }
3885 }
3886 }
3887 #endif
3888
3889 /* If setting something that's a reg or part of one,
3890 see if that register's altered value will be live. */
3891
3892 if (code == SET)
3893 {
3894 rtx r = SET_DEST (x);
3895
3896 #ifdef HAVE_cc0
3897 if (GET_CODE (r) == CC0)
3898 return ! pbi->cc0_live;
3899 #endif
3900
3901 /* A SET that is a subroutine call cannot be dead. */
3902 if (GET_CODE (SET_SRC (x)) == CALL)
3903 {
3904 if (! call_ok)
3905 return 0;
3906 }
3907
3908 /* Don't eliminate loads from volatile memory or volatile asms. */
3909 else if (volatile_refs_p (SET_SRC (x)))
3910 return 0;
3911
3912 if (GET_CODE (r) == MEM)
3913 {
3914 rtx temp;
3915
3916 if (MEM_VOLATILE_P (r))
3917 return 0;
3918
3919 /* Walk the set of memory locations we are currently tracking
3920 and see if one is an identical match to this memory location.
3921 If so, this memory write is dead (remember, we're walking
3922 backwards from the end of the block to the start). */
3923 temp = pbi->mem_set_list;
3924 while (temp)
3925 {
3926 if (rtx_equal_p (XEXP (temp, 0), r))
3927 return 1;
3928 temp = XEXP (temp, 1);
3929 }
3930 }
3931 else
3932 {
3933 while (GET_CODE (r) == SUBREG
3934 || GET_CODE (r) == STRICT_LOW_PART
3935 || GET_CODE (r) == ZERO_EXTRACT)
3936 r = XEXP (r, 0);
3937
3938 if (GET_CODE (r) == REG)
3939 {
3940 int regno = REGNO (r);
3941
3942 /* Obvious. */
3943 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3944 return 0;
3945
3946 /* If this is a hard register, verify that subsequent
3947 words are not needed. */
3948 if (regno < FIRST_PSEUDO_REGISTER)
3949 {
3950 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3951
3952 while (--n > 0)
3953 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3954 return 0;
3955 }
3956
3957 /* Don't delete insns to set global regs. */
3958 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3959 return 0;
3960
3961 /* Make sure insns to set the stack pointer aren't deleted. */
3962 if (regno == STACK_POINTER_REGNUM)
3963 return 0;
3964
3965 /* Make sure insns to set the frame pointer aren't deleted. */
3966 if (regno == FRAME_POINTER_REGNUM
3967 && (! reload_completed || frame_pointer_needed))
3968 return 0;
3969 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3970 if (regno == HARD_FRAME_POINTER_REGNUM
3971 && (! reload_completed || frame_pointer_needed))
3972 return 0;
3973 #endif
3974
3975 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3976 /* Make sure insns to set arg pointer are never deleted
3977 (if the arg pointer isn't fixed, there will be a USE
3978 for it, so we can treat it normally). */
3979 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3980 return 0;
3981 #endif
3982
3983 #ifdef PIC_OFFSET_TABLE_REGNUM
3984 /* Before reload, do not allow sets of the pic register
3985 to be deleted. Reload can insert references to
3986 constant pool memory anywhere in the function, making
3987 the PIC register live where it wasn't before. */
3988 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3989 && ! reload_completed)
3990 return 0;
3991 #endif
3992
3993 /* Otherwise, the set is dead. */
3994 return 1;
3995 }
3996 }
3997 }
3998
3999 /* If performing several activities, insn is dead if each activity
4000 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4001 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4002 worth keeping. */
4003 else if (code == PARALLEL)
4004 {
4005 int i = XVECLEN (x, 0);
4006
4007 for (i--; i >= 0; i--)
4008 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4009 && GET_CODE (XVECEXP (x, 0, i)) != USE
4010 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4011 return 0;
4012
4013 return 1;
4014 }
4015
4016 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4017 is not necessarily true for hard registers. */
4018 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4019 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4020 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4021 return 1;
4022
4023 /* We do not check other CLOBBER or USE here. An insn consisting of just
4024 a CLOBBER or just a USE should not be deleted. */
4025 return 0;
4026 }
4027
4028 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4029 return 1 if the entire library call is dead.
4030 This is true if INSN copies a register (hard or pseudo)
4031 and if the hard return reg of the call insn is dead.
4032 (The caller should have tested the destination of the SET inside
4033 INSN already for death.)
4034
4035 If this insn doesn't just copy a register, then we don't
4036 have an ordinary libcall. In that case, cse could not have
4037 managed to substitute the source for the dest later on,
4038 so we can assume the libcall is dead.
4039
4040 PBI is the block info giving pseudoregs live before this insn.
4041 NOTE is the REG_RETVAL note of the insn. */
4042
4043 static int
4044 libcall_dead_p (pbi, note, insn)
4045 struct propagate_block_info *pbi;
4046 rtx note;
4047 rtx insn;
4048 {
4049 rtx x = single_set (insn);
4050
4051 if (x)
4052 {
4053 register rtx r = SET_SRC (x);
4054 if (GET_CODE (r) == REG)
4055 {
4056 rtx call = XEXP (note, 0);
4057 rtx call_pat;
4058 register int i;
4059
4060 /* Find the call insn. */
4061 while (call != insn && GET_CODE (call) != CALL_INSN)
4062 call = NEXT_INSN (call);
4063
4064 /* If there is none, do nothing special,
4065 since ordinary death handling can understand these insns. */
4066 if (call == insn)
4067 return 0;
4068
4069 /* See if the hard reg holding the value is dead.
4070 If this is a PARALLEL, find the call within it. */
4071 call_pat = PATTERN (call);
4072 if (GET_CODE (call_pat) == PARALLEL)
4073 {
4074 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4075 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4076 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4077 break;
4078
4079 /* This may be a library call that is returning a value
4080 via invisible pointer. Do nothing special, since
4081 ordinary death handling can understand these insns. */
4082 if (i < 0)
4083 return 0;
4084
4085 call_pat = XVECEXP (call_pat, 0, i);
4086 }
4087
4088 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4089 }
4090 }
4091 return 1;
4092 }
4093
4094 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4095 live at function entry. Don't count global register variables, variables
4096 in registers that can be used for function arg passing, or variables in
4097 fixed hard registers. */
4098
4099 int
4100 regno_uninitialized (regno)
4101 int regno;
4102 {
4103 if (n_basic_blocks == 0
4104 || (regno < FIRST_PSEUDO_REGISTER
4105 && (global_regs[regno]
4106 || fixed_regs[regno]
4107 || FUNCTION_ARG_REGNO_P (regno))))
4108 return 0;
4109
4110 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4111 }
4112
4113 /* 1 if register REGNO was alive at a place where `setjmp' was called
4114 and was set more than once or is an argument.
4115 Such regs may be clobbered by `longjmp'. */
4116
4117 int
4118 regno_clobbered_at_setjmp (regno)
4119 int regno;
4120 {
4121 if (n_basic_blocks == 0)
4122 return 0;
4123
4124 return ((REG_N_SETS (regno) > 1
4125 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4126 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4127 }
4128 \f
4129 /* INSN references memory, possibly using autoincrement addressing modes.
4130 Find any entries on the mem_set_list that need to be invalidated due
4131 to an address change. */
4132
4133 static void
4134 invalidate_mems_from_autoinc (pbi, insn)
4135 struct propagate_block_info *pbi;
4136 rtx insn;
4137 {
4138 rtx note = REG_NOTES (insn);
4139 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4140 {
4141 if (REG_NOTE_KIND (note) == REG_INC)
4142 {
4143 rtx temp = pbi->mem_set_list;
4144 rtx prev = NULL_RTX;
4145 rtx next;
4146
4147 while (temp)
4148 {
4149 next = XEXP (temp, 1);
4150 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4151 {
4152 /* Splice temp out of list. */
4153 if (prev)
4154 XEXP (prev, 1) = next;
4155 else
4156 pbi->mem_set_list = next;
4157 free_EXPR_LIST_node (temp);
4158 }
4159 else
4160 prev = temp;
4161 temp = next;
4162 }
4163 }
4164 }
4165 }
4166
4167 /* Process the registers that are set within X. Their bits are set to
4168 1 in the regset DEAD, because they are dead prior to this insn.
4169
4170 If INSN is nonzero, it is the insn being processed.
4171
4172 FLAGS is the set of operations to perform. */
4173
4174 static void
4175 mark_set_regs (pbi, x, insn)
4176 struct propagate_block_info *pbi;
4177 rtx x, insn;
4178 {
4179 rtx cond = NULL_RTX;
4180 rtx link;
4181 enum rtx_code code;
4182
4183 if (insn)
4184 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4185 {
4186 if (REG_NOTE_KIND (link) == REG_INC)
4187 mark_set_1 (pbi, SET, XEXP (link, 0),
4188 (GET_CODE (x) == COND_EXEC
4189 ? COND_EXEC_TEST (x) : NULL_RTX),
4190 insn, pbi->flags);
4191 }
4192 retry:
4193 switch (code = GET_CODE (x))
4194 {
4195 case SET:
4196 case CLOBBER:
4197 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4198 return;
4199
4200 case COND_EXEC:
4201 cond = COND_EXEC_TEST (x);
4202 x = COND_EXEC_CODE (x);
4203 goto retry;
4204
4205 case PARALLEL:
4206 {
4207 register int i;
4208 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4209 {
4210 rtx sub = XVECEXP (x, 0, i);
4211 switch (code = GET_CODE (sub))
4212 {
4213 case COND_EXEC:
4214 if (cond != NULL_RTX)
4215 abort ();
4216
4217 cond = COND_EXEC_TEST (sub);
4218 sub = COND_EXEC_CODE (sub);
4219 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4220 break;
4221 /* FALLTHRU */
4222
4223 case SET:
4224 case CLOBBER:
4225 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4226 break;
4227
4228 default:
4229 break;
4230 }
4231 }
4232 break;
4233 }
4234
4235 default:
4236 break;
4237 }
4238 }
4239
4240 /* Process a single SET rtx, X. */
4241
4242 static void
4243 mark_set_1 (pbi, code, reg, cond, insn, flags)
4244 struct propagate_block_info *pbi;
4245 enum rtx_code code;
4246 rtx reg, cond, insn;
4247 int flags;
4248 {
4249 int regno_first = -1, regno_last = -1;
4250 int not_dead = 0;
4251 int i;
4252
4253 /* Some targets place small structures in registers for
4254 return values of functions. We have to detect this
4255 case specially here to get correct flow information. */
4256 if (GET_CODE (reg) == PARALLEL
4257 && GET_MODE (reg) == BLKmode)
4258 {
4259 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4260 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4261 return;
4262 }
4263
4264 /* Modifying just one hardware register of a multi-reg value or just a
4265 byte field of a register does not mean the value from before this insn
4266 is now dead. Of course, if it was dead after it's unused now. */
4267
4268 switch (GET_CODE (reg))
4269 {
4270 case ZERO_EXTRACT:
4271 case SIGN_EXTRACT:
4272 case STRICT_LOW_PART:
4273 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4274 do
4275 reg = XEXP (reg, 0);
4276 while (GET_CODE (reg) == SUBREG
4277 || GET_CODE (reg) == ZERO_EXTRACT
4278 || GET_CODE (reg) == SIGN_EXTRACT
4279 || GET_CODE (reg) == STRICT_LOW_PART);
4280 if (GET_CODE (reg) == MEM)
4281 break;
4282 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4283 /* FALLTHRU */
4284
4285 case REG:
4286 regno_last = regno_first = REGNO (reg);
4287 if (regno_first < FIRST_PSEUDO_REGISTER)
4288 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4289 break;
4290
4291 case SUBREG:
4292 if (GET_CODE (SUBREG_REG (reg)) == REG)
4293 {
4294 enum machine_mode outer_mode = GET_MODE (reg);
4295 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4296
4297 /* Identify the range of registers affected. This is moderately
4298 tricky for hard registers. See alter_subreg. */
4299
4300 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4301 if (regno_first < FIRST_PSEUDO_REGISTER)
4302 {
4303 #ifdef ALTER_HARD_SUBREG
4304 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4305 inner_mode, regno_first);
4306 #else
4307 regno_first += SUBREG_WORD (reg);
4308 #endif
4309 regno_last = (regno_first
4310 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4311
4312 /* Since we've just adjusted the register number ranges, make
4313 sure REG matches. Otherwise some_was_live will be clear
4314 when it shouldn't have been, and we'll create incorrect
4315 REG_UNUSED notes. */
4316 reg = gen_rtx_REG (outer_mode, regno_first);
4317 }
4318 else
4319 {
4320 /* If the number of words in the subreg is less than the number
4321 of words in the full register, we have a well-defined partial
4322 set. Otherwise the high bits are undefined.
4323
4324 This is only really applicable to pseudos, since we just took
4325 care of multi-word hard registers. */
4326 if (((GET_MODE_SIZE (outer_mode)
4327 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4328 < ((GET_MODE_SIZE (inner_mode)
4329 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4330 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4331
4332 reg = SUBREG_REG (reg);
4333 }
4334 }
4335 else
4336 reg = SUBREG_REG (reg);
4337 break;
4338
4339 default:
4340 break;
4341 }
4342
4343 /* If this set is a MEM, then it kills any aliased writes.
4344 If this set is a REG, then it kills any MEMs which use the reg. */
4345 if (flags & PROP_SCAN_DEAD_CODE)
4346 {
4347 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4348 {
4349 rtx temp = pbi->mem_set_list;
4350 rtx prev = NULL_RTX;
4351 rtx next;
4352
4353 while (temp)
4354 {
4355 next = XEXP (temp, 1);
4356 if ((GET_CODE (reg) == MEM
4357 && output_dependence (XEXP (temp, 0), reg))
4358 || (GET_CODE (reg) == REG
4359 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4360 {
4361 /* Splice this entry out of the list. */
4362 if (prev)
4363 XEXP (prev, 1) = next;
4364 else
4365 pbi->mem_set_list = next;
4366 free_EXPR_LIST_node (temp);
4367 }
4368 else
4369 prev = temp;
4370 temp = next;
4371 }
4372 }
4373
4374 /* If the memory reference had embedded side effects (autoincrement
4375 address modes. Then we may need to kill some entries on the
4376 memory set list. */
4377 if (insn && GET_CODE (reg) == MEM)
4378 invalidate_mems_from_autoinc (pbi, insn);
4379
4380 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4381 /* ??? With more effort we could track conditional memory life. */
4382 && ! cond
4383 /* We do not know the size of a BLKmode store, so we do not track
4384 them for redundant store elimination. */
4385 && GET_MODE (reg) != BLKmode
4386 /* There are no REG_INC notes for SP, so we can't assume we'll see
4387 everything that invalidates it. To be safe, don't eliminate any
4388 stores though SP; none of them should be redundant anyway. */
4389 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4390 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4391 }
4392
4393 if (GET_CODE (reg) == REG
4394 && ! (regno_first == FRAME_POINTER_REGNUM
4395 && (! reload_completed || frame_pointer_needed))
4396 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4397 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4398 && (! reload_completed || frame_pointer_needed))
4399 #endif
4400 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4401 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4402 #endif
4403 )
4404 {
4405 int some_was_live = 0, some_was_dead = 0;
4406
4407 for (i = regno_first; i <= regno_last; ++i)
4408 {
4409 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4410 if (pbi->local_set)
4411 SET_REGNO_REG_SET (pbi->local_set, i);
4412 if (code != CLOBBER)
4413 SET_REGNO_REG_SET (pbi->new_set, i);
4414
4415 some_was_live |= needed_regno;
4416 some_was_dead |= ! needed_regno;
4417 }
4418
4419 #ifdef HAVE_conditional_execution
4420 /* Consider conditional death in deciding that the register needs
4421 a death note. */
4422 if (some_was_live && ! not_dead
4423 /* The stack pointer is never dead. Well, not strictly true,
4424 but it's very difficult to tell from here. Hopefully
4425 combine_stack_adjustments will fix up the most egregious
4426 errors. */
4427 && regno_first != STACK_POINTER_REGNUM)
4428 {
4429 for (i = regno_first; i <= regno_last; ++i)
4430 if (! mark_regno_cond_dead (pbi, i, cond))
4431 not_dead = 1;
4432 }
4433 #endif
4434
4435 /* Additional data to record if this is the final pass. */
4436 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4437 | PROP_DEATH_NOTES | PROP_AUTOINC))
4438 {
4439 register rtx y;
4440 register int blocknum = pbi->bb->index;
4441
4442 y = NULL_RTX;
4443 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4444 {
4445 y = pbi->reg_next_use[regno_first];
4446
4447 /* The next use is no longer next, since a store intervenes. */
4448 for (i = regno_first; i <= regno_last; ++i)
4449 pbi->reg_next_use[i] = 0;
4450 }
4451
4452 if (flags & PROP_REG_INFO)
4453 {
4454 for (i = regno_first; i <= regno_last; ++i)
4455 {
4456 /* Count (weighted) references, stores, etc. This counts a
4457 register twice if it is modified, but that is correct. */
4458 REG_N_SETS (i) += 1;
4459 REG_N_REFS (i) += (optimize_size ? 1
4460 : pbi->bb->loop_depth + 1);
4461
4462 /* The insns where a reg is live are normally counted
4463 elsewhere, but we want the count to include the insn
4464 where the reg is set, and the normal counting mechanism
4465 would not count it. */
4466 REG_LIVE_LENGTH (i) += 1;
4467 }
4468
4469 /* If this is a hard reg, record this function uses the reg. */
4470 if (regno_first < FIRST_PSEUDO_REGISTER)
4471 {
4472 for (i = regno_first; i <= regno_last; i++)
4473 regs_ever_live[i] = 1;
4474 }
4475 else
4476 {
4477 /* Keep track of which basic blocks each reg appears in. */
4478 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4479 REG_BASIC_BLOCK (regno_first) = blocknum;
4480 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4481 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4482 }
4483 }
4484
4485 if (! some_was_dead)
4486 {
4487 if (flags & PROP_LOG_LINKS)
4488 {
4489 /* Make a logical link from the next following insn
4490 that uses this register, back to this insn.
4491 The following insns have already been processed.
4492
4493 We don't build a LOG_LINK for hard registers containing
4494 in ASM_OPERANDs. If these registers get replaced,
4495 we might wind up changing the semantics of the insn,
4496 even if reload can make what appear to be valid
4497 assignments later. */
4498 if (y && (BLOCK_NUM (y) == blocknum)
4499 && (regno_first >= FIRST_PSEUDO_REGISTER
4500 || asm_noperands (PATTERN (y)) < 0))
4501 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4502 }
4503 }
4504 else if (not_dead)
4505 ;
4506 else if (! some_was_live)
4507 {
4508 if (flags & PROP_REG_INFO)
4509 REG_N_DEATHS (regno_first) += 1;
4510
4511 if (flags & PROP_DEATH_NOTES)
4512 {
4513 /* Note that dead stores have already been deleted
4514 when possible. If we get here, we have found a
4515 dead store that cannot be eliminated (because the
4516 same insn does something useful). Indicate this
4517 by marking the reg being set as dying here. */
4518 REG_NOTES (insn)
4519 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4520 }
4521 }
4522 else
4523 {
4524 if (flags & PROP_DEATH_NOTES)
4525 {
4526 /* This is a case where we have a multi-word hard register
4527 and some, but not all, of the words of the register are
4528 needed in subsequent insns. Write REG_UNUSED notes
4529 for those parts that were not needed. This case should
4530 be rare. */
4531
4532 for (i = regno_first; i <= regno_last; ++i)
4533 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4534 REG_NOTES (insn)
4535 = alloc_EXPR_LIST (REG_UNUSED,
4536 gen_rtx_REG (reg_raw_mode[i], i),
4537 REG_NOTES (insn));
4538 }
4539 }
4540 }
4541
4542 /* Mark the register as being dead. */
4543 if (some_was_live
4544 && ! not_dead
4545 /* The stack pointer is never dead. Well, not strictly true,
4546 but it's very difficult to tell from here. Hopefully
4547 combine_stack_adjustments will fix up the most egregious
4548 errors. */
4549 && regno_first != STACK_POINTER_REGNUM)
4550 {
4551 for (i = regno_first; i <= regno_last; ++i)
4552 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4553 }
4554 }
4555 else if (GET_CODE (reg) == REG)
4556 {
4557 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4558 pbi->reg_next_use[regno_first] = 0;
4559 }
4560
4561 /* If this is the last pass and this is a SCRATCH, show it will be dying
4562 here and count it. */
4563 else if (GET_CODE (reg) == SCRATCH)
4564 {
4565 if (flags & PROP_DEATH_NOTES)
4566 REG_NOTES (insn)
4567 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4568 }
4569 }
4570 \f
4571 #ifdef HAVE_conditional_execution
4572 /* Mark REGNO conditionally dead. Return true if the register is
4573 now unconditionally dead. */
4574
4575 static int
4576 mark_regno_cond_dead (pbi, regno, cond)
4577 struct propagate_block_info *pbi;
4578 int regno;
4579 rtx cond;
4580 {
4581 /* If this is a store to a predicate register, the value of the
4582 predicate is changing, we don't know that the predicate as seen
4583 before is the same as that seen after. Flush all dependent
4584 conditions from reg_cond_dead. This will make all such
4585 conditionally live registers unconditionally live. */
4586 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4587 flush_reg_cond_reg (pbi, regno);
4588
4589 /* If this is an unconditional store, remove any conditional
4590 life that may have existed. */
4591 if (cond == NULL_RTX)
4592 splay_tree_remove (pbi->reg_cond_dead, regno);
4593 else
4594 {
4595 splay_tree_node node;
4596 struct reg_cond_life_info *rcli;
4597 rtx ncond;
4598
4599 /* Otherwise this is a conditional set. Record that fact.
4600 It may have been conditionally used, or there may be a
4601 subsequent set with a complimentary condition. */
4602
4603 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4604 if (node == NULL)
4605 {
4606 /* The register was unconditionally live previously.
4607 Record the current condition as the condition under
4608 which it is dead. */
4609 rcli = (struct reg_cond_life_info *)
4610 xmalloc (sizeof (*rcli));
4611 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4612 splay_tree_insert (pbi->reg_cond_dead, regno,
4613 (splay_tree_value) rcli);
4614
4615 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4616 REGNO (XEXP (cond, 0)));
4617
4618 /* Not unconditionaly dead. */
4619 return 0;
4620 }
4621 else
4622 {
4623 /* The register was conditionally live previously.
4624 Add the new condition to the old. */
4625 rcli = (struct reg_cond_life_info *) node->value;
4626 ncond = rcli->condition;
4627 ncond = ior_reg_cond (ncond, cond);
4628
4629 /* If the register is now unconditionally dead,
4630 remove the entry in the splay_tree. */
4631 if (ncond == const1_rtx)
4632 splay_tree_remove (pbi->reg_cond_dead, regno);
4633 else
4634 {
4635 rcli->condition = ncond;
4636
4637 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4638 REGNO (XEXP (cond, 0)));
4639
4640 /* Not unconditionaly dead. */
4641 return 0;
4642 }
4643 }
4644 }
4645
4646 return 1;
4647 }
4648
4649 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4650
4651 static void
4652 free_reg_cond_life_info (value)
4653 splay_tree_value value;
4654 {
4655 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4656 free_EXPR_LIST_list (&rcli->condition);
4657 free (rcli);
4658 }
4659
4660 /* Helper function for flush_reg_cond_reg. */
4661
4662 static int
4663 flush_reg_cond_reg_1 (node, data)
4664 splay_tree_node node;
4665 void *data;
4666 {
4667 struct reg_cond_life_info *rcli;
4668 int *xdata = (int *) data;
4669 unsigned int regno = xdata[0];
4670 rtx c, *prev;
4671
4672 /* Don't need to search if last flushed value was farther on in
4673 the in-order traversal. */
4674 if (xdata[1] >= (int) node->key)
4675 return 0;
4676
4677 /* Splice out portions of the expression that refer to regno. */
4678 rcli = (struct reg_cond_life_info *) node->value;
4679 c = *(prev = &rcli->condition);
4680 while (c)
4681 {
4682 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4683 {
4684 rtx next = XEXP (c, 1);
4685 free_EXPR_LIST_node (c);
4686 c = *prev = next;
4687 }
4688 else
4689 c = *(prev = &XEXP (c, 1));
4690 }
4691
4692 /* If the entire condition is now NULL, signal the node to be removed. */
4693 if (! rcli->condition)
4694 {
4695 xdata[1] = node->key;
4696 return -1;
4697 }
4698 else
4699 return 0;
4700 }
4701
4702 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4703
4704 static void
4705 flush_reg_cond_reg (pbi, regno)
4706 struct propagate_block_info *pbi;
4707 int regno;
4708 {
4709 int pair[2];
4710
4711 pair[0] = regno;
4712 pair[1] = -1;
4713 while (splay_tree_foreach (pbi->reg_cond_dead,
4714 flush_reg_cond_reg_1, pair) == -1)
4715 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4716
4717 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4718 }
4719
4720 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4721 We actually use EXPR_LIST to chain the sub-expressions together
4722 instead of IOR because it's easier to manipulate and we have
4723 the lists.c functions to reuse nodes.
4724
4725 Return a new rtl expression as appropriate. */
4726
4727 static rtx
4728 ior_reg_cond (old, x)
4729 rtx old, x;
4730 {
4731 enum rtx_code x_code;
4732 rtx x_reg;
4733 rtx c;
4734
4735 /* We expect these conditions to be of the form (eq reg 0). */
4736 x_code = GET_CODE (x);
4737 if (GET_RTX_CLASS (x_code) != '<'
4738 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4739 || XEXP (x, 1) != const0_rtx)
4740 abort ();
4741
4742 /* Search the expression for an existing sub-expression of X_REG. */
4743 for (c = old; c ; c = XEXP (c, 1))
4744 {
4745 rtx y = XEXP (c, 0);
4746 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4747 {
4748 /* If we find X already present in OLD, we need do nothing. */
4749 if (GET_CODE (y) == x_code)
4750 return old;
4751
4752 /* If we find X being a compliment of a condition in OLD,
4753 then the entire condition is true. */
4754 if (GET_CODE (y) == reverse_condition (x_code))
4755 return const1_rtx;
4756 }
4757 }
4758
4759 /* Otherwise just add to the chain. */
4760 return alloc_EXPR_LIST (0, x, old);
4761 }
4762
4763 static rtx
4764 not_reg_cond (x)
4765 rtx x;
4766 {
4767 enum rtx_code x_code;
4768 rtx x_reg;
4769
4770 /* We expect these conditions to be of the form (eq reg 0). */
4771 x_code = GET_CODE (x);
4772 if (GET_RTX_CLASS (x_code) != '<'
4773 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4774 || XEXP (x, 1) != const0_rtx)
4775 abort ();
4776
4777 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4778 VOIDmode, x_reg, const0_rtx),
4779 NULL_RTX);
4780 }
4781
4782 static rtx
4783 nand_reg_cond (old, x)
4784 rtx old, x;
4785 {
4786 enum rtx_code x_code;
4787 rtx x_reg;
4788 rtx c, *prev;
4789
4790 /* We expect these conditions to be of the form (eq reg 0). */
4791 x_code = GET_CODE (x);
4792 if (GET_RTX_CLASS (x_code) != '<'
4793 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4794 || XEXP (x, 1) != const0_rtx)
4795 abort ();
4796
4797 /* Search the expression for an existing sub-expression of X_REG. */
4798
4799 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4800 {
4801 rtx y = XEXP (c, 0);
4802 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4803 {
4804 /* If we find X already present in OLD, then we need to
4805 splice it out. */
4806 if (GET_CODE (y) == x_code)
4807 {
4808 *prev = XEXP (c, 1);
4809 free_EXPR_LIST_node (c);
4810 return old ? old : const0_rtx;
4811 }
4812
4813 /* If we find X being a compliment of a condition in OLD,
4814 then we need do nothing. */
4815 if (GET_CODE (y) == reverse_condition (x_code))
4816 return old;
4817 }
4818 }
4819
4820 /* Otherwise, by implication, the register in question is now live for
4821 the inverse of the condition X. */
4822 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4823 VOIDmode, x_reg, const0_rtx),
4824 old);
4825 }
4826 #endif /* HAVE_conditional_execution */
4827 \f
4828 #ifdef AUTO_INC_DEC
4829
4830 /* Try to substitute the auto-inc expression INC as the address inside
4831 MEM which occurs in INSN. Currently, the address of MEM is an expression
4832 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
4833 that has a single set whose source is a PLUS of INCR_REG and something
4834 else. */
4835
4836 static void
4837 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
4838 struct propagate_block_info *pbi;
4839 rtx inc, insn, mem, incr, incr_reg;
4840 {
4841 int regno = REGNO (incr_reg);
4842 rtx set = single_set (incr);
4843 rtx q = SET_DEST (set);
4844 rtx y = SET_SRC (set);
4845 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
4846
4847 /* Make sure this reg appears only once in this insn. */
4848 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
4849 return;
4850
4851 if (dead_or_set_p (incr, incr_reg)
4852 /* Mustn't autoinc an eliminable register. */
4853 && (regno >= FIRST_PSEUDO_REGISTER
4854 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4855 {
4856 /* This is the simple case. Try to make the auto-inc. If
4857 we can't, we are done. Otherwise, we will do any
4858 needed updates below. */
4859 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
4860 return;
4861 }
4862 else if (GET_CODE (q) == REG
4863 /* PREV_INSN used here to check the semi-open interval
4864 [insn,incr). */
4865 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4866 /* We must also check for sets of q as q may be
4867 a call clobbered hard register and there may
4868 be a call between PREV_INSN (insn) and incr. */
4869 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4870 {
4871 /* We have *p followed sometime later by q = p+size.
4872 Both p and q must be live afterward,
4873 and q is not used between INSN and its assignment.
4874 Change it to q = p, ...*q..., q = q+size.
4875 Then fall into the usual case. */
4876 rtx insns, temp;
4877 basic_block bb;
4878
4879 start_sequence ();
4880 emit_move_insn (q, incr_reg);
4881 insns = get_insns ();
4882 end_sequence ();
4883
4884 if (basic_block_for_insn)
4885 for (temp = insns; temp; temp = NEXT_INSN (temp))
4886 set_block_for_insn (temp, pbi->bb);
4887
4888 /* If we can't make the auto-inc, or can't make the
4889 replacement into Y, exit. There's no point in making
4890 the change below if we can't do the auto-inc and doing
4891 so is not correct in the pre-inc case. */
4892
4893 XEXP (inc, 0) = q;
4894 validate_change (insn, &XEXP (mem, 0), inc, 1);
4895 validate_change (incr, &XEXP (y, opnum), q, 1);
4896 if (! apply_change_group ())
4897 return;
4898
4899 /* We now know we'll be doing this change, so emit the
4900 new insn(s) and do the updates. */
4901 emit_insns_before (insns, insn);
4902
4903 if (pbi->bb->head == insn)
4904 pbi->bb->head = insns;
4905
4906 /* INCR will become a NOTE and INSN won't contain a
4907 use of INCR_REG. If a use of INCR_REG was just placed in
4908 the insn before INSN, make that the next use.
4909 Otherwise, invalidate it. */
4910 if (GET_CODE (PREV_INSN (insn)) == INSN
4911 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4912 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
4913 pbi->reg_next_use[regno] = PREV_INSN (insn);
4914 else
4915 pbi->reg_next_use[regno] = 0;
4916
4917 incr_reg = q;
4918 regno = REGNO (q);
4919
4920 /* REGNO is now used in INCR which is below INSN, but
4921 it previously wasn't live here. If we don't mark
4922 it as live, we'll put a REG_DEAD note for it
4923 on this insn, which is incorrect. */
4924 SET_REGNO_REG_SET (pbi->reg_live, regno);
4925
4926 /* If there are any calls between INSN and INCR, show
4927 that REGNO now crosses them. */
4928 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4929 if (GET_CODE (temp) == CALL_INSN)
4930 REG_N_CALLS_CROSSED (regno)++;
4931 }
4932 else
4933 return;
4934
4935 /* If we haven't returned, it means we were able to make the
4936 auto-inc, so update the status. First, record that this insn
4937 has an implicit side effect. */
4938
4939 REG_NOTES (insn)
4940 = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
4941
4942 /* Modify the old increment-insn to simply copy
4943 the already-incremented value of our register. */
4944 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
4945 abort ();
4946
4947 /* If that makes it a no-op (copying the register into itself) delete
4948 it so it won't appear to be a "use" and a "set" of this
4949 register. */
4950 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
4951 {
4952 /* If the original source was dead, it's dead now. */
4953 rtx note;
4954
4955 while (note = find_reg_note (incr, REG_DEAD, NULL_RTX))
4956 {
4957 remove_note (incr, note);
4958 if (XEXP (note, 0) != incr_reg)
4959 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4960 }
4961
4962 PUT_CODE (incr, NOTE);
4963 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4964 NOTE_SOURCE_FILE (incr) = 0;
4965 }
4966
4967 if (regno >= FIRST_PSEUDO_REGISTER)
4968 {
4969 /* Count an extra reference to the reg. When a reg is
4970 incremented, spilling it is worse, so we want to make
4971 that less likely. */
4972 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
4973
4974 /* Count the increment as a setting of the register,
4975 even though it isn't a SET in rtl. */
4976 REG_N_SETS (regno)++;
4977 }
4978 }
4979
4980 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4981 reference. */
4982
4983 static void
4984 find_auto_inc (pbi, x, insn)
4985 struct propagate_block_info *pbi;
4986 rtx x;
4987 rtx insn;
4988 {
4989 rtx addr = XEXP (x, 0);
4990 HOST_WIDE_INT offset = 0;
4991 rtx set, y, incr, inc_val;
4992 int regno;
4993 int size = GET_MODE_SIZE (GET_MODE (x));
4994
4995 if (GET_CODE (insn) == JUMP_INSN)
4996 return;
4997
4998 /* Here we detect use of an index register which might be good for
4999 postincrement, postdecrement, preincrement, or predecrement. */
5000
5001 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5002 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5003
5004 if (GET_CODE (addr) != REG)
5005 return;
5006
5007 regno = REGNO (addr);
5008
5009 /* Is the next use an increment that might make auto-increment? */
5010 incr = pbi->reg_next_use[regno];
5011 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5012 return;
5013 set = single_set (incr);
5014 if (set == 0 || GET_CODE (set) != SET)
5015 return;
5016 y = SET_SRC (set);
5017
5018 if (GET_CODE (y) != PLUS)
5019 return;
5020
5021 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5022 inc_val = XEXP (y, 1);
5023 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5024 inc_val = XEXP (y, 0);
5025 else
5026 return;
5027
5028 if (GET_CODE (inc_val) == CONST_INT)
5029 {
5030 if (HAVE_POST_INCREMENT
5031 && (INTVAL (inc_val) == size && offset == 0))
5032 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5033 incr, addr);
5034 else if (HAVE_POST_DECREMENT
5035 && (INTVAL (inc_val) == - size && offset == 0))
5036 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5037 incr, addr);
5038 else if (HAVE_PRE_INCREMENT
5039 && (INTVAL (inc_val) == size && offset == size))
5040 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5041 incr, addr);
5042 else if (HAVE_PRE_DECREMENT
5043 && (INTVAL (inc_val) == - size && offset == - size))
5044 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5045 incr, addr);
5046 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5047 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5048 gen_rtx_PLUS (Pmode,
5049 addr,
5050 inc_val)),
5051 insn, x, incr, addr);
5052 }
5053 else if (GET_CODE (inc_val) == REG
5054 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5055 NEXT_INSN (incr)))
5056
5057 {
5058 if (HAVE_POST_MODIFY_REG && offset == 0)
5059 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5060 gen_rtx_PLUS (Pmode,
5061 addr,
5062 inc_val)),
5063 insn, x, incr, addr);
5064 }
5065 }
5066
5067 #endif /* AUTO_INC_DEC */
5068 \f
5069 static void
5070 mark_used_reg (pbi, reg, cond, insn)
5071 struct propagate_block_info *pbi;
5072 rtx reg;
5073 rtx cond ATTRIBUTE_UNUSED;
5074 rtx insn;
5075 {
5076 int regno = REGNO (reg);
5077 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5078 int some_was_dead = ! some_was_live;
5079 int some_not_set;
5080 int n;
5081
5082 /* A hard reg in a wide mode may really be multiple registers.
5083 If so, mark all of them just like the first. */
5084 if (regno < FIRST_PSEUDO_REGISTER)
5085 {
5086 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5087 while (--n > 0)
5088 {
5089 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5090 some_was_live |= needed_regno;
5091 some_was_dead |= ! needed_regno;
5092 }
5093 }
5094
5095 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5096 {
5097 /* Record where each reg is used, so when the reg is set we know
5098 the next insn that uses it. */
5099 pbi->reg_next_use[regno] = insn;
5100 }
5101
5102 if (pbi->flags & PROP_REG_INFO)
5103 {
5104 if (regno < FIRST_PSEUDO_REGISTER)
5105 {
5106 /* If this is a register we are going to try to eliminate,
5107 don't mark it live here. If we are successful in
5108 eliminating it, it need not be live unless it is used for
5109 pseudos, in which case it will have been set live when it
5110 was allocated to the pseudos. If the register will not
5111 be eliminated, reload will set it live at that point.
5112
5113 Otherwise, record that this function uses this register. */
5114 /* ??? The PPC backend tries to "eliminate" on the pic
5115 register to itself. This should be fixed. In the mean
5116 time, hack around it. */
5117
5118 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5119 && (regno == FRAME_POINTER_REGNUM
5120 || regno == ARG_POINTER_REGNUM)))
5121 {
5122 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5123 do
5124 regs_ever_live[regno + --n] = 1;
5125 while (n > 0);
5126 }
5127 }
5128 else
5129 {
5130 /* Keep track of which basic block each reg appears in. */
5131
5132 register int blocknum = pbi->bb->index;
5133 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5134 REG_BASIC_BLOCK (regno) = blocknum;
5135 else if (REG_BASIC_BLOCK (regno) != blocknum)
5136 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5137
5138 /* Count (weighted) number of uses of each reg. */
5139 REG_N_REFS (regno) += (optimize_size ? 1
5140 : pbi->bb->loop_depth + 1);
5141 }
5142 }
5143
5144 /* Find out if any of the register was set this insn. */
5145 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5146 if (regno < FIRST_PSEUDO_REGISTER)
5147 {
5148 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5149 while (--n > 0)
5150 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5151 }
5152
5153 /* Record and count the insns in which a reg dies. If it is used in
5154 this insn and was dead below the insn then it dies in this insn.
5155 If it was set in this insn, we do not make a REG_DEAD note;
5156 likewise if we already made such a note. */
5157 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5158 && some_was_dead
5159 && some_not_set)
5160 {
5161 /* Check for the case where the register dying partially
5162 overlaps the register set by this insn. */
5163 if (regno < FIRST_PSEUDO_REGISTER
5164 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5165 {
5166 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5167 while (--n >= 0)
5168 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5169 }
5170
5171 /* If none of the words in X is needed, make a REG_DEAD note.
5172 Otherwise, we must make partial REG_DEAD notes. */
5173 if (! some_was_live)
5174 {
5175 if ((pbi->flags & PROP_DEATH_NOTES)
5176 && ! find_regno_note (insn, REG_DEAD, regno))
5177 REG_NOTES (insn)
5178 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5179
5180 if (pbi->flags & PROP_REG_INFO)
5181 REG_N_DEATHS (regno)++;
5182 }
5183 else
5184 {
5185 /* Don't make a REG_DEAD note for a part of a register
5186 that is set in the insn. */
5187
5188 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5189 for (; n >= regno; n--)
5190 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5191 && ! dead_or_set_regno_p (insn, n))
5192 REG_NOTES (insn)
5193 = alloc_EXPR_LIST (REG_DEAD,
5194 gen_rtx_REG (reg_raw_mode[n], n),
5195 REG_NOTES (insn));
5196 }
5197 }
5198
5199 SET_REGNO_REG_SET (pbi->reg_live, regno);
5200 if (regno < FIRST_PSEUDO_REGISTER)
5201 {
5202 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5203 while (--n > 0)
5204 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5205 }
5206
5207 #ifdef HAVE_conditional_execution
5208 /* If this is a conditional use, record that fact. If it is later
5209 conditionally set, we'll know to kill the register. */
5210 if (cond != NULL_RTX)
5211 {
5212 splay_tree_node node;
5213 struct reg_cond_life_info *rcli;
5214 rtx ncond;
5215
5216 if (some_was_live)
5217 {
5218 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5219 if (node == NULL)
5220 {
5221 /* The register was unconditionally live previously.
5222 No need to do anything. */
5223 }
5224 else
5225 {
5226 /* The register was conditionally live previously.
5227 Subtract the new life cond from the old death cond. */
5228 rcli = (struct reg_cond_life_info *) node->value;
5229 ncond = rcli->condition;
5230 ncond = nand_reg_cond (ncond, cond);
5231
5232 /* If the register is now unconditionally live, remove the
5233 entry in the splay_tree. */
5234 if (ncond == const0_rtx)
5235 {
5236 rcli->condition = NULL_RTX;
5237 splay_tree_remove (pbi->reg_cond_dead, regno);
5238 }
5239 else
5240 rcli->condition = ncond;
5241 }
5242 }
5243 else
5244 {
5245 /* The register was not previously live at all. Record
5246 the condition under which it is still dead. */
5247 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5248 rcli->condition = not_reg_cond (cond);
5249 splay_tree_insert (pbi->reg_cond_dead, regno,
5250 (splay_tree_value) rcli);
5251 }
5252 }
5253 else if (some_was_live)
5254 {
5255 splay_tree_node node;
5256 struct reg_cond_life_info *rcli;
5257
5258 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5259 if (node != NULL)
5260 {
5261 /* The register was conditionally live previously, but is now
5262 unconditionally so. Remove it from the conditionally dead
5263 list, so that a conditional set won't cause us to think
5264 it dead. */
5265 rcli = (struct reg_cond_life_info *) node->value;
5266 rcli->condition = NULL_RTX;
5267 splay_tree_remove (pbi->reg_cond_dead, regno);
5268 }
5269 }
5270
5271 #endif
5272 }
5273
5274 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5275 This is done assuming the registers needed from X are those that
5276 have 1-bits in PBI->REG_LIVE.
5277
5278 INSN is the containing instruction. If INSN is dead, this function
5279 is not called. */
5280
5281 static void
5282 mark_used_regs (pbi, x, cond, insn)
5283 struct propagate_block_info *pbi;
5284 rtx x, cond, insn;
5285 {
5286 register RTX_CODE code;
5287 register int regno;
5288 int flags = pbi->flags;
5289
5290 retry:
5291 code = GET_CODE (x);
5292 switch (code)
5293 {
5294 case LABEL_REF:
5295 case SYMBOL_REF:
5296 case CONST_INT:
5297 case CONST:
5298 case CONST_DOUBLE:
5299 case PC:
5300 case ADDR_VEC:
5301 case ADDR_DIFF_VEC:
5302 return;
5303
5304 #ifdef HAVE_cc0
5305 case CC0:
5306 pbi->cc0_live = 1;
5307 return;
5308 #endif
5309
5310 case CLOBBER:
5311 /* If we are clobbering a MEM, mark any registers inside the address
5312 as being used. */
5313 if (GET_CODE (XEXP (x, 0)) == MEM)
5314 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5315 return;
5316
5317 case MEM:
5318 /* Don't bother watching stores to mems if this is not the
5319 final pass. We'll not be deleting dead stores this round. */
5320 if (flags & PROP_SCAN_DEAD_CODE)
5321 {
5322 /* Invalidate the data for the last MEM stored, but only if MEM is
5323 something that can be stored into. */
5324 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5325 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5326 ; /* needn't clear the memory set list */
5327 else
5328 {
5329 rtx temp = pbi->mem_set_list;
5330 rtx prev = NULL_RTX;
5331 rtx next;
5332
5333 while (temp)
5334 {
5335 next = XEXP (temp, 1);
5336 if (anti_dependence (XEXP (temp, 0), x))
5337 {
5338 /* Splice temp out of the list. */
5339 if (prev)
5340 XEXP (prev, 1) = next;
5341 else
5342 pbi->mem_set_list = next;
5343 free_EXPR_LIST_node (temp);
5344 }
5345 else
5346 prev = temp;
5347 temp = next;
5348 }
5349 }
5350
5351 /* If the memory reference had embedded side effects (autoincrement
5352 address modes. Then we may need to kill some entries on the
5353 memory set list. */
5354 if (insn)
5355 invalidate_mems_from_autoinc (pbi, insn);
5356 }
5357
5358 #ifdef AUTO_INC_DEC
5359 if (flags & PROP_AUTOINC)
5360 find_auto_inc (pbi, x, insn);
5361 #endif
5362 break;
5363
5364 case SUBREG:
5365 #ifdef CLASS_CANNOT_CHANGE_MODE
5366 if (GET_CODE (SUBREG_REG (x)) == REG
5367 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5368 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5369 GET_MODE (SUBREG_REG (x))))
5370 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5371 #endif
5372
5373 /* While we're here, optimize this case. */
5374 x = SUBREG_REG (x);
5375 if (GET_CODE (x) != REG)
5376 goto retry;
5377 /* FALLTHRU */
5378
5379 case REG:
5380 /* See a register other than being set => mark it as needed. */
5381 mark_used_reg (pbi, x, cond, insn);
5382 return;
5383
5384 case SET:
5385 {
5386 register rtx testreg = SET_DEST (x);
5387 int mark_dest = 0;
5388
5389 /* If storing into MEM, don't show it as being used. But do
5390 show the address as being used. */
5391 if (GET_CODE (testreg) == MEM)
5392 {
5393 #ifdef AUTO_INC_DEC
5394 if (flags & PROP_AUTOINC)
5395 find_auto_inc (pbi, testreg, insn);
5396 #endif
5397 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5398 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5399 return;
5400 }
5401
5402 /* Storing in STRICT_LOW_PART is like storing in a reg
5403 in that this SET might be dead, so ignore it in TESTREG.
5404 but in some other ways it is like using the reg.
5405
5406 Storing in a SUBREG or a bit field is like storing the entire
5407 register in that if the register's value is not used
5408 then this SET is not needed. */
5409 while (GET_CODE (testreg) == STRICT_LOW_PART
5410 || GET_CODE (testreg) == ZERO_EXTRACT
5411 || GET_CODE (testreg) == SIGN_EXTRACT
5412 || GET_CODE (testreg) == SUBREG)
5413 {
5414 #ifdef CLASS_CANNOT_CHANGE_MODE
5415 if (GET_CODE (testreg) == SUBREG
5416 && GET_CODE (SUBREG_REG (testreg)) == REG
5417 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5418 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5419 GET_MODE (testreg)))
5420 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5421 #endif
5422
5423 /* Modifying a single register in an alternate mode
5424 does not use any of the old value. But these other
5425 ways of storing in a register do use the old value. */
5426 if (GET_CODE (testreg) == SUBREG
5427 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5428 ;
5429 else
5430 mark_dest = 1;
5431
5432 testreg = XEXP (testreg, 0);
5433 }
5434
5435 /* If this is a store into a register, recursively scan the
5436 value being stored. */
5437
5438 if ((GET_CODE (testreg) == PARALLEL
5439 && GET_MODE (testreg) == BLKmode)
5440 || (GET_CODE (testreg) == REG
5441 && (regno = REGNO (testreg),
5442 ! (regno == FRAME_POINTER_REGNUM
5443 && (! reload_completed || frame_pointer_needed)))
5444 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5445 && ! (regno == HARD_FRAME_POINTER_REGNUM
5446 && (! reload_completed || frame_pointer_needed))
5447 #endif
5448 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5449 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5450 #endif
5451 ))
5452 {
5453 if (mark_dest)
5454 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5455 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5456 return;
5457 }
5458 }
5459 break;
5460
5461 case ASM_OPERANDS:
5462 case UNSPEC_VOLATILE:
5463 case TRAP_IF:
5464 case ASM_INPUT:
5465 {
5466 /* Traditional and volatile asm instructions must be considered to use
5467 and clobber all hard registers, all pseudo-registers and all of
5468 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5469
5470 Consider for instance a volatile asm that changes the fpu rounding
5471 mode. An insn should not be moved across this even if it only uses
5472 pseudo-regs because it might give an incorrectly rounded result.
5473
5474 ?!? Unfortunately, marking all hard registers as live causes massive
5475 problems for the register allocator and marking all pseudos as live
5476 creates mountains of uninitialized variable warnings.
5477
5478 So for now, just clear the memory set list and mark any regs
5479 we can find in ASM_OPERANDS as used. */
5480 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5481 free_EXPR_LIST_list (&pbi->mem_set_list);
5482
5483 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5484 We can not just fall through here since then we would be confused
5485 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5486 traditional asms unlike their normal usage. */
5487 if (code == ASM_OPERANDS)
5488 {
5489 int j;
5490
5491 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5492 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5493 }
5494 break;
5495 }
5496
5497 case COND_EXEC:
5498 if (cond != NULL_RTX)
5499 abort ();
5500
5501 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5502
5503 cond = COND_EXEC_TEST (x);
5504 x = COND_EXEC_CODE (x);
5505 goto retry;
5506
5507 case PHI:
5508 /* We _do_not_ want to scan operands of phi nodes. Operands of
5509 a phi function are evaluated only when control reaches this
5510 block along a particular edge. Therefore, regs that appear
5511 as arguments to phi should not be added to the global live at
5512 start. */
5513 return;
5514
5515 default:
5516 break;
5517 }
5518
5519 /* Recursively scan the operands of this expression. */
5520
5521 {
5522 register const char *fmt = GET_RTX_FORMAT (code);
5523 register int i;
5524
5525 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5526 {
5527 if (fmt[i] == 'e')
5528 {
5529 /* Tail recursive case: save a function call level. */
5530 if (i == 0)
5531 {
5532 x = XEXP (x, 0);
5533 goto retry;
5534 }
5535 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5536 }
5537 else if (fmt[i] == 'E')
5538 {
5539 register int j;
5540 for (j = 0; j < XVECLEN (x, i); j++)
5541 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5542 }
5543 }
5544 }
5545 }
5546 \f
5547 #ifdef AUTO_INC_DEC
5548
5549 static int
5550 try_pre_increment_1 (pbi, insn)
5551 struct propagate_block_info *pbi;
5552 rtx insn;
5553 {
5554 /* Find the next use of this reg. If in same basic block,
5555 make it do pre-increment or pre-decrement if appropriate. */
5556 rtx x = single_set (insn);
5557 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5558 * INTVAL (XEXP (SET_SRC (x), 1)));
5559 int regno = REGNO (SET_DEST (x));
5560 rtx y = pbi->reg_next_use[regno];
5561 if (y != 0
5562 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5563 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5564 mode would be better. */
5565 && ! dead_or_set_p (y, SET_DEST (x))
5566 && try_pre_increment (y, SET_DEST (x), amount))
5567 {
5568 /* We have found a suitable auto-increment
5569 and already changed insn Y to do it.
5570 So flush this increment-instruction. */
5571 PUT_CODE (insn, NOTE);
5572 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5573 NOTE_SOURCE_FILE (insn) = 0;
5574 /* Count a reference to this reg for the increment
5575 insn we are deleting. When a reg is incremented.
5576 spilling it is worse, so we want to make that
5577 less likely. */
5578 if (regno >= FIRST_PSEUDO_REGISTER)
5579 {
5580 REG_N_REFS (regno) += (optimize_size ? 1
5581 : pbi->bb->loop_depth + 1);
5582 REG_N_SETS (regno)++;
5583 }
5584 return 1;
5585 }
5586 return 0;
5587 }
5588
5589 /* Try to change INSN so that it does pre-increment or pre-decrement
5590 addressing on register REG in order to add AMOUNT to REG.
5591 AMOUNT is negative for pre-decrement.
5592 Returns 1 if the change could be made.
5593 This checks all about the validity of the result of modifying INSN. */
5594
5595 static int
5596 try_pre_increment (insn, reg, amount)
5597 rtx insn, reg;
5598 HOST_WIDE_INT amount;
5599 {
5600 register rtx use;
5601
5602 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5603 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5604 int pre_ok = 0;
5605 /* Nonzero if we can try to make a post-increment or post-decrement.
5606 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5607 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5608 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5609 int post_ok = 0;
5610
5611 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5612 int do_post = 0;
5613
5614 /* From the sign of increment, see which possibilities are conceivable
5615 on this target machine. */
5616 if (HAVE_PRE_INCREMENT && amount > 0)
5617 pre_ok = 1;
5618 if (HAVE_POST_INCREMENT && amount > 0)
5619 post_ok = 1;
5620
5621 if (HAVE_PRE_DECREMENT && amount < 0)
5622 pre_ok = 1;
5623 if (HAVE_POST_DECREMENT && amount < 0)
5624 post_ok = 1;
5625
5626 if (! (pre_ok || post_ok))
5627 return 0;
5628
5629 /* It is not safe to add a side effect to a jump insn
5630 because if the incremented register is spilled and must be reloaded
5631 there would be no way to store the incremented value back in memory. */
5632
5633 if (GET_CODE (insn) == JUMP_INSN)
5634 return 0;
5635
5636 use = 0;
5637 if (pre_ok)
5638 use = find_use_as_address (PATTERN (insn), reg, 0);
5639 if (post_ok && (use == 0 || use == (rtx) 1))
5640 {
5641 use = find_use_as_address (PATTERN (insn), reg, -amount);
5642 do_post = 1;
5643 }
5644
5645 if (use == 0 || use == (rtx) 1)
5646 return 0;
5647
5648 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5649 return 0;
5650
5651 /* See if this combination of instruction and addressing mode exists. */
5652 if (! validate_change (insn, &XEXP (use, 0),
5653 gen_rtx_fmt_e (amount > 0
5654 ? (do_post ? POST_INC : PRE_INC)
5655 : (do_post ? POST_DEC : PRE_DEC),
5656 Pmode, reg), 0))
5657 return 0;
5658
5659 /* Record that this insn now has an implicit side effect on X. */
5660 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5661 return 1;
5662 }
5663
5664 #endif /* AUTO_INC_DEC */
5665 \f
5666 /* Find the place in the rtx X where REG is used as a memory address.
5667 Return the MEM rtx that so uses it.
5668 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5669 (plus REG (const_int PLUSCONST)).
5670
5671 If such an address does not appear, return 0.
5672 If REG appears more than once, or is used other than in such an address,
5673 return (rtx)1. */
5674
5675 rtx
5676 find_use_as_address (x, reg, plusconst)
5677 register rtx x;
5678 rtx reg;
5679 HOST_WIDE_INT plusconst;
5680 {
5681 enum rtx_code code = GET_CODE (x);
5682 const char *fmt = GET_RTX_FORMAT (code);
5683 register int i;
5684 register rtx value = 0;
5685 register rtx tem;
5686
5687 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5688 return x;
5689
5690 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5691 && XEXP (XEXP (x, 0), 0) == reg
5692 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5693 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5694 return x;
5695
5696 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5697 {
5698 /* If REG occurs inside a MEM used in a bit-field reference,
5699 that is unacceptable. */
5700 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5701 return (rtx) (HOST_WIDE_INT) 1;
5702 }
5703
5704 if (x == reg)
5705 return (rtx) (HOST_WIDE_INT) 1;
5706
5707 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5708 {
5709 if (fmt[i] == 'e')
5710 {
5711 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5712 if (value == 0)
5713 value = tem;
5714 else if (tem != 0)
5715 return (rtx) (HOST_WIDE_INT) 1;
5716 }
5717 else if (fmt[i] == 'E')
5718 {
5719 register int j;
5720 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5721 {
5722 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5723 if (value == 0)
5724 value = tem;
5725 else if (tem != 0)
5726 return (rtx) (HOST_WIDE_INT) 1;
5727 }
5728 }
5729 }
5730
5731 return value;
5732 }
5733 \f
5734 /* Write information about registers and basic blocks into FILE.
5735 This is part of making a debugging dump. */
5736
5737 void
5738 dump_regset (r, outf)
5739 regset r;
5740 FILE *outf;
5741 {
5742 int i;
5743 if (r == NULL)
5744 {
5745 fputs (" (nil)", outf);
5746 return;
5747 }
5748
5749 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5750 {
5751 fprintf (outf, " %d", i);
5752 if (i < FIRST_PSEUDO_REGISTER)
5753 fprintf (outf, " [%s]",
5754 reg_names[i]);
5755 });
5756 }
5757
5758 void
5759 debug_regset (r)
5760 regset r;
5761 {
5762 dump_regset (r, stderr);
5763 putc ('\n', stderr);
5764 }
5765
5766 void
5767 dump_flow_info (file)
5768 FILE *file;
5769 {
5770 register int i;
5771 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5772
5773 fprintf (file, "%d registers.\n", max_regno);
5774 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5775 if (REG_N_REFS (i))
5776 {
5777 enum reg_class class, altclass;
5778 fprintf (file, "\nRegister %d used %d times across %d insns",
5779 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5780 if (REG_BASIC_BLOCK (i) >= 0)
5781 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5782 if (REG_N_SETS (i))
5783 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5784 (REG_N_SETS (i) == 1) ? "" : "s");
5785 if (REG_USERVAR_P (regno_reg_rtx[i]))
5786 fprintf (file, "; user var");
5787 if (REG_N_DEATHS (i) != 1)
5788 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5789 if (REG_N_CALLS_CROSSED (i) == 1)
5790 fprintf (file, "; crosses 1 call");
5791 else if (REG_N_CALLS_CROSSED (i))
5792 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5793 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5794 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5795 class = reg_preferred_class (i);
5796 altclass = reg_alternate_class (i);
5797 if (class != GENERAL_REGS || altclass != ALL_REGS)
5798 {
5799 if (altclass == ALL_REGS || class == ALL_REGS)
5800 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5801 else if (altclass == NO_REGS)
5802 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5803 else
5804 fprintf (file, "; pref %s, else %s",
5805 reg_class_names[(int) class],
5806 reg_class_names[(int) altclass]);
5807 }
5808 if (REGNO_POINTER_FLAG (i))
5809 fprintf (file, "; pointer");
5810 fprintf (file, ".\n");
5811 }
5812
5813 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5814 for (i = 0; i < n_basic_blocks; i++)
5815 {
5816 register basic_block bb = BASIC_BLOCK (i);
5817 register edge e;
5818
5819 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5820 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5821
5822 fprintf (file, "Predecessors: ");
5823 for (e = bb->pred; e ; e = e->pred_next)
5824 dump_edge_info (file, e, 0);
5825
5826 fprintf (file, "\nSuccessors: ");
5827 for (e = bb->succ; e ; e = e->succ_next)
5828 dump_edge_info (file, e, 1);
5829
5830 fprintf (file, "\nRegisters live at start:");
5831 dump_regset (bb->global_live_at_start, file);
5832
5833 fprintf (file, "\nRegisters live at end:");
5834 dump_regset (bb->global_live_at_end, file);
5835
5836 putc('\n', file);
5837 }
5838
5839 putc('\n', file);
5840 }
5841
5842 void
5843 debug_flow_info ()
5844 {
5845 dump_flow_info (stderr);
5846 }
5847
5848 static void
5849 dump_edge_info (file, e, do_succ)
5850 FILE *file;
5851 edge e;
5852 int do_succ;
5853 {
5854 basic_block side = (do_succ ? e->dest : e->src);
5855
5856 if (side == ENTRY_BLOCK_PTR)
5857 fputs (" ENTRY", file);
5858 else if (side == EXIT_BLOCK_PTR)
5859 fputs (" EXIT", file);
5860 else
5861 fprintf (file, " %d", side->index);
5862
5863 if (e->count)
5864 fprintf (file, " count:%d", e->count);
5865
5866 if (e->flags)
5867 {
5868 static const char * const bitnames[] = {
5869 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5870 };
5871 int comma = 0;
5872 int i, flags = e->flags;
5873
5874 fputc (' ', file);
5875 fputc ('(', file);
5876 for (i = 0; flags; i++)
5877 if (flags & (1 << i))
5878 {
5879 flags &= ~(1 << i);
5880
5881 if (comma)
5882 fputc (',', file);
5883 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5884 fputs (bitnames[i], file);
5885 else
5886 fprintf (file, "%d", i);
5887 comma = 1;
5888 }
5889 fputc (')', file);
5890 }
5891 }
5892
5893 \f
5894 /* Print out one basic block with live information at start and end. */
5895 void
5896 dump_bb (bb, outf)
5897 basic_block bb;
5898 FILE *outf;
5899 {
5900 rtx insn;
5901 rtx last;
5902 edge e;
5903
5904 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5905 bb->index, bb->loop_depth, bb->count);
5906 if (bb->eh_beg != -1 || bb->eh_end != -1)
5907 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5908 putc ('\n', outf);
5909
5910 fputs (";; Predecessors: ", outf);
5911 for (e = bb->pred; e ; e = e->pred_next)
5912 dump_edge_info (outf, e, 0);
5913 putc ('\n', outf);
5914
5915 fputs (";; Registers live at start:", outf);
5916 dump_regset (bb->global_live_at_start, outf);
5917 putc ('\n', outf);
5918
5919 for (insn = bb->head, last = NEXT_INSN (bb->end);
5920 insn != last;
5921 insn = NEXT_INSN (insn))
5922 print_rtl_single (outf, insn);
5923
5924 fputs (";; Registers live at end:", outf);
5925 dump_regset (bb->global_live_at_end, outf);
5926 putc ('\n', outf);
5927
5928 fputs (";; Successors: ", outf);
5929 for (e = bb->succ; e; e = e->succ_next)
5930 dump_edge_info (outf, e, 1);
5931 putc ('\n', outf);
5932 }
5933
5934 void
5935 debug_bb (bb)
5936 basic_block bb;
5937 {
5938 dump_bb (bb, stderr);
5939 }
5940
5941 void
5942 debug_bb_n (n)
5943 int n;
5944 {
5945 dump_bb (BASIC_BLOCK(n), stderr);
5946 }
5947
5948 /* Like print_rtl, but also print out live information for the start of each
5949 basic block. */
5950
5951 void
5952 print_rtl_with_bb (outf, rtx_first)
5953 FILE *outf;
5954 rtx rtx_first;
5955 {
5956 register rtx tmp_rtx;
5957
5958 if (rtx_first == 0)
5959 fprintf (outf, "(nil)\n");
5960 else
5961 {
5962 int i;
5963 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5964 int max_uid = get_max_uid ();
5965 basic_block *start = (basic_block *)
5966 xcalloc (max_uid, sizeof (basic_block));
5967 basic_block *end = (basic_block *)
5968 xcalloc (max_uid, sizeof (basic_block));
5969 enum bb_state *in_bb_p = (enum bb_state *)
5970 xcalloc (max_uid, sizeof (enum bb_state));
5971
5972 for (i = n_basic_blocks - 1; i >= 0; i--)
5973 {
5974 basic_block bb = BASIC_BLOCK (i);
5975 rtx x;
5976
5977 start[INSN_UID (bb->head)] = bb;
5978 end[INSN_UID (bb->end)] = bb;
5979 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5980 {
5981 enum bb_state state = IN_MULTIPLE_BB;
5982 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5983 state = IN_ONE_BB;
5984 in_bb_p[INSN_UID(x)] = state;
5985
5986 if (x == bb->end)
5987 break;
5988 }
5989 }
5990
5991 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5992 {
5993 int did_output;
5994 basic_block bb;
5995
5996 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5997 {
5998 fprintf (outf, ";; Start of basic block %d, registers live:",
5999 bb->index);
6000 dump_regset (bb->global_live_at_start, outf);
6001 putc ('\n', outf);
6002 }
6003
6004 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
6005 && GET_CODE (tmp_rtx) != NOTE
6006 && GET_CODE (tmp_rtx) != BARRIER)
6007 fprintf (outf, ";; Insn is not within a basic block\n");
6008 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
6009 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6010
6011 did_output = print_rtl_single (outf, tmp_rtx);
6012
6013 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6014 {
6015 fprintf (outf, ";; End of basic block %d, registers live:\n",
6016 bb->index);
6017 dump_regset (bb->global_live_at_end, outf);
6018 putc ('\n', outf);
6019 }
6020
6021 if (did_output)
6022 putc ('\n', outf);
6023 }
6024
6025 free (start);
6026 free (end);
6027 free (in_bb_p);
6028 }
6029
6030 if (current_function_epilogue_delay_list != 0)
6031 {
6032 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6033 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6034 tmp_rtx = XEXP (tmp_rtx, 1))
6035 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6036 }
6037 }
6038
6039 /* Compute dominator relationships using new flow graph structures. */
6040 void
6041 compute_flow_dominators (dominators, post_dominators)
6042 sbitmap *dominators;
6043 sbitmap *post_dominators;
6044 {
6045 int bb;
6046 sbitmap *temp_bitmap;
6047 edge e;
6048 basic_block *worklist, *workend, *qin, *qout;
6049 int qlen;
6050
6051 /* Allocate a worklist array/queue. Entries are only added to the
6052 list if they were not already on the list. So the size is
6053 bounded by the number of basic blocks. */
6054 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6055 workend = &worklist[n_basic_blocks];
6056
6057 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6058 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6059
6060 if (dominators)
6061 {
6062 /* The optimistic setting of dominators requires us to put every
6063 block on the work list initially. */
6064 qin = qout = worklist;
6065 for (bb = 0; bb < n_basic_blocks; bb++)
6066 {
6067 *qin++ = BASIC_BLOCK (bb);
6068 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6069 }
6070 qlen = n_basic_blocks;
6071 qin = worklist;
6072
6073 /* We want a maximal solution, so initially assume everything dominates
6074 everything else. */
6075 sbitmap_vector_ones (dominators, n_basic_blocks);
6076
6077 /* Mark successors of the entry block so we can identify them below. */
6078 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6079 e->dest->aux = ENTRY_BLOCK_PTR;
6080
6081 /* Iterate until the worklist is empty. */
6082 while (qlen)
6083 {
6084 /* Take the first entry off the worklist. */
6085 basic_block b = *qout++;
6086 if (qout >= workend)
6087 qout = worklist;
6088 qlen--;
6089
6090 bb = b->index;
6091
6092 /* Compute the intersection of the dominators of all the
6093 predecessor blocks.
6094
6095 If one of the predecessor blocks is the ENTRY block, then the
6096 intersection of the dominators of the predecessor blocks is
6097 defined as the null set. We can identify such blocks by the
6098 special value in the AUX field in the block structure. */
6099 if (b->aux == ENTRY_BLOCK_PTR)
6100 {
6101 /* Do not clear the aux field for blocks which are
6102 successors of the ENTRY block. That way we we never
6103 add them to the worklist again.
6104
6105 The intersect of dominators of the preds of this block is
6106 defined as the null set. */
6107 sbitmap_zero (temp_bitmap[bb]);
6108 }
6109 else
6110 {
6111 /* Clear the aux field of this block so it can be added to
6112 the worklist again if necessary. */
6113 b->aux = NULL;
6114 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6115 }
6116
6117 /* Make sure each block always dominates itself. */
6118 SET_BIT (temp_bitmap[bb], bb);
6119
6120 /* If the out state of this block changed, then we need to
6121 add the successors of this block to the worklist if they
6122 are not already on the worklist. */
6123 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6124 {
6125 for (e = b->succ; e; e = e->succ_next)
6126 {
6127 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6128 {
6129 *qin++ = e->dest;
6130 if (qin >= workend)
6131 qin = worklist;
6132 qlen++;
6133
6134 e->dest->aux = e;
6135 }
6136 }
6137 }
6138 }
6139 }
6140
6141 if (post_dominators)
6142 {
6143 /* The optimistic setting of dominators requires us to put every
6144 block on the work list initially. */
6145 qin = qout = worklist;
6146 for (bb = 0; bb < n_basic_blocks; bb++)
6147 {
6148 *qin++ = BASIC_BLOCK (bb);
6149 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6150 }
6151 qlen = n_basic_blocks;
6152 qin = worklist;
6153
6154 /* We want a maximal solution, so initially assume everything post
6155 dominates everything else. */
6156 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6157
6158 /* Mark predecessors of the exit block so we can identify them below. */
6159 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6160 e->src->aux = EXIT_BLOCK_PTR;
6161
6162 /* Iterate until the worklist is empty. */
6163 while (qlen)
6164 {
6165 /* Take the first entry off the worklist. */
6166 basic_block b = *qout++;
6167 if (qout >= workend)
6168 qout = worklist;
6169 qlen--;
6170
6171 bb = b->index;
6172
6173 /* Compute the intersection of the post dominators of all the
6174 successor blocks.
6175
6176 If one of the successor blocks is the EXIT block, then the
6177 intersection of the dominators of the successor blocks is
6178 defined as the null set. We can identify such blocks by the
6179 special value in the AUX field in the block structure. */
6180 if (b->aux == EXIT_BLOCK_PTR)
6181 {
6182 /* Do not clear the aux field for blocks which are
6183 predecessors of the EXIT block. That way we we never
6184 add them to the worklist again.
6185
6186 The intersect of dominators of the succs of this block is
6187 defined as the null set. */
6188 sbitmap_zero (temp_bitmap[bb]);
6189 }
6190 else
6191 {
6192 /* Clear the aux field of this block so it can be added to
6193 the worklist again if necessary. */
6194 b->aux = NULL;
6195 sbitmap_intersection_of_succs (temp_bitmap[bb],
6196 post_dominators, bb);
6197 }
6198
6199 /* Make sure each block always post dominates itself. */
6200 SET_BIT (temp_bitmap[bb], bb);
6201
6202 /* If the out state of this block changed, then we need to
6203 add the successors of this block to the worklist if they
6204 are not already on the worklist. */
6205 if (sbitmap_a_and_b (post_dominators[bb],
6206 post_dominators[bb],
6207 temp_bitmap[bb]))
6208 {
6209 for (e = b->pred; e; e = e->pred_next)
6210 {
6211 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6212 {
6213 *qin++ = e->src;
6214 if (qin >= workend)
6215 qin = worklist;
6216 qlen++;
6217
6218 e->src->aux = e;
6219 }
6220 }
6221 }
6222 }
6223 }
6224
6225 free (worklist);
6226 free (temp_bitmap);
6227 }
6228
6229 /* Given DOMINATORS, compute the immediate dominators into IDOM. If a
6230 block dominates only itself, its entry remains as INVALID_BLOCK. */
6231
6232 void
6233 compute_immediate_dominators (idom, dominators)
6234 int *idom;
6235 sbitmap *dominators;
6236 {
6237 sbitmap *tmp;
6238 int b;
6239
6240 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6241
6242 /* Begin with tmp(n) = dom(n) - { n }. */
6243 for (b = n_basic_blocks; --b >= 0; )
6244 {
6245 sbitmap_copy (tmp[b], dominators[b]);
6246 RESET_BIT (tmp[b], b);
6247 }
6248
6249 /* Subtract out all of our dominator's dominators. */
6250 for (b = n_basic_blocks; --b >= 0; )
6251 {
6252 sbitmap tmp_b = tmp[b];
6253 int s;
6254
6255 for (s = n_basic_blocks; --s >= 0; )
6256 if (TEST_BIT (tmp_b, s))
6257 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6258 }
6259
6260 /* Find the one bit set in the bitmap and put it in the output array. */
6261 for (b = n_basic_blocks; --b >= 0; )
6262 {
6263 int t;
6264 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6265 }
6266
6267 sbitmap_vector_free (tmp);
6268 }
6269
6270 /* Given POSTDOMINATORS, compute the immediate postdominators into
6271 IDOM. If a block is only dominated by itself, its entry remains as
6272 INVALID_BLOCK. */
6273
6274 void
6275 compute_immediate_postdominators (idom, postdominators)
6276 int *idom;
6277 sbitmap *postdominators;
6278 {
6279 compute_immediate_dominators (idom, postdominators);
6280 }
6281
6282 /* Recompute register set/reference counts immediately prior to register
6283 allocation.
6284
6285 This avoids problems with set/reference counts changing to/from values
6286 which have special meanings to the register allocators.
6287
6288 Additionally, the reference counts are the primary component used by the
6289 register allocators to prioritize pseudos for allocation to hard regs.
6290 More accurate reference counts generally lead to better register allocation.
6291
6292 F is the first insn to be scanned.
6293
6294 LOOP_STEP denotes how much loop_depth should be incremented per
6295 loop nesting level in order to increase the ref count more for
6296 references in a loop.
6297
6298 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6299 possibly other information which is used by the register allocators. */
6300
6301 void
6302 recompute_reg_usage (f, loop_step)
6303 rtx f ATTRIBUTE_UNUSED;
6304 int loop_step ATTRIBUTE_UNUSED;
6305 {
6306 allocate_reg_life_data ();
6307 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6308 }
6309
6310 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6311 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6312 of the number of registers that died. */
6313
6314 int
6315 count_or_remove_death_notes (blocks, kill)
6316 sbitmap blocks;
6317 int kill;
6318 {
6319 int i, count = 0;
6320
6321 for (i = n_basic_blocks - 1; i >= 0; --i)
6322 {
6323 basic_block bb;
6324 rtx insn;
6325
6326 if (blocks && ! TEST_BIT (blocks, i))
6327 continue;
6328
6329 bb = BASIC_BLOCK (i);
6330
6331 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6332 {
6333 if (INSN_P (insn))
6334 {
6335 rtx *pprev = &REG_NOTES (insn);
6336 rtx link = *pprev;
6337
6338 while (link)
6339 {
6340 switch (REG_NOTE_KIND (link))
6341 {
6342 case REG_DEAD:
6343 if (GET_CODE (XEXP (link, 0)) == REG)
6344 {
6345 rtx reg = XEXP (link, 0);
6346 int n;
6347
6348 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6349 n = 1;
6350 else
6351 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6352 count += n;
6353 }
6354 /* FALLTHRU */
6355
6356 case REG_UNUSED:
6357 if (kill)
6358 {
6359 rtx next = XEXP (link, 1);
6360 free_EXPR_LIST_node (link);
6361 *pprev = link = next;
6362 break;
6363 }
6364 /* FALLTHRU */
6365
6366 default:
6367 pprev = &XEXP (link, 1);
6368 link = *pprev;
6369 break;
6370 }
6371 }
6372 }
6373
6374 if (insn == bb->end)
6375 break;
6376 }
6377 }
6378
6379 return count;
6380 }
6381
6382 /* Record INSN's block as BB. */
6383
6384 void
6385 set_block_for_insn (insn, bb)
6386 rtx insn;
6387 basic_block bb;
6388 {
6389 size_t uid = INSN_UID (insn);
6390 if (uid >= basic_block_for_insn->num_elements)
6391 {
6392 int new_size;
6393
6394 /* Add one-eighth the size so we don't keep calling xrealloc. */
6395 new_size = uid + (uid + 7) / 8;
6396
6397 VARRAY_GROW (basic_block_for_insn, new_size);
6398 }
6399 VARRAY_BB (basic_block_for_insn, uid) = bb;
6400 }
6401
6402 /* Record INSN's block number as BB. */
6403 /* ??? This has got to go. */
6404
6405 void
6406 set_block_num (insn, bb)
6407 rtx insn;
6408 int bb;
6409 {
6410 set_block_for_insn (insn, BASIC_BLOCK (bb));
6411 }
6412 \f
6413 /* Verify the CFG consistency. This function check some CFG invariants and
6414 aborts when something is wrong. Hope that this function will help to
6415 convert many optimization passes to preserve CFG consistent.
6416
6417 Currently it does following checks:
6418
6419 - test head/end pointers
6420 - overlapping of basic blocks
6421 - edge list corectness
6422 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6423 - tails of basic blocks (ensure that boundary is necesary)
6424 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6425 and NOTE_INSN_BASIC_BLOCK
6426 - check that all insns are in the basic blocks
6427 (except the switch handling code, barriers and notes)
6428 - check that all returns are followed by barriers
6429
6430 In future it can be extended check a lot of other stuff as well
6431 (reachability of basic blocks, life information, etc. etc.). */
6432
6433 void
6434 verify_flow_info ()
6435 {
6436 const int max_uid = get_max_uid ();
6437 const rtx rtx_first = get_insns ();
6438 rtx last_head = get_last_insn ();
6439 basic_block *bb_info;
6440 rtx x;
6441 int i, last_bb_num_seen, num_bb_notes, err = 0;
6442
6443 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6444
6445 for (i = n_basic_blocks - 1; i >= 0; i--)
6446 {
6447 basic_block bb = BASIC_BLOCK (i);
6448 rtx head = bb->head;
6449 rtx end = bb->end;
6450
6451 /* Verify the end of the basic block is in the INSN chain. */
6452 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6453 if (x == end)
6454 break;
6455 if (!x)
6456 {
6457 error ("End insn %d for block %d not found in the insn stream.",
6458 INSN_UID (end), bb->index);
6459 err = 1;
6460 }
6461
6462 /* Work backwards from the end to the head of the basic block
6463 to verify the head is in the RTL chain. */
6464 for ( ; x != NULL_RTX; x = PREV_INSN (x))
6465 {
6466 /* While walking over the insn chain, verify insns appear
6467 in only one basic block and initialize the BB_INFO array
6468 used by other passes. */
6469 if (bb_info[INSN_UID (x)] != NULL)
6470 {
6471 error ("Insn %d is in multiple basic blocks (%d and %d)",
6472 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6473 err = 1;
6474 }
6475 bb_info[INSN_UID (x)] = bb;
6476
6477 if (x == head)
6478 break;
6479 }
6480 if (!x)
6481 {
6482 error ("Head insn %d for block %d not found in the insn stream.",
6483 INSN_UID (head), bb->index);
6484 err = 1;
6485 }
6486
6487 last_head = x;
6488 }
6489
6490 /* Now check the basic blocks (boundaries etc.) */
6491 for (i = n_basic_blocks - 1; i >= 0; i--)
6492 {
6493 basic_block bb = BASIC_BLOCK (i);
6494 /* Check corectness of edge lists */
6495 edge e;
6496
6497 e = bb->succ;
6498 while (e)
6499 {
6500 if (e->src != bb)
6501 {
6502 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6503 bb->index);
6504 fprintf (stderr, "Predecessor: ");
6505 dump_edge_info (stderr, e, 0);
6506 fprintf (stderr, "\nSuccessor: ");
6507 dump_edge_info (stderr, e, 1);
6508 fflush (stderr);
6509 err = 1;
6510 }
6511 if (e->dest != EXIT_BLOCK_PTR)
6512 {
6513 edge e2 = e->dest->pred;
6514 while (e2 && e2 != e)
6515 e2 = e2->pred_next;
6516 if (!e2)
6517 {
6518 error ("Basic block %i edge lists are corrupted", bb->index);
6519 err = 1;
6520 }
6521 }
6522 e = e->succ_next;
6523 }
6524
6525 e = bb->pred;
6526 while (e)
6527 {
6528 if (e->dest != bb)
6529 {
6530 error ("Basic block %d pred edge is corrupted", bb->index);
6531 fputs ("Predecessor: ", stderr);
6532 dump_edge_info (stderr, e, 0);
6533 fputs ("\nSuccessor: ", stderr);
6534 dump_edge_info (stderr, e, 1);
6535 fputc ('\n', stderr);
6536 err = 1;
6537 }
6538 if (e->src != ENTRY_BLOCK_PTR)
6539 {
6540 edge e2 = e->src->succ;
6541 while (e2 && e2 != e)
6542 e2 = e2->succ_next;
6543 if (!e2)
6544 {
6545 error ("Basic block %i edge lists are corrupted", bb->index);
6546 err = 1;
6547 }
6548 }
6549 e = e->pred_next;
6550 }
6551
6552 /* OK pointers are correct. Now check the header of basic
6553 block. It ought to contain optional CODE_LABEL followed
6554 by NOTE_BASIC_BLOCK. */
6555 x = bb->head;
6556 if (GET_CODE (x) == CODE_LABEL)
6557 {
6558 if (bb->end == x)
6559 {
6560 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6561 bb->index);
6562 err = 1;
6563 }
6564 x = NEXT_INSN (x);
6565 }
6566 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6567 {
6568 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6569 bb->index);
6570 err = 1;
6571 }
6572
6573 if (bb->end == x)
6574 {
6575 /* Do checks for empty blocks here */
6576 }
6577 else
6578 {
6579 x = NEXT_INSN (x);
6580 while (x)
6581 {
6582 if (NOTE_INSN_BASIC_BLOCK_P (x))
6583 {
6584 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6585 INSN_UID (x), bb->index);
6586 err = 1;
6587 }
6588
6589 if (x == bb->end)
6590 break;
6591
6592 if (GET_CODE (x) == JUMP_INSN
6593 || GET_CODE (x) == CODE_LABEL
6594 || GET_CODE (x) == BARRIER)
6595 {
6596 error ("In basic block %d:", bb->index);
6597 fatal_insn ("Flow control insn inside a basic block", x);
6598 }
6599
6600 x = NEXT_INSN (x);
6601 }
6602 }
6603 }
6604
6605 last_bb_num_seen = -1;
6606 num_bb_notes = 0;
6607 x = rtx_first;
6608 while (x)
6609 {
6610 if (NOTE_INSN_BASIC_BLOCK_P (x))
6611 {
6612 basic_block bb = NOTE_BASIC_BLOCK (x);
6613 num_bb_notes++;
6614 if (bb->index != last_bb_num_seen + 1)
6615 fatal ("Basic blocks not numbered consecutively");
6616 last_bb_num_seen = bb->index;
6617 }
6618
6619 if (!bb_info[INSN_UID (x)])
6620 {
6621 switch (GET_CODE (x))
6622 {
6623 case BARRIER:
6624 case NOTE:
6625 break;
6626
6627 case CODE_LABEL:
6628 /* An addr_vec is placed outside any block block. */
6629 if (NEXT_INSN (x)
6630 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6631 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6632 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6633 {
6634 x = NEXT_INSN (x);
6635 }
6636
6637 /* But in any case, non-deletable labels can appear anywhere. */
6638 break;
6639
6640 default:
6641 fatal_insn ("Insn outside basic block", x);
6642 }
6643 }
6644
6645 if (INSN_P (x)
6646 && GET_CODE (x) == JUMP_INSN
6647 && returnjump_p (x) && ! condjump_p (x)
6648 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6649 fatal_insn ("Return not followed by barrier", x);
6650
6651 x = NEXT_INSN (x);
6652 }
6653
6654 if (num_bb_notes != n_basic_blocks)
6655 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6656 num_bb_notes, n_basic_blocks);
6657
6658 if (err)
6659 abort ();
6660
6661 /* Clean up. */
6662 free (bb_info);
6663 }
6664 \f
6665 /* Functions to access an edge list with a vector representation.
6666 Enough data is kept such that given an index number, the
6667 pred and succ that edge represents can be determined, or
6668 given a pred and a succ, its index number can be returned.
6669 This allows algorithms which consume a lot of memory to
6670 represent the normally full matrix of edge (pred,succ) with a
6671 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6672 wasted space in the client code due to sparse flow graphs. */
6673
6674 /* This functions initializes the edge list. Basically the entire
6675 flowgraph is processed, and all edges are assigned a number,
6676 and the data structure is filled in. */
6677 struct edge_list *
6678 create_edge_list ()
6679 {
6680 struct edge_list *elist;
6681 edge e;
6682 int num_edges;
6683 int x;
6684 int block_count;
6685
6686 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6687
6688 num_edges = 0;
6689
6690 /* Determine the number of edges in the flow graph by counting successor
6691 edges on each basic block. */
6692 for (x = 0; x < n_basic_blocks; x++)
6693 {
6694 basic_block bb = BASIC_BLOCK (x);
6695
6696 for (e = bb->succ; e; e = e->succ_next)
6697 num_edges++;
6698 }
6699 /* Don't forget successors of the entry block. */
6700 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6701 num_edges++;
6702
6703 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6704 elist->num_blocks = block_count;
6705 elist->num_edges = num_edges;
6706 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6707
6708 num_edges = 0;
6709
6710 /* Follow successors of the entry block, and register these edges. */
6711 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6712 {
6713 elist->index_to_edge[num_edges] = e;
6714 num_edges++;
6715 }
6716
6717 for (x = 0; x < n_basic_blocks; x++)
6718 {
6719 basic_block bb = BASIC_BLOCK (x);
6720
6721 /* Follow all successors of blocks, and register these edges. */
6722 for (e = bb->succ; e; e = e->succ_next)
6723 {
6724 elist->index_to_edge[num_edges] = e;
6725 num_edges++;
6726 }
6727 }
6728 return elist;
6729 }
6730
6731 /* This function free's memory associated with an edge list. */
6732 void
6733 free_edge_list (elist)
6734 struct edge_list *elist;
6735 {
6736 if (elist)
6737 {
6738 free (elist->index_to_edge);
6739 free (elist);
6740 }
6741 }
6742
6743 /* This function provides debug output showing an edge list. */
6744 void
6745 print_edge_list (f, elist)
6746 FILE *f;
6747 struct edge_list *elist;
6748 {
6749 int x;
6750 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6751 elist->num_blocks - 2, elist->num_edges);
6752
6753 for (x = 0; x < elist->num_edges; x++)
6754 {
6755 fprintf (f, " %-4d - edge(", x);
6756 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6757 fprintf (f,"entry,");
6758 else
6759 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6760
6761 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6762 fprintf (f,"exit)\n");
6763 else
6764 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6765 }
6766 }
6767
6768 /* This function provides an internal consistency check of an edge list,
6769 verifying that all edges are present, and that there are no
6770 extra edges. */
6771 void
6772 verify_edge_list (f, elist)
6773 FILE *f;
6774 struct edge_list *elist;
6775 {
6776 int x, pred, succ, index;
6777 edge e;
6778
6779 for (x = 0; x < n_basic_blocks; x++)
6780 {
6781 basic_block bb = BASIC_BLOCK (x);
6782
6783 for (e = bb->succ; e; e = e->succ_next)
6784 {
6785 pred = e->src->index;
6786 succ = e->dest->index;
6787 index = EDGE_INDEX (elist, e->src, e->dest);
6788 if (index == EDGE_INDEX_NO_EDGE)
6789 {
6790 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6791 continue;
6792 }
6793 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6794 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6795 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6796 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6797 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6798 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6799 }
6800 }
6801 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6802 {
6803 pred = e->src->index;
6804 succ = e->dest->index;
6805 index = EDGE_INDEX (elist, e->src, e->dest);
6806 if (index == EDGE_INDEX_NO_EDGE)
6807 {
6808 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6809 continue;
6810 }
6811 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6812 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6813 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6814 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6815 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6816 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6817 }
6818 /* We've verified that all the edges are in the list, no lets make sure
6819 there are no spurious edges in the list. */
6820
6821 for (pred = 0 ; pred < n_basic_blocks; pred++)
6822 for (succ = 0 ; succ < n_basic_blocks; succ++)
6823 {
6824 basic_block p = BASIC_BLOCK (pred);
6825 basic_block s = BASIC_BLOCK (succ);
6826
6827 int found_edge = 0;
6828
6829 for (e = p->succ; e; e = e->succ_next)
6830 if (e->dest == s)
6831 {
6832 found_edge = 1;
6833 break;
6834 }
6835 for (e = s->pred; e; e = e->pred_next)
6836 if (e->src == p)
6837 {
6838 found_edge = 1;
6839 break;
6840 }
6841 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6842 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6843 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6844 pred, succ);
6845 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6846 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6847 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6848 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6849 BASIC_BLOCK (succ)));
6850 }
6851 for (succ = 0 ; succ < n_basic_blocks; succ++)
6852 {
6853 basic_block p = ENTRY_BLOCK_PTR;
6854 basic_block s = BASIC_BLOCK (succ);
6855
6856 int found_edge = 0;
6857
6858 for (e = p->succ; e; e = e->succ_next)
6859 if (e->dest == s)
6860 {
6861 found_edge = 1;
6862 break;
6863 }
6864 for (e = s->pred; e; e = e->pred_next)
6865 if (e->src == p)
6866 {
6867 found_edge = 1;
6868 break;
6869 }
6870 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6871 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6872 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6873 succ);
6874 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6875 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6876 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6877 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6878 BASIC_BLOCK (succ)));
6879 }
6880 for (pred = 0 ; pred < n_basic_blocks; pred++)
6881 {
6882 basic_block p = BASIC_BLOCK (pred);
6883 basic_block s = EXIT_BLOCK_PTR;
6884
6885 int found_edge = 0;
6886
6887 for (e = p->succ; e; e = e->succ_next)
6888 if (e->dest == s)
6889 {
6890 found_edge = 1;
6891 break;
6892 }
6893 for (e = s->pred; e; e = e->pred_next)
6894 if (e->src == p)
6895 {
6896 found_edge = 1;
6897 break;
6898 }
6899 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6900 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6901 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6902 pred);
6903 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6904 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6905 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6906 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6907 EXIT_BLOCK_PTR));
6908 }
6909 }
6910
6911 /* This routine will determine what, if any, edge there is between
6912 a specified predecessor and successor. */
6913 int
6914 find_edge_index (edge_list, pred, succ)
6915 struct edge_list *edge_list;
6916 basic_block pred, succ;
6917 {
6918 int x;
6919 for (x = 0; x < NUM_EDGES (edge_list); x++)
6920 {
6921 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6922 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6923 return x;
6924 }
6925 return (EDGE_INDEX_NO_EDGE);
6926 }
6927
6928 /* This function will remove an edge from the flow graph. */
6929 void
6930 remove_edge (e)
6931 edge e;
6932 {
6933 edge last_pred = NULL;
6934 edge last_succ = NULL;
6935 edge tmp;
6936 basic_block src, dest;
6937 src = e->src;
6938 dest = e->dest;
6939 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6940 last_succ = tmp;
6941
6942 if (!tmp)
6943 abort ();
6944 if (last_succ)
6945 last_succ->succ_next = e->succ_next;
6946 else
6947 src->succ = e->succ_next;
6948
6949 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6950 last_pred = tmp;
6951
6952 if (!tmp)
6953 abort ();
6954 if (last_pred)
6955 last_pred->pred_next = e->pred_next;
6956 else
6957 dest->pred = e->pred_next;
6958
6959 n_edges--;
6960 free (e);
6961 }
6962
6963 /* This routine will remove any fake successor edges for a basic block.
6964 When the edge is removed, it is also removed from whatever predecessor
6965 list it is in. */
6966 static void
6967 remove_fake_successors (bb)
6968 basic_block bb;
6969 {
6970 edge e;
6971 for (e = bb->succ; e ; )
6972 {
6973 edge tmp = e;
6974 e = e->succ_next;
6975 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6976 remove_edge (tmp);
6977 }
6978 }
6979
6980 /* This routine will remove all fake edges from the flow graph. If
6981 we remove all fake successors, it will automatically remove all
6982 fake predecessors. */
6983 void
6984 remove_fake_edges ()
6985 {
6986 int x;
6987
6988 for (x = 0; x < n_basic_blocks; x++)
6989 remove_fake_successors (BASIC_BLOCK (x));
6990
6991 /* We've handled all successors except the entry block's. */
6992 remove_fake_successors (ENTRY_BLOCK_PTR);
6993 }
6994
6995 /* This function will add a fake edge between any block which has no
6996 successors, and the exit block. Some data flow equations require these
6997 edges to exist. */
6998 void
6999 add_noreturn_fake_exit_edges ()
7000 {
7001 int x;
7002
7003 for (x = 0; x < n_basic_blocks; x++)
7004 if (BASIC_BLOCK (x)->succ == NULL)
7005 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7006 }
7007
7008 /* This function adds a fake edge between any infinite loops to the
7009 exit block. Some optimizations require a path from each node to
7010 the exit node.
7011
7012 See also Morgan, Figure 3.10, pp. 82-83.
7013
7014 The current implementation is ugly, not attempting to minimize the
7015 number of inserted fake edges. To reduce the number of fake edges
7016 to insert, add fake edges from _innermost_ loops containing only
7017 nodes not reachable from the exit block. */
7018 void
7019 connect_infinite_loops_to_exit ()
7020 {
7021 basic_block unvisited_block;
7022
7023 /* Perform depth-first search in the reverse graph to find nodes
7024 reachable from the exit block. */
7025 struct depth_first_search_dsS dfs_ds;
7026
7027 flow_dfs_compute_reverse_init (&dfs_ds);
7028 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7029
7030 /* Repeatedly add fake edges, updating the unreachable nodes. */
7031 while (1)
7032 {
7033 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7034 if (!unvisited_block)
7035 break;
7036 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7037 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7038 }
7039
7040 flow_dfs_compute_reverse_finish (&dfs_ds);
7041
7042 return;
7043 }
7044
7045 /* Redirect an edge's successor from one block to another. */
7046 void
7047 redirect_edge_succ (e, new_succ)
7048 edge e;
7049 basic_block new_succ;
7050 {
7051 edge *pe;
7052
7053 /* Disconnect the edge from the old successor block. */
7054 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
7055 continue;
7056 *pe = (*pe)->pred_next;
7057
7058 /* Reconnect the edge to the new successor block. */
7059 e->pred_next = new_succ->pred;
7060 new_succ->pred = e;
7061 e->dest = new_succ;
7062 }
7063
7064 /* Redirect an edge's predecessor from one block to another. */
7065 void
7066 redirect_edge_pred (e, new_pred)
7067 edge e;
7068 basic_block new_pred;
7069 {
7070 edge *pe;
7071
7072 /* Disconnect the edge from the old predecessor block. */
7073 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
7074 continue;
7075 *pe = (*pe)->succ_next;
7076
7077 /* Reconnect the edge to the new predecessor block. */
7078 e->succ_next = new_pred->succ;
7079 new_pred->succ = e;
7080 e->src = new_pred;
7081 }
7082 \f
7083 /* Dump the list of basic blocks in the bitmap NODES. */
7084 static void
7085 flow_nodes_print (str, nodes, file)
7086 const char *str;
7087 const sbitmap nodes;
7088 FILE *file;
7089 {
7090 int node;
7091
7092 fprintf (file, "%s { ", str);
7093 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7094 fputs ("}\n", file);
7095 }
7096
7097
7098 /* Dump the list of exiting edges in the array EDGES. */
7099 static void
7100 flow_exits_print (str, edges, num_edges, file)
7101 const char *str;
7102 const edge *edges;
7103 int num_edges;
7104 FILE *file;
7105 {
7106 int i;
7107
7108 fprintf (file, "%s { ", str);
7109 for (i = 0; i < num_edges; i++)
7110 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
7111 fputs ("}\n", file);
7112 }
7113
7114
7115 /* Dump loop related CFG information. */
7116 static void
7117 flow_loops_cfg_dump (loops, file)
7118 const struct loops *loops;
7119 FILE *file;
7120 {
7121 int i;
7122
7123 if (! loops->num || ! file || ! loops->cfg.dom)
7124 return;
7125
7126 for (i = 0; i < n_basic_blocks; i++)
7127 {
7128 edge succ;
7129
7130 fprintf (file, ";; %d succs { ", i);
7131 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7132 fprintf (file, "%d ", succ->dest->index);
7133 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7134 }
7135
7136
7137 /* Dump the DFS node order. */
7138 if (loops->cfg.dfs_order)
7139 {
7140 fputs (";; DFS order: ", file);
7141 for (i = 0; i < n_basic_blocks; i++)
7142 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7143 fputs ("\n", file);
7144 }
7145 /* Dump the reverse completion node order. */
7146 if (loops->cfg.rc_order)
7147 {
7148 fputs (";; RC order: ", file);
7149 for (i = 0; i < n_basic_blocks; i++)
7150 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7151 fputs ("\n", file);
7152 }
7153 }
7154
7155
7156 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7157 static int
7158 flow_loop_nested_p (outer, loop)
7159 struct loop *outer;
7160 struct loop *loop;
7161 {
7162 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7163 }
7164
7165
7166 /* Dump the loop information specified by LOOPS to the stream FILE. */
7167 void
7168 flow_loops_dump (loops, file, verbose)
7169 const struct loops *loops;
7170 FILE *file;
7171 int verbose;
7172 {
7173 int i;
7174 int num_loops;
7175
7176 num_loops = loops->num;
7177 if (! num_loops || ! file)
7178 return;
7179
7180 fprintf (file, ";; %d loops found, %d levels\n",
7181 num_loops, loops->levels);
7182
7183 for (i = 0; i < num_loops; i++)
7184 {
7185 struct loop *loop = &loops->array[i];
7186
7187 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7188 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7189 loop->header->index, loop->latch->index,
7190 loop->pre_header ? loop->pre_header->index : -1,
7191 loop->depth, loop->level,
7192 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7193 fprintf (file, ";; %d", loop->num_nodes);
7194 flow_nodes_print (" nodes", loop->nodes, file);
7195 fprintf (file, ";; %d", loop->num_exits);
7196 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7197
7198 if (loop->shared)
7199 {
7200 int j;
7201
7202 for (j = 0; j < i; j++)
7203 {
7204 struct loop *oloop = &loops->array[j];
7205
7206 if (loop->header == oloop->header)
7207 {
7208 int disjoint;
7209 int smaller;
7210
7211 smaller = loop->num_nodes < oloop->num_nodes;
7212
7213 /* If the union of LOOP and OLOOP is different than
7214 the larger of LOOP and OLOOP then LOOP and OLOOP
7215 must be disjoint. */
7216 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7217 smaller ? oloop : loop);
7218 fprintf (file,
7219 ";; loop header %d shared by loops %d, %d %s\n",
7220 loop->header->index, i, j,
7221 disjoint ? "disjoint" : "nested");
7222 }
7223 }
7224 }
7225
7226 if (verbose)
7227 {
7228 /* Print diagnostics to compare our concept of a loop with
7229 what the loop notes say. */
7230 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7231 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7232 != NOTE_INSN_LOOP_BEG)
7233 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7234 INSN_UID (PREV_INSN (loop->first->head)));
7235 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7236 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7237 != NOTE_INSN_LOOP_END)
7238 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7239 INSN_UID (NEXT_INSN (loop->last->end)));
7240 }
7241 }
7242
7243 if (verbose)
7244 flow_loops_cfg_dump (loops, file);
7245 }
7246
7247
7248 /* Free all the memory allocated for LOOPS. */
7249 void
7250 flow_loops_free (loops)
7251 struct loops *loops;
7252 {
7253 if (loops->array)
7254 {
7255 int i;
7256
7257 if (! loops->num)
7258 abort ();
7259
7260 /* Free the loop descriptors. */
7261 for (i = 0; i < loops->num; i++)
7262 {
7263 struct loop *loop = &loops->array[i];
7264
7265 if (loop->nodes)
7266 sbitmap_free (loop->nodes);
7267 if (loop->exits)
7268 free (loop->exits);
7269 }
7270 free (loops->array);
7271 loops->array = NULL;
7272
7273 if (loops->cfg.dom)
7274 sbitmap_vector_free (loops->cfg.dom);
7275 if (loops->cfg.dfs_order)
7276 free (loops->cfg.dfs_order);
7277
7278 sbitmap_free (loops->shared_headers);
7279 }
7280 }
7281
7282
7283 /* Find the exits from the loop using the bitmap of loop nodes NODES
7284 and store in EXITS array. Return the number of exits from the
7285 loop. */
7286 static int
7287 flow_loop_exits_find (nodes, exits)
7288 const sbitmap nodes;
7289 edge **exits;
7290 {
7291 edge e;
7292 int node;
7293 int num_exits;
7294
7295 *exits = NULL;
7296
7297 /* Check all nodes within the loop to see if there are any
7298 successors not in the loop. Note that a node may have multiple
7299 exiting edges. */
7300 num_exits = 0;
7301 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7302 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7303 {
7304 basic_block dest = e->dest;
7305
7306 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7307 num_exits++;
7308 }
7309 });
7310
7311 if (! num_exits)
7312 return 0;
7313
7314 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7315
7316 /* Store all exiting edges into an array. */
7317 num_exits = 0;
7318 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7319 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7320 {
7321 basic_block dest = e->dest;
7322
7323 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7324 (*exits)[num_exits++] = e;
7325 }
7326 });
7327
7328 return num_exits;
7329 }
7330
7331
7332 /* Find the nodes contained within the loop with header HEADER and
7333 latch LATCH and store in NODES. Return the number of nodes within
7334 the loop. */
7335 static int
7336 flow_loop_nodes_find (header, latch, nodes)
7337 basic_block header;
7338 basic_block latch;
7339 sbitmap nodes;
7340 {
7341 basic_block *stack;
7342 int sp;
7343 int num_nodes = 0;
7344
7345 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7346 sp = 0;
7347
7348 /* Start with only the loop header in the set of loop nodes. */
7349 sbitmap_zero (nodes);
7350 SET_BIT (nodes, header->index);
7351 num_nodes++;
7352 header->loop_depth++;
7353
7354 /* Push the loop latch on to the stack. */
7355 if (! TEST_BIT (nodes, latch->index))
7356 {
7357 SET_BIT (nodes, latch->index);
7358 latch->loop_depth++;
7359 num_nodes++;
7360 stack[sp++] = latch;
7361 }
7362
7363 while (sp)
7364 {
7365 basic_block node;
7366 edge e;
7367
7368 node = stack[--sp];
7369 for (e = node->pred; e; e = e->pred_next)
7370 {
7371 basic_block ancestor = e->src;
7372
7373 /* If each ancestor not marked as part of loop, add to set of
7374 loop nodes and push on to stack. */
7375 if (ancestor != ENTRY_BLOCK_PTR
7376 && ! TEST_BIT (nodes, ancestor->index))
7377 {
7378 SET_BIT (nodes, ancestor->index);
7379 ancestor->loop_depth++;
7380 num_nodes++;
7381 stack[sp++] = ancestor;
7382 }
7383 }
7384 }
7385 free (stack);
7386 return num_nodes;
7387 }
7388
7389
7390 /* Compute the depth first search order and store in the array
7391 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7392 RC_ORDER is non-zero, return the reverse completion number for each
7393 node. Returns the number of nodes visited. A depth first search
7394 tries to get as far away from the starting point as quickly as
7395 possible. */
7396 static int
7397 flow_depth_first_order_compute (dfs_order, rc_order)
7398 int *dfs_order;
7399 int *rc_order;
7400 {
7401 edge *stack;
7402 int sp;
7403 int dfsnum = 0;
7404 int rcnum = n_basic_blocks - 1;
7405 sbitmap visited;
7406
7407 /* Allocate stack for back-tracking up CFG. */
7408 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7409 sp = 0;
7410
7411 /* Allocate bitmap to track nodes that have been visited. */
7412 visited = sbitmap_alloc (n_basic_blocks);
7413
7414 /* None of the nodes in the CFG have been visited yet. */
7415 sbitmap_zero (visited);
7416
7417 /* Push the first edge on to the stack. */
7418 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7419
7420 while (sp)
7421 {
7422 edge e;
7423 basic_block src;
7424 basic_block dest;
7425
7426 /* Look at the edge on the top of the stack. */
7427 e = stack[sp - 1];
7428 src = e->src;
7429 dest = e->dest;
7430
7431 /* Check if the edge destination has been visited yet. */
7432 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7433 {
7434 /* Mark that we have visited the destination. */
7435 SET_BIT (visited, dest->index);
7436
7437 if (dfs_order)
7438 dfs_order[dfsnum++] = dest->index;
7439
7440 if (dest->succ)
7441 {
7442 /* Since the DEST node has been visited for the first
7443 time, check its successors. */
7444 stack[sp++] = dest->succ;
7445 }
7446 else
7447 {
7448 /* There are no successors for the DEST node so assign
7449 its reverse completion number. */
7450 if (rc_order)
7451 rc_order[rcnum--] = dest->index;
7452 }
7453 }
7454 else
7455 {
7456 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7457 {
7458 /* There are no more successors for the SRC node
7459 so assign its reverse completion number. */
7460 if (rc_order)
7461 rc_order[rcnum--] = src->index;
7462 }
7463
7464 if (e->succ_next)
7465 stack[sp - 1] = e->succ_next;
7466 else
7467 sp--;
7468 }
7469 }
7470
7471 free (stack);
7472 sbitmap_free (visited);
7473
7474 /* The number of nodes visited should not be greater than
7475 n_basic_blocks. */
7476 if (dfsnum > n_basic_blocks)
7477 abort ();
7478
7479 /* There are some nodes left in the CFG that are unreachable. */
7480 if (dfsnum < n_basic_blocks)
7481 abort ();
7482 return dfsnum;
7483 }
7484
7485
7486 /* Compute the depth first search order on the _reverse_ graph and
7487 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7488 Returns the number of nodes visited.
7489
7490 The computation is split into three pieces:
7491
7492 flow_dfs_compute_reverse_init () creates the necessary data
7493 structures.
7494
7495 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7496 structures. The block will start the search.
7497
7498 flow_dfs_compute_reverse_execute () continues (or starts) the
7499 search using the block on the top of the stack, stopping when the
7500 stack is empty.
7501
7502 flow_dfs_compute_reverse_finish () destroys the necessary data
7503 structures.
7504
7505 Thus, the user will probably call ..._init(), call ..._add_bb() to
7506 add a beginning basic block to the stack, call ..._execute(),
7507 possibly add another bb to the stack and again call ..._execute(),
7508 ..., and finally call _finish(). */
7509
7510 /* Initialize the data structures used for depth-first search on the
7511 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7512 added to the basic block stack. DATA is the current depth-first
7513 search context. If INITIALIZE_STACK is non-zero, there is an
7514 element on the stack. */
7515
7516 static void
7517 flow_dfs_compute_reverse_init (data)
7518 depth_first_search_ds data;
7519 {
7520 /* Allocate stack for back-tracking up CFG. */
7521 data->stack =
7522 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK+1))
7523 * sizeof (basic_block));
7524 data->sp = 0;
7525
7526 /* Allocate bitmap to track nodes that have been visited. */
7527 data->visited_blocks
7528 = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7529
7530 /* None of the nodes in the CFG have been visited yet. */
7531 sbitmap_zero (data->visited_blocks);
7532
7533 return;
7534 }
7535
7536 /* Add the specified basic block to the top of the dfs data
7537 structures. When the search continues, it will start at the
7538 block. */
7539
7540 static void
7541 flow_dfs_compute_reverse_add_bb (data, bb)
7542 depth_first_search_ds data;
7543 basic_block bb;
7544 {
7545 data->stack[data->sp++] = bb;
7546 return;
7547 }
7548
7549 /* Continue the depth-first search through the reverse graph starting
7550 with the block at the stack's top and ending when the stack is
7551 empty. Visited nodes are marked. Returns an unvisited basic
7552 block, or NULL if there is none available. */
7553 static basic_block
7554 flow_dfs_compute_reverse_execute (data)
7555 depth_first_search_ds data;
7556 {
7557 basic_block bb;
7558 edge e;
7559 int i;
7560
7561 while (data->sp > 0)
7562 {
7563 bb = data->stack[--data->sp];
7564
7565 /* Mark that we have visited this node. */
7566 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK+1)))
7567 {
7568 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK+1));
7569
7570 /* Perform depth-first search on adjacent vertices. */
7571 for (e = bb->pred; e; e = e->pred_next)
7572 flow_dfs_compute_reverse_add_bb (data, e->src);
7573 }
7574 }
7575
7576 /* Determine if there are unvisited basic blocks. */
7577 for (i = n_basic_blocks - (INVALID_BLOCK+1); --i >= 0; )
7578 if (!TEST_BIT (data->visited_blocks, i))
7579 return BASIC_BLOCK (i + (INVALID_BLOCK+1));
7580 return NULL;
7581 }
7582
7583 /* Destroy the data structures needed for depth-first search on the
7584 reverse graph. */
7585
7586 static void
7587 flow_dfs_compute_reverse_finish (data)
7588 depth_first_search_ds data;
7589 {
7590 free (data->stack);
7591 sbitmap_free (data->visited_blocks);
7592 return;
7593 }
7594
7595 /* Return the block for the pre-header of the loop with header
7596 HEADER where DOM specifies the dominator information. Return NULL if
7597 there is no pre-header. */
7598 static basic_block
7599 flow_loop_pre_header_find (header, dom)
7600 basic_block header;
7601 const sbitmap *dom;
7602 {
7603 basic_block pre_header;
7604 edge e;
7605
7606 /* If block p is a predecessor of the header and is the only block
7607 that the header does not dominate, then it is the pre-header. */
7608 pre_header = NULL;
7609 for (e = header->pred; e; e = e->pred_next)
7610 {
7611 basic_block node = e->src;
7612
7613 if (node != ENTRY_BLOCK_PTR
7614 && ! TEST_BIT (dom[node->index], header->index))
7615 {
7616 if (pre_header == NULL)
7617 pre_header = node;
7618 else
7619 {
7620 /* There are multiple edges into the header from outside
7621 the loop so there is no pre-header block. */
7622 pre_header = NULL;
7623 break;
7624 }
7625 }
7626 }
7627 return pre_header;
7628 }
7629
7630
7631 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7632 previously added. The insertion algorithm assumes that the loops
7633 are added in the order found by a depth first search of the CFG. */
7634 static void
7635 flow_loop_tree_node_add (prevloop, loop)
7636 struct loop *prevloop;
7637 struct loop *loop;
7638 {
7639
7640 if (flow_loop_nested_p (prevloop, loop))
7641 {
7642 prevloop->inner = loop;
7643 loop->outer = prevloop;
7644 return;
7645 }
7646
7647 while (prevloop->outer)
7648 {
7649 if (flow_loop_nested_p (prevloop->outer, loop))
7650 {
7651 prevloop->next = loop;
7652 loop->outer = prevloop->outer;
7653 return;
7654 }
7655 prevloop = prevloop->outer;
7656 }
7657
7658 prevloop->next = loop;
7659 loop->outer = NULL;
7660 }
7661
7662
7663 /* Build the loop hierarchy tree for LOOPS. */
7664 static void
7665 flow_loops_tree_build (loops)
7666 struct loops *loops;
7667 {
7668 int i;
7669 int num_loops;
7670
7671 num_loops = loops->num;
7672 if (! num_loops)
7673 return;
7674
7675 /* Root the loop hierarchy tree with the first loop found.
7676 Since we used a depth first search this should be the
7677 outermost loop. */
7678 loops->tree = &loops->array[0];
7679 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7680
7681 /* Add the remaining loops to the tree. */
7682 for (i = 1; i < num_loops; i++)
7683 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7684 }
7685
7686
7687 /* Helper function to compute loop nesting depth and enclosed loop level
7688 for the natural loop specified by LOOP at the loop depth DEPTH.
7689 Returns the loop level. */
7690 static int
7691 flow_loop_level_compute (loop, depth)
7692 struct loop *loop;
7693 int depth;
7694 {
7695 struct loop *inner;
7696 int level = 1;
7697
7698 if (! loop)
7699 return 0;
7700
7701 /* Traverse loop tree assigning depth and computing level as the
7702 maximum level of all the inner loops of this loop. The loop
7703 level is equivalent to the height of the loop in the loop tree
7704 and corresponds to the number of enclosed loop levels (including
7705 itself). */
7706 for (inner = loop->inner; inner; inner = inner->next)
7707 {
7708 int ilevel;
7709
7710 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7711
7712 if (ilevel > level)
7713 level = ilevel;
7714 }
7715 loop->level = level;
7716 loop->depth = depth;
7717 return level;
7718 }
7719
7720
7721 /* Compute the loop nesting depth and enclosed loop level for the loop
7722 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7723 level. */
7724
7725 static int
7726 flow_loops_level_compute (loops)
7727 struct loops *loops;
7728 {
7729 struct loop *loop;
7730 int level;
7731 int levels = 0;
7732
7733 /* Traverse all the outer level loops. */
7734 for (loop = loops->tree; loop; loop = loop->next)
7735 {
7736 level = flow_loop_level_compute (loop, 1);
7737 if (level > levels)
7738 levels = level;
7739 }
7740 return levels;
7741 }
7742
7743
7744 /* Find all the natural loops in the function and save in LOOPS structure
7745 and recalculate loop_depth information in basic block structures.
7746 Return the number of natural loops found. */
7747
7748 int
7749 flow_loops_find (loops)
7750 struct loops *loops;
7751 {
7752 int i;
7753 int b;
7754 int num_loops;
7755 edge e;
7756 sbitmap headers;
7757 sbitmap *dom;
7758 int *dfs_order;
7759 int *rc_order;
7760
7761 loops->num = 0;
7762 loops->array = NULL;
7763 loops->tree = NULL;
7764 dfs_order = NULL;
7765 rc_order = NULL;
7766
7767 /* Taking care of this degenerate case makes the rest of
7768 this code simpler. */
7769 if (n_basic_blocks == 0)
7770 return 0;
7771
7772 /* Compute the dominators. */
7773 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7774 compute_flow_dominators (dom, NULL);
7775
7776 /* Count the number of loop edges (back edges). This should be the
7777 same as the number of natural loops. Also clear the loop_depth
7778 and as we work from inner->outer in a loop nest we call
7779 find_loop_nodes_find which will increment loop_depth for nodes
7780 within the current loop, which happens to enclose inner loops. */
7781
7782 num_loops = 0;
7783 for (b = 0; b < n_basic_blocks; b++)
7784 {
7785 BASIC_BLOCK (b)->loop_depth = 0;
7786 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7787 {
7788 basic_block latch = e->src;
7789
7790 /* Look for back edges where a predecessor is dominated
7791 by this block. A natural loop has a single entry
7792 node (header) that dominates all the nodes in the
7793 loop. It also has single back edge to the header
7794 from a latch node. Note that multiple natural loops
7795 may share the same header. */
7796 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7797 num_loops++;
7798 }
7799 }
7800
7801 if (num_loops)
7802 {
7803 /* Compute depth first search order of the CFG so that outer
7804 natural loops will be found before inner natural loops. */
7805 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7806 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7807 flow_depth_first_order_compute (dfs_order, rc_order);
7808
7809 /* Allocate loop structures. */
7810 loops->array
7811 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7812
7813 headers = sbitmap_alloc (n_basic_blocks);
7814 sbitmap_zero (headers);
7815
7816 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7817 sbitmap_zero (loops->shared_headers);
7818
7819 /* Find and record information about all the natural loops
7820 in the CFG. */
7821 num_loops = 0;
7822 for (b = 0; b < n_basic_blocks; b++)
7823 {
7824 basic_block header;
7825
7826 /* Search the nodes of the CFG in DFS order that we can find
7827 outer loops first. */
7828 header = BASIC_BLOCK (rc_order[b]);
7829
7830 /* Look for all the possible latch blocks for this header. */
7831 for (e = header->pred; e; e = e->pred_next)
7832 {
7833 basic_block latch = e->src;
7834
7835 /* Look for back edges where a predecessor is dominated
7836 by this block. A natural loop has a single entry
7837 node (header) that dominates all the nodes in the
7838 loop. It also has single back edge to the header
7839 from a latch node. Note that multiple natural loops
7840 may share the same header. */
7841 if (latch != ENTRY_BLOCK_PTR
7842 && TEST_BIT (dom[latch->index], header->index))
7843 {
7844 struct loop *loop;
7845
7846 loop = loops->array + num_loops;
7847
7848 loop->header = header;
7849 loop->latch = latch;
7850 loop->num = num_loops;
7851
7852 /* Keep track of blocks that are loop headers so
7853 that we can tell which loops should be merged. */
7854 if (TEST_BIT (headers, header->index))
7855 SET_BIT (loops->shared_headers, header->index);
7856 SET_BIT (headers, header->index);
7857
7858 /* Find nodes contained within the loop. */
7859 loop->nodes = sbitmap_alloc (n_basic_blocks);
7860 loop->num_nodes
7861 = flow_loop_nodes_find (header, latch, loop->nodes);
7862
7863 /* Compute first and last blocks within the loop.
7864 These are often the same as the loop header and
7865 loop latch respectively, but this is not always
7866 the case. */
7867 loop->first
7868 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7869 loop->last
7870 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7871
7872 /* Find edges which exit the loop. Note that a node
7873 may have several exit edges. */
7874 loop->num_exits
7875 = flow_loop_exits_find (loop->nodes, &loop->exits);
7876
7877 /* Look to see if the loop has a pre-header node. */
7878 loop->pre_header
7879 = flow_loop_pre_header_find (header, dom);
7880
7881 num_loops++;
7882 }
7883 }
7884 }
7885
7886 /* Natural loops with shared headers may either be disjoint or
7887 nested. Disjoint loops with shared headers cannot be inner
7888 loops and should be merged. For now just mark loops that share
7889 headers. */
7890 for (i = 0; i < num_loops; i++)
7891 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7892 loops->array[i].shared = 1;
7893
7894 sbitmap_free (headers);
7895 }
7896
7897 loops->num = num_loops;
7898
7899 /* Save CFG derived information to avoid recomputing it. */
7900 loops->cfg.dom = dom;
7901 loops->cfg.dfs_order = dfs_order;
7902 loops->cfg.rc_order = rc_order;
7903
7904 /* Build the loop hierarchy tree. */
7905 flow_loops_tree_build (loops);
7906
7907 /* Assign the loop nesting depth and enclosed loop level for each
7908 loop. */
7909 loops->levels = flow_loops_level_compute (loops);
7910
7911 return num_loops;
7912 }
7913
7914
7915 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7916
7917 int
7918 flow_loop_outside_edge_p (loop, e)
7919 const struct loop *loop;
7920 edge e;
7921 {
7922 if (e->dest != loop->header)
7923 abort ();
7924 return (e->src == ENTRY_BLOCK_PTR)
7925 || ! TEST_BIT (loop->nodes, e->src->index);
7926 }
7927
7928
7929 /* Clear LOG_LINKS fields of insns in a chain.
7930 Also clear the global_live_at_{start,end} fields of the basic block
7931 structures. */
7932
7933 void
7934 clear_log_links (insns)
7935 rtx insns;
7936 {
7937 rtx i;
7938 int b;
7939
7940 for (i = insns; i; i = NEXT_INSN (i))
7941 if (INSN_P (i))
7942 LOG_LINKS (i) = 0;
7943
7944 for (b = 0; b < n_basic_blocks; b++)
7945 {
7946 basic_block bb = BASIC_BLOCK (b);
7947
7948 bb->global_live_at_start = NULL;
7949 bb->global_live_at_end = NULL;
7950 }
7951
7952 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
7953 EXIT_BLOCK_PTR->global_live_at_start = NULL;
7954 }
7955
7956 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7957 correspond to the hard registers, if any, set in that map. This
7958 could be done far more efficiently by having all sorts of special-cases
7959 with moving single words, but probably isn't worth the trouble. */
7960
7961 void
7962 reg_set_to_hard_reg_set (to, from)
7963 HARD_REG_SET *to;
7964 bitmap from;
7965 {
7966 int i;
7967
7968 EXECUTE_IF_SET_IN_BITMAP
7969 (from, 0, i,
7970 {
7971 if (i >= FIRST_PSEUDO_REGISTER)
7972 return;
7973 SET_HARD_REG_BIT (*to, i);
7974 });
7975 }
7976