0a20f4837e5e67fdb10c369472c190231fcd76a3
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137 #include "expr.h"
138 #include "ssa.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 /* The obstack on which the flow graph components are allocated. */
172
173 struct obstack flow_obstack;
174 static char *flow_firstobj;
175
176 /* Number of basic blocks in the current function. */
177
178 int n_basic_blocks;
179
180 /* Number of edges in the current function. */
181
182 int n_edges;
183
184 /* The basic block array. */
185
186 varray_type basic_block_info;
187
188 /* The special entry and exit blocks. */
189
190 struct basic_block_def entry_exit_blocks[2]
191 = {{NULL, /* head */
192 NULL, /* end */
193 NULL, /* pred */
194 NULL, /* succ */
195 NULL, /* local_set */
196 NULL, /* cond_local_set */
197 NULL, /* global_live_at_start */
198 NULL, /* global_live_at_end */
199 NULL, /* aux */
200 ENTRY_BLOCK, /* index */
201 0, /* loop_depth */
202 -1, -1, /* eh_beg, eh_end */
203 0 /* count */
204 },
205 {
206 NULL, /* head */
207 NULL, /* end */
208 NULL, /* pred */
209 NULL, /* succ */
210 NULL, /* local_set */
211 NULL, /* cond_local_set */
212 NULL, /* global_live_at_start */
213 NULL, /* global_live_at_end */
214 NULL, /* aux */
215 EXIT_BLOCK, /* index */
216 0, /* loop_depth */
217 -1, -1, /* eh_beg, eh_end */
218 0 /* count */
219 }
220 };
221
222 /* Nonzero if the second flow pass has completed. */
223 int flow2_completed;
224
225 /* Maximum register number used in this function, plus one. */
226
227 int max_regno;
228
229 /* Indexed by n, giving various register information */
230
231 varray_type reg_n_info;
232
233 /* Size of a regset for the current function,
234 in (1) bytes and (2) elements. */
235
236 int regset_bytes;
237 int regset_size;
238
239 /* Regset of regs live when calls to `setjmp'-like functions happen. */
240 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241
242 regset regs_live_at_setjmp;
243
244 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
245 that have to go in the same hard reg.
246 The first two regs in the list are a pair, and the next two
247 are another pair, etc. */
248 rtx regs_may_share;
249
250 /* Set of registers that may be eliminable. These are handled specially
251 in updating regs_ever_live. */
252
253 static HARD_REG_SET elim_reg_set;
254
255 /* The basic block structure for every insn, indexed by uid. */
256
257 varray_type basic_block_for_insn;
258
259 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
260 /* ??? Should probably be using LABEL_NUSES instead. It would take a
261 bit of surgery to be able to use or co-opt the routines in jump. */
262
263 static rtx label_value_list;
264 static rtx tail_recursion_label_list;
265
/* Holds information for tracking conditional register life information.
   Instances are stored in the reg_cond_dead splay tree of a
   propagate_block_info, indexed by register number.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};
275
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set unconditionally in the
     basic block.  */
  regset local_set;

  /* If non-null, record the set of registers set conditionally in the
     basic block.  */
  regset cond_local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
321
/* Store the data structures necessary for depth-first search.  */
struct depth_first_search_dsS {
  /* Stack for backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of basic blocks on the stack.  That is, positions 0, ..., sp-1
     are occupied.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
335
336 /* Forward declarations */
337 static int count_basic_blocks PARAMS ((rtx));
338 static void find_basic_blocks_1 PARAMS ((rtx));
339 static rtx find_label_refs PARAMS ((rtx, rtx));
340 static void clear_edges PARAMS ((void));
341 static void make_edges PARAMS ((rtx));
342 static void make_label_edge PARAMS ((sbitmap *, basic_block,
343 rtx, int));
344 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
345 basic_block, rtx, int));
346 static void mark_critical_edges PARAMS ((void));
347 static void move_stray_eh_region_notes PARAMS ((void));
348 static void record_active_eh_regions PARAMS ((rtx));
349
350 static void commit_one_edge_insertion PARAMS ((edge));
351
352 static void delete_unreachable_blocks PARAMS ((void));
353 static void delete_eh_regions PARAMS ((void));
354 static int can_delete_note_p PARAMS ((rtx));
355 static void expunge_block PARAMS ((basic_block));
356 static int can_delete_label_p PARAMS ((rtx));
357 static int tail_recursion_label_p PARAMS ((rtx));
358 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
359 basic_block));
360 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
361 basic_block));
362 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
363 static void try_merge_blocks PARAMS ((void));
364 static void tidy_fallthru_edges PARAMS ((void));
365 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
366 static void verify_wide_reg PARAMS ((int, rtx, rtx));
367 static void verify_local_live_at_start PARAMS ((regset, basic_block));
368 static int set_noop_p PARAMS ((rtx));
369 static int noop_move_p PARAMS ((rtx));
370 static void delete_noop_moves PARAMS ((rtx));
371 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
372 static void notice_stack_pointer_modification PARAMS ((rtx));
373 static void mark_reg PARAMS ((rtx, void *));
374 static void mark_regs_live_at_end PARAMS ((regset));
375 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
376 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
377 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
378 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
379 static int insn_dead_p PARAMS ((struct propagate_block_info *,
380 rtx, int, rtx));
381 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
382 rtx, rtx));
383 static void mark_set_regs PARAMS ((struct propagate_block_info *,
384 rtx, rtx));
385 static void mark_set_1 PARAMS ((struct propagate_block_info *,
386 enum rtx_code, rtx, rtx,
387 rtx, int));
388 #ifdef HAVE_conditional_execution
389 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
390 int, rtx));
391 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
392 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
393 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
394 int));
395 static rtx ior_reg_cond PARAMS ((rtx, rtx));
396 static rtx not_reg_cond PARAMS ((rtx));
397 static rtx nand_reg_cond PARAMS ((rtx, rtx));
398 #endif
399 #ifdef AUTO_INC_DEC
400 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
401 rtx, rtx, rtx, rtx, rtx));
402 static void find_auto_inc PARAMS ((struct propagate_block_info *,
403 rtx, rtx));
404 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
405 rtx));
406 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
407 #endif
408 static void mark_used_reg PARAMS ((struct propagate_block_info *,
409 rtx, rtx, rtx));
410 static void mark_used_regs PARAMS ((struct propagate_block_info *,
411 rtx, rtx, rtx));
412 void dump_flow_info PARAMS ((FILE *));
413 void debug_flow_info PARAMS ((void));
414 static void dump_edge_info PARAMS ((FILE *, edge, int));
415 static void print_rtl_and_abort PARAMS ((void));
416
417 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
418 rtx));
419 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
420 rtx));
421 static void remove_fake_successors PARAMS ((basic_block));
422 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
423 FILE *));
424 static void flow_edge_list_print PARAMS ((const char *, const edge *,
425 int, FILE *));
426 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
427 FILE *));
428 static int flow_loop_nested_p PARAMS ((struct loop *,
429 struct loop *));
430 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
431 edge **));
432 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
433 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
434 static int flow_depth_first_order_compute PARAMS ((int *, int *));
435 static void flow_dfs_compute_reverse_init
436 PARAMS ((depth_first_search_ds));
437 static void flow_dfs_compute_reverse_add_bb
438 PARAMS ((depth_first_search_ds, basic_block));
439 static basic_block flow_dfs_compute_reverse_execute
440 PARAMS ((depth_first_search_ds));
441 static void flow_dfs_compute_reverse_finish
442 PARAMS ((depth_first_search_ds));
443 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
444 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
445 const sbitmap *));
446 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
447 static void flow_loops_tree_build PARAMS ((struct loops *));
448 static int flow_loop_level_compute PARAMS ((struct loop *, int));
449 static int flow_loops_level_compute PARAMS ((struct loops *));
450 static void allocate_bb_life_data PARAMS ((void));
451 \f
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  NREGS and FILE are currently unused; they are kept
   in the signature for compatibility with existing callers.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data from any previous invocation.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  label_value_list was filled in by
     find_basic_blocks_1 above.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
522
/* Emit end-of-function warnings that depend on the CFG: suggest
   `noreturn' for functions whose exit block has no predecessors,
   warn about `noreturn' functions that can return, and warn when
   control reaches the end of a non-void function.  Must run after
   find_basic_blocks has built the CFG.  */

void
check_function_return_warnings ()
{
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred == NULL)
    warning ("function might be possible candidate for attribute `noreturn'");

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EXIT_BLOCK_PTR->pred != NULL)
    warning ("`noreturn' function does return");

  /* If the clobber_return_insn appears in some basic block, then we
     do reach the end without returning a value.  */
  else if (warn_return_type
	   && cfun->x_clobber_return_insn != NULL
	   && EXIT_BLOCK_PTR->pred != NULL)
    {
      int max_uid = get_max_uid ();

      /* If clobber_return_insn was excised by jump1, then renumber_insns
	 can make max_uid smaller than the number still recorded in our rtx.
	 That's fine, since this is a quick way of verifying that the insn
	 is no longer in the chain.  */
      if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
	{
	  /* Recompute insn->block mapping, since the initial mapping is
	     set before we delete unreachable blocks.  */
	  compute_bb_for_insn (max_uid);

	  if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
	    warning ("control reaches end of non-void function");
	}
    }
}
559
560 /* Count the basic blocks of the function. */
561
562 static int
563 count_basic_blocks (f)
564 rtx f;
565 {
566 register rtx insn;
567 register RTX_CODE prev_code;
568 register int count = 0;
569 int eh_region = 0;
570 int call_had_abnormal_edge = 0;
571
572 prev_code = JUMP_INSN;
573 for (insn = f; insn; insn = NEXT_INSN (insn))
574 {
575 register RTX_CODE code = GET_CODE (insn);
576
577 if (code == CODE_LABEL
578 || (GET_RTX_CLASS (code) == 'i'
579 && (prev_code == JUMP_INSN
580 || prev_code == BARRIER
581 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
582 count++;
583
584 /* Record whether this call created an edge. */
585 if (code == CALL_INSN)
586 {
587 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
588 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
589
590 call_had_abnormal_edge = 0;
591
592 /* If there is an EH region or rethrow, we have an edge. */
593 if ((eh_region && region > 0)
594 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
595 call_had_abnormal_edge = 1;
596 else if (nonlocal_goto_handler_labels && region >= 0)
597 /* If there is a nonlocal goto label and the specified
598 region number isn't -1, we have an edge. (0 means
599 no throw, but might have a nonlocal goto). */
600 call_had_abnormal_edge = 1;
601 }
602
603 if (code != NOTE)
604 prev_code = code;
605 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
606 ++eh_region;
607 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
608 --eh_region;
609 }
610
611 /* The rest of the compiler works a bit smoother when we don't have to
612 check for the edge case of do-nothing functions with no basic blocks. */
613 if (count == 0)
614 {
615 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
616 count = 1;
617 }
618
619 return count;
620 }
621
622 /* Scan a list of insns for labels referred to other than by jumps.
623 This is used to scan the alternatives of a call placeholder. */
624 static rtx
625 find_label_refs (f, lvl)
626 rtx f;
627 rtx lvl;
628 {
629 rtx insn;
630
631 for (insn = f; insn; insn = NEXT_INSN (insn))
632 if (INSN_P (insn))
633 {
634 rtx note;
635
636 /* Make a list of all labels referred to other than by jumps
637 (which just don't have the REG_LABEL notes).
638
639 Make a special exception for labels followed by an ADDR*VEC,
640 as this would be a part of the tablejump setup code.
641
642 Make a special exception for the eh_return_stub_label, which
643 we know isn't part of any otherwise visible control flow. */
644
645 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
646 if (REG_NOTE_KIND (note) == REG_LABEL)
647 {
648 rtx lab = XEXP (note, 0), next;
649
650 if (lab == eh_return_stub_label)
651 ;
652 else if ((next = next_nonnote_insn (lab)) != NULL
653 && GET_CODE (next) == JUMP_INSN
654 && (GET_CODE (PATTERN (next)) == ADDR_VEC
655 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
656 ;
657 else if (GET_CODE (lab) == NOTE)
658 ;
659 else
660 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
661 }
662 }
663
664 return lvl;
665 }
666
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.

   Allocates the basic block structures (via create_basic_block) and
   stores them in basic_block_info.  Also fills in the file-static
   label_value_list and tail_recursion_label_list.  */

static void
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;			/* Index of the block being built.  */
  rtx bb_note = NULL_RTX;	/* Pending NOTE_INSN_BASIC_BLOCK, if any.  */
  rtx eh_list = NULL_RTX;	/* LIFO list of active EH region notes.  */
  rtx lvl = NULL_RTX;		/* Accumulated label-value list.  */
  rtx trll = NULL_RTX;		/* Accumulated tail recursion labels.  */
  rtx head = NULL_RTX;		/* First insn of the open block, or null.  */
  rtx end = NULL_RTX;		/* Last insn seen in the open block.  */

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      /* NEXT is cached because the NOTE case below may delete INSN.  */
      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		else
		  next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }

	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	      {
		/* Scan each of the alternatives for label refs.  */
		lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
		lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
		/* Record its tail recursion label, if any.  */
		if (XEXP (PATTERN (insn), 3) != NULL_RTX)
		  trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
	      }

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  The labels below are also the
	       targets of gotos from the JUMP_INSN and BARRIER cases
	       above.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* Fall through.  */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else if (GET_CODE (lab) == NOTE)
		  ;
		else
		  lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
	      }
	}
    }

  /* Close out the final block, or drop the orphaned bb note.  */
  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);
  else if (bb_note)
    flow_delete_insn (bb_note);

  /* Cross-check against count_basic_blocks.  */
  if (i != n_basic_blocks)
    abort ();

  label_value_list = lvl;
  tail_recursion_label_list = trll;
}
898
/* Tidy the CFG by deleting unreachable code and whatnot.  F is the
   first insn of the function.  Call order matters: unreachable blocks
   must be gone before the EH region data is recomputed, and merging
   can create new critical edges.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  free_EXPR_LIST_list (&tail_recursion_label_list);
}
915
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  The block is registered in basic_block_info
   at position INDEX.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      rtx after;

      /* The note goes just after a leading CODE_LABEL; otherwise it
	 is placed before HEAD and becomes the new head itself.  */
      if (GET_CODE (head) == CODE_LABEL)
	after = head;
      else
	{
	  after = PREV_INSN (head);
	  head = bb_note;
	}

      if (after != bb_note && NEXT_INSN (after) != bb_note)
	reorder_insns (bb_note, bb_note, after);
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
980 \f
981 /* Records the basic block struct in BB_FOR_INSN, for every instruction
982 indexed by INSN_UID. MAX is the size of the array. */
983
984 void
985 compute_bb_for_insn (max)
986 int max;
987 {
988 int i;
989
990 if (basic_block_for_insn)
991 VARRAY_FREE (basic_block_for_insn);
992 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
993
994 for (i = 0; i < n_basic_blocks; ++i)
995 {
996 basic_block bb = BASIC_BLOCK (i);
997 rtx insn, end;
998
999 end = bb->end;
1000 insn = bb->head;
1001 while (1)
1002 {
1003 int uid = INSN_UID (insn);
1004 if (uid < max)
1005 VARRAY_BB (basic_block_for_insn, uid) = bb;
1006 if (insn == end)
1007 break;
1008 insn = NEXT_INSN (insn);
1009 }
1010 }
1011 }
1012
1013 /* Free the memory associated with the edge structures. */
1014
1015 static void
1016 clear_edges ()
1017 {
1018 int i;
1019 edge n, e;
1020
1021 for (i = 0; i < n_basic_blocks; ++i)
1022 {
1023 basic_block bb = BASIC_BLOCK (i);
1024
1025 for (e = bb->succ; e; e = n)
1026 {
1027 n = e->succ_next;
1028 free (e);
1029 }
1030
1031 bb->succ = 0;
1032 bb->pred = 0;
1033 }
1034
1035 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1036 {
1037 n = e->succ_next;
1038 free (e);
1039 }
1040
1041 ENTRY_BLOCK_PTR->succ = 0;
1042 EXIT_BLOCK_PTR->pred = 0;
1043
1044 n_edges = 0;
1045 }
1046
/* Identify the edges between basic blocks.

   LABEL_VALUE_LIST is the list of labels whose addresses have been
   taken; a computed jump may transfer control to any of them, so they
   (together with forced_labels) receive edges from every computed
   jump below.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      /* An ADDR_DIFF_VEC keeps its labels in operand 1.  */
	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      /* One edge per entry in the dispatch table.  */
	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
		   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      else if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      /* Every region entered or exited mid-block also gets
		 edges to its reachable handlers.  */
	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  /* Fallthru exists if the next non-note insn is the next
	     block's first real insn.  */
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
1245
1246 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1247 about the edge that is accumulated between calls. */
1248
1249 void
1250 make_edge (edge_cache, src, dst, flags)
1251 sbitmap *edge_cache;
1252 basic_block src, dst;
1253 int flags;
1254 {
1255 int use_edge_cache;
1256 edge e;
1257
1258 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1259 many edges to them, and we didn't allocate memory for it. */
1260 use_edge_cache = (edge_cache
1261 && src != ENTRY_BLOCK_PTR
1262 && dst != EXIT_BLOCK_PTR);
1263
1264 /* Make sure we don't add duplicate edges. */
1265 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1266 for (e = src->succ; e; e = e->succ_next)
1267 if (e->dest == dst)
1268 {
1269 e->flags |= flags;
1270 return;
1271 }
1272
1273 e = (edge) xcalloc (1, sizeof (*e));
1274 n_edges++;
1275
1276 e->succ_next = src->succ;
1277 e->pred_next = dst->pred;
1278 e->src = src;
1279 e->dest = dst;
1280 e->flags = flags;
1281
1282 src->succ = e;
1283 dst->pred = e;
1284
1285 if (use_edge_cache)
1286 SET_BIT (edge_cache[src->index], dst->index);
1287 }
1288
1289 /* Create an edge from a basic block to a label. */
1290
1291 static void
1292 make_label_edge (edge_cache, src, label, flags)
1293 sbitmap *edge_cache;
1294 basic_block src;
1295 rtx label;
1296 int flags;
1297 {
1298 if (GET_CODE (label) != CODE_LABEL)
1299 abort ();
1300
1301 /* If the label was never emitted, this insn is junk, but avoid a
1302 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1303 as a result of a syntax error and a diagnostic has already been
1304 printed. */
1305
1306 if (INSN_UID (label) == 0)
1307 return;
1308
1309 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1310 }
1311
1312 /* Create the edges generated by INSN in REGION. */
1313
1314 static void
1315 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1316 sbitmap *edge_cache;
1317 eh_nesting_info *eh_nest_info;
1318 basic_block src;
1319 rtx insn;
1320 int region;
1321 {
1322 handler_info **handler_list;
1323 int num, is_call;
1324
1325 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1326 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1327 while (--num >= 0)
1328 {
1329 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1330 EDGE_ABNORMAL | EDGE_EH | is_call);
1331 }
1332 }
1333
/* EH_REGION notes appearing between basic blocks is ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  /* Fewer than two blocks means there is no between-block gap.  */
  if (n_basic_blocks < 2)
    return;

  /* Walk adjacent block pairs (B1, B2) from the end of the function
     backwards; B2 is always the block following B1.  */
  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      /* Scan the insns between the end of B1 and the head of B2.  */
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  Pushing onto LIST reverses the order; the
		 insertion loop below undoes the reversal.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      /* Each note is inserted immediately after the same fixed point,
	 so the reversed queue comes out in its original order.  */
      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
1386
/* Recompute eh_beg/eh_end for each basic block.  F is the first insn
   of the function.  eh_beg/eh_end record the innermost EH region
   active at the block's head/end respectively, or -1 if none.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  /* EH_LIST is maintained as a stack of the currently-open
     NOTE_INSN_EH_REGION_BEG notes; its head is the innermost one.  */
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    /* Entering a region: push it.  */
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      /* Leaving the innermost region: pop it.  */
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  /* All blocks done; any remaining insns are after the last
	     block and need no scanning.  */
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
1425
1426 /* Identify critical edges and set the bits appropriately. */
1427
1428 static void
1429 mark_critical_edges ()
1430 {
1431 int i, n = n_basic_blocks;
1432 basic_block bb;
1433
1434 /* We begin with the entry block. This is not terribly important now,
1435 but could be if a front end (Fortran) implemented alternate entry
1436 points. */
1437 bb = ENTRY_BLOCK_PTR;
1438 i = -1;
1439
1440 while (1)
1441 {
1442 edge e;
1443
1444 /* (1) Critical edges must have a source with multiple successors. */
1445 if (bb->succ && bb->succ->succ_next)
1446 {
1447 for (e = bb->succ; e; e = e->succ_next)
1448 {
1449 /* (2) Critical edges must have a destination with multiple
1450 predecessors. Note that we know there is at least one
1451 predecessor -- the edge we followed to get here. */
1452 if (e->dest->pred->pred_next)
1453 e->flags |= EDGE_CRITICAL;
1454 else
1455 e->flags &= ~EDGE_CRITICAL;
1456 }
1457 }
1458 else
1459 {
1460 for (e = bb->succ; e; e = e->succ_next)
1461 e->flags &= ~EDGE_CRITICAL;
1462 }
1463
1464 if (++i >= n)
1465 break;
1466 bb = BASIC_BLOCK (i);
1467 }
1468 }
1469 \f
/* Split a block BB after insn INSN creating a new fallthru edge.
   Return the new edge.  Note that to keep other parts of the compiler happy,
   this function renumbers all the basic blocks so that the new
   one has a number one greater than the block split.  */

edge
split_block (bb, insn)
     basic_block bb;
     rtx insn;
{
  basic_block new_bb;
  edge new_edge;
  edge e;
  rtx bb_note;
  int i, j;

  /* There is no point splitting the block after its end.  */
  if (bb->end == insn)
    return 0;

  /* Create the new structures.  The block struct lives on the flow
     obstack like all other block structs; the edge is malloc'd.  */
  new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
  new_edge = (edge) xcalloc (1, sizeof (*new_edge));
  n_edges++;

  memset (new_bb, 0, sizeof (*new_bb));

  /* NEW_BB takes the insns after INSN; BB keeps those up to INSN.  */
  new_bb->head = NEXT_INSN (insn);
  new_bb->end = bb->end;
  bb->end = insn;

  /* NEW_BB inherits BB's successors; the new fallthru edge becomes
     BB's only successor and NEW_BB's only predecessor.  */
  new_bb->succ = bb->succ;
  bb->succ = new_edge;
  new_bb->pred = new_edge;
  new_bb->count = bb->count;
  new_bb->loop_depth = bb->loop_depth;

  new_edge->src = bb;
  new_edge->dest = new_bb;
  new_edge->flags = EDGE_FALLTHRU;
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = bb->count;

  /* Redirect the src of the successor edges of bb to point to new_bb.  */
  for (e = new_bb->succ; e; e = e->succ_next)
    e->src = new_bb;

  /* Place the new block just after the block being split.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);

  /* Some parts of the compiler expect blocks to be number in
     sequential order so insert the new block immediately after the
     block being split..  */
  j = bb->index;
  /* Shift every block after BB up one slot, renumbering as we go.  */
  for (i = n_basic_blocks - 1; i > j + 1; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }

  BASIC_BLOCK (i) = new_bb;
  new_bb->index = i;

  /* Create the basic block note.  */
  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
			      new_bb->head);
  NOTE_BASIC_BLOCK (bb_note) = new_bb;
  new_bb->head = bb_note;

  update_bb_for_insn (new_bb);

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);

      /* We now have to calculate which registers are live at the end
	 of the split basic block and at the start of the new basic
	 block.  Start with those registers that are known to be live
	 at the end of the original basic block and get
	 propagate_block to determine which registers are live.  */
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
      propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
      COPY_REG_SET (bb->global_live_at_end,
		    new_bb->global_live_at_start);
    }

  return new_edge;
}
1561
1562
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }

  /* Wire them up.  EDGE_IN now enters BB, and the new EDGE_OUT falls
     through from BB to the old successor.  */
  bb->pred = edge_in;
  bb->succ = edge_out;
  bb->count = edge_in->count;

  edge_in->dest = bb;
  /* BB has a single predecessor, so EDGE_IN is no longer critical.  */
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;
  edge_out->count = edge_in->count;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      /* Look for some other fallthru predecessor of the successor.  */
      for (e = edge_out->pred_next; e; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
		 way to do the bulk of the work here is to recursively
		 call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  /* Shift blocks J .. end up one slot, renumbering as we go.  */
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticable impact on the generated
     code.  Consider this cfg:

		        E
			|
			0
		       / \
		   +->1-->2--->E
                   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  /* Retarget every table entry that referenced OLD_LABEL,
	     maintaining label use counts.  */
	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label, 0);
	}

      /* The retargeted jump needs a label at the head of BB.  */
      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
1799
1800 /* Queue instructions for insertion on an edge between two basic blocks.
1801 The new instructions and basic blocks (if any) will not appear in the
1802 CFG until commit_edge_insertions is called. */
1803
1804 void
1805 insert_insn_on_edge (pattern, e)
1806 rtx pattern;
1807 edge e;
1808 {
1809 /* We cannot insert instructions on an abnormal critical edge.
1810 It will be easier to find the culprit if we die now. */
1811 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1812 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1813 abort ();
1814
1815 if (e->insns == NULL_RTX)
1816 start_sequence ();
1817 else
1818 push_to_sequence (e->insns);
1819
1820 emit_insn (pattern);
1821
1822 e->insns = get_insns ();
1823 end_sequence ();
1824 }
1825
/* Update the CFG for the instructions queued on edge E.  Exactly one
   of BEFORE/AFTER is chosen as the insertion point, depending on
   whether we insert at the destination's head, the source's tail, or
   a freshly split block.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (NOTE_INSN_BASIC_BLOCK_P (tmp))
	tmp = NEXT_INSN (tmp);
      /* If we skipped nothing, insert before the head; otherwise
	 insert after the label/note we skipped.  */
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	{
	  before = bb->end;
	}
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      /* Inserting at the very front moves the block's head.  */
      if (before == bb->head)
	bb->head = insns;

      last = prev_nonnote_insn (before);
    }
  else
    {
      last = emit_insns_after (insns, after);
      if (after == bb->end)
	bb->end = last;
    }

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = bb->succ;
      if (e->dest != EXIT_BLOCK_PTR
	  || e->succ_next != NULL
	  || (e->flags & EDGE_FALLTHRU) == 0)
	abort ();
      e->flags &= ~EDGE_FALLTHRU;

      emit_barrier_after (last);
      bb->end = last;

      /* The old jump we inserted before is now dead code.  */
      if (before)
	flow_delete_insn (before);
    }
  else if (GET_CODE (last) == JUMP_INSN)
    abort ();
}
1941
1942 /* Update the CFG for all queued instructions. */
1943
1944 void
1945 commit_edge_insertions ()
1946 {
1947 int i;
1948 basic_block bb;
1949
1950 #ifdef ENABLE_CHECKING
1951 verify_flow_info ();
1952 #endif
1953
1954 i = -1;
1955 bb = ENTRY_BLOCK_PTR;
1956 while (1)
1957 {
1958 edge e, next;
1959
1960 for (e = bb->succ; e; e = next)
1961 {
1962 next = e->succ_next;
1963 if (e->insns)
1964 commit_one_edge_insertion (e);
1965 }
1966
1967 if (++i >= n_basic_blocks)
1968 break;
1969 bb = BASIC_BLOCK (i);
1970 }
1971 }
1972 \f
1973 /* Delete all unreachable basic blocks. */
1974
1975 static void
1976 delete_unreachable_blocks ()
1977 {
1978 basic_block *worklist, *tos;
1979 int deleted_handler;
1980 edge e;
1981 int i, n;
1982
1983 n = n_basic_blocks;
1984 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1985
1986 /* Use basic_block->aux as a marker. Clear them all. */
1987
1988 for (i = 0; i < n; ++i)
1989 BASIC_BLOCK (i)->aux = NULL;
1990
1991 /* Add our starting points to the worklist. Almost always there will
1992 be only one. It isn't inconcievable that we might one day directly
1993 support Fortran alternate entry points. */
1994
1995 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1996 {
1997 *tos++ = e->dest;
1998
1999 /* Mark the block with a handy non-null value. */
2000 e->dest->aux = e;
2001 }
2002
2003 /* Iterate: find everything reachable from what we've already seen. */
2004
2005 while (tos != worklist)
2006 {
2007 basic_block b = *--tos;
2008
2009 for (e = b->succ; e; e = e->succ_next)
2010 if (!e->dest->aux)
2011 {
2012 *tos++ = e->dest;
2013 e->dest->aux = e;
2014 }
2015 }
2016
2017 /* Delete all unreachable basic blocks. Count down so that we don't
2018 interfere with the block renumbering that happens in flow_delete_block. */
2019
2020 deleted_handler = 0;
2021
2022 for (i = n - 1; i >= 0; --i)
2023 {
2024 basic_block b = BASIC_BLOCK (i);
2025
2026 if (b->aux != NULL)
2027 /* This block was found. Tidy up the mark. */
2028 b->aux = NULL;
2029 else
2030 deleted_handler |= flow_delete_block (b);
2031 }
2032
2033 tidy_fallthru_edges ();
2034
2035 /* If we deleted an exception handler, we may have EH region begin/end
2036 blocks to remove as well. */
2037 if (deleted_handler)
2038 delete_eh_regions ();
2039
2040 free (worklist);
2041 }
2042
2043 /* Find EH regions for which there is no longer a handler, and delete them. */
2044
2045 static void
2046 delete_eh_regions ()
2047 {
2048 rtx insn;
2049
2050 update_rethrow_references ();
2051
2052 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2053 if (GET_CODE (insn) == NOTE)
2054 {
2055 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2056 || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2057 {
2058 int num = NOTE_EH_HANDLER (insn);
2059 /* A NULL handler indicates a region is no longer needed,
2060 as long as its rethrow label isn't used. */
2061 if (get_first_handler (num) == NULL && ! rethrow_used (num))
2062 {
2063 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2064 NOTE_SOURCE_FILE (insn) = 0;
2065 }
2066 }
2067 }
2068 }
2069
2070 /* Return true if NOTE is not one of the ones that must be kept paired,
2071 so that we may simply delete them. */
2072
2073 static int
2074 can_delete_note_p (note)
2075 rtx note;
2076 {
2077 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2078 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2079 }
2080
2081 /* Unlink a chain of insns between START and FINISH, leaving notes
2082 that must be paired. */
2083
void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      /* Record the successor before START is possibly unchained, since
	 flow_delete_insn clears START's links.  */
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	/* A note that must stay paired is left in place untouched.  */
	;
      else if (GET_CODE (start) == CODE_LABEL
	       && ! can_delete_label_p (start))
	{
	  /* An undeletable label is demoted to a DELETED_LABEL note,
	     keeping its name in the note's source-file slot.  */
	  const char *name = LABEL_NAME (start);
	  PUT_CODE (start, NOTE);
	  NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (start) = name;
	}
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
2115
2116 /* Delete the insns in a (non-live) block. We physically delete every
2117 non-deleted-note insn, and update the flow graph appropriately.
2118
2119 Return nonzero if we deleted an exception handler. */
2120
2121 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2122 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2123
int
flow_delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      /* Walk the EH label list looking for this block's label; PREV
	 tracks the link slot to patch if we find it.  */
      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}
    }

  /* Include any jump table following the basic block.  The table lives
     after the jump's target label, as an ADDR_VEC/ADDR_DIFF_VEC insn.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e; e = next)
      {
	/* Unlink E from its source block's successor list by finding
	   the link slot that points at it.  */
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e; e = next)
      {
	/* Likewise unlink E from its destination's predecessor list.  */
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
2215
2216 /* Remove block B from the basic block array and compact behind it. */
2217
2218 static void
2219 expunge_block (b)
2220 basic_block b;
2221 {
2222 int i, n = n_basic_blocks;
2223
2224 for (i = b->index; i + 1 < n; ++i)
2225 {
2226 basic_block x = BASIC_BLOCK (i + 1);
2227 BASIC_BLOCK (i) = x;
2228 x->index = i;
2229 }
2230
2231 basic_block_info->num_elements--;
2232 n_basic_blocks--;
2233 }
2234
2235 /* Delete INSN by patching it out. Return the next insn. */
2236
2237 rtx
2238 flow_delete_insn (insn)
2239 rtx insn;
2240 {
2241 rtx prev = PREV_INSN (insn);
2242 rtx next = NEXT_INSN (insn);
2243 rtx note;
2244
2245 PREV_INSN (insn) = NULL_RTX;
2246 NEXT_INSN (insn) = NULL_RTX;
2247 INSN_DELETED_P (insn) = 1;
2248
2249 if (prev)
2250 NEXT_INSN (prev) = next;
2251 if (next)
2252 PREV_INSN (next) = prev;
2253 else
2254 set_last_insn (prev);
2255
2256 if (GET_CODE (insn) == CODE_LABEL)
2257 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2258
2259 /* If deleting a jump, decrement the use count of the label. Deleting
2260 the label itself should happen in the normal course of block merging. */
2261 if (GET_CODE (insn) == JUMP_INSN
2262 && JUMP_LABEL (insn)
2263 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2264 LABEL_NUSES (JUMP_LABEL (insn))--;
2265
2266 /* Also if deleting an insn that references a label. */
2267 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2268 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2269 LABEL_NUSES (XEXP (note, 0))--;
2270
2271 return next;
2272 }
2273
2274 /* True if a given label can be deleted. */
2275
2276 static int
2277 can_delete_label_p (label)
2278 rtx label;
2279 {
2280 rtx x;
2281
2282 if (LABEL_PRESERVE_P (label))
2283 return 0;
2284
2285 for (x = forced_labels; x; x = XEXP (x, 1))
2286 if (label == XEXP (x, 0))
2287 return 0;
2288 for (x = label_value_list; x; x = XEXP (x, 1))
2289 if (label == XEXP (x, 0))
2290 return 0;
2291 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2292 if (label == XEXP (x, 0))
2293 return 0;
2294
2295 /* User declared labels must be preserved. */
2296 if (LABEL_NAME (label) != 0)
2297 return 0;
2298
2299 return 1;
2300 }
2301
2302 static int
2303 tail_recursion_label_p (label)
2304 rtx label;
2305 {
2306 rtx x;
2307
2308 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2309 if (label == XEXP (x, 0))
2310 return 1;
2311
2312 return 0;
2313 }
2314
2315 /* Blocks A and B are to be merged into a single block A. The insns
2316 are already contiguous, hence `nomove'. */
2317
void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note.  Extend the deletion range started
     above, or start one here if no label was deleted.  */
  if (NOTE_INSN_BASIC_BLOCK_P (b_head))
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      /* Find the insn that will become A's new end: the last insn
	 before the jump that is not a (non-basic-block) note.  */
      for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
	if (GET_CODE (prev) != NOTE
	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
	    || prev == a->head)
	  break;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = prev;
    }
  else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
    del_first = NEXT_INSN (a_end);

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  flow_delete_insn_chain (del_first, del_last);

  /* Normally there should only be one successor of A and that is B, but
     partway though the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      if (basic_block_for_insn)
	{
	  /* Update the insn -> block map for each surviving B insn.  */
	  BLOCK_FOR_INSN (b_head) = a;
	  while (b_head != b_end)
	    {
	      b_head = NEXT_INSN (b_head);
	      BLOCK_FOR_INSN (b_head) = a;
	    }
	}
      a_end = b_end;
    }
  a->end = a_end;

  expunge_block (b);
}
2420
2421 /* Blocks A and B are to be merged into a single block. A has no incoming
2422 fallthru edge, so it can be moved before B without adding or modifying
2423 any jumps (aside from the jump from A to B). */
2424
2425 static int
2426 merge_blocks_move_predecessor_nojumps (a, b)
2427 basic_block a, b;
2428 {
2429 rtx start, end, barrier;
2430 int index;
2431
2432 start = a->head;
2433 end = a->end;
2434
2435 barrier = next_nonnote_insn (end);
2436 if (GET_CODE (barrier) != BARRIER)
2437 abort ();
2438 flow_delete_insn (barrier);
2439
2440 /* Move block and loop notes out of the chain so that we do not
2441 disturb their order.
2442
2443 ??? A better solution would be to squeeze out all the non-nested notes
2444 and adjust the block trees appropriately. Even better would be to have
2445 a tighter connection between block trees and rtl so that this is not
2446 necessary. */
2447 start = squeeze_notes (start, end);
2448
2449 /* Scramble the insn chain. */
2450 if (end != PREV_INSN (b->head))
2451 reorder_insns (start, end, PREV_INSN (b->head));
2452
2453 if (rtl_dump_file)
2454 {
2455 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2456 a->index, b->index);
2457 }
2458
2459 /* Swap the records for the two blocks around. Although we are deleting B,
2460 A is now where B was and we want to compact the BB array from where
2461 A used to be. */
2462 BASIC_BLOCK (a->index) = b;
2463 BASIC_BLOCK (b->index) = a;
2464 index = a->index;
2465 a->index = b->index;
2466 b->index = index;
2467
2468 /* Now blocks A and B are contiguous. Merge them. */
2469 merge_blocks_nomove (a, b);
2470
2471 return 1;
2472 }
2473
2474 /* Blocks A and B are to be merged into a single block. B has no outgoing
2475 fallthru edge, so it can be moved after A without adding or modifying
2476 any jumps (aside from the jump from A to B). */
2477
2478 static int
2479 merge_blocks_move_successor_nojumps (a, b)
2480 basic_block a, b;
2481 {
2482 rtx start, end, barrier;
2483
2484 start = b->head;
2485 end = b->end;
2486 barrier = NEXT_INSN (end);
2487
2488 /* Recognize a jump table following block B. */
2489 if (GET_CODE (barrier) == CODE_LABEL
2490 && NEXT_INSN (barrier)
2491 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2492 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2493 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2494 {
2495 end = NEXT_INSN (barrier);
2496 barrier = NEXT_INSN (end);
2497 }
2498
2499 /* There had better have been a barrier there. Delete it. */
2500 if (GET_CODE (barrier) != BARRIER)
2501 abort ();
2502 flow_delete_insn (barrier);
2503
2504 /* Move block and loop notes out of the chain so that we do not
2505 disturb their order.
2506
2507 ??? A better solution would be to squeeze out all the non-nested notes
2508 and adjust the block trees appropriately. Even better would be to have
2509 a tighter connection between block trees and rtl so that this is not
2510 necessary. */
2511 start = squeeze_notes (start, end);
2512
2513 /* Scramble the insn chain. */
2514 reorder_insns (start, end, a->end);
2515
2516 /* Now blocks A and B are contiguous. Merge them. */
2517 merge_blocks_nomove (a, b);
2518
2519 if (rtl_dump_file)
2520 {
2521 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2522 b->index, a->index);
2523 }
2524
2525 return 1;
2526 }
2527
2528 /* Attempt to merge basic blocks that are potentially non-adjacent.
2529 Return true iff the attempt succeeded. */
2530
static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
  /* If C has a tail recursion label, do not merge.  There is no
     edge recorded from the call_placeholder back to this label, as
     that would make optimize_sibling_and_tail_recursive_calls more
     complex for no gain.  */
  if (GET_CODE (c->head) == CODE_LABEL
      && tail_recursion_label_p (c->head))
    return 0;

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
    {
      merge_blocks_nomove (b, c);

      if (rtl_dump_file)
	{
	  fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
		   b->index, c->index);
	}

      return 1;
    }
  else
    {
      edge tmp_edge;
      basic_block d;
      int c_has_outgoing_fallthru;
      int b_has_incoming_fallthru;

      /* We must make sure to not munge nesting of exception regions,
	 lexical blocks, and loop notes.

	 The first is taken care of by requiring that the active eh
	 region at the end of one block always matches the active eh
	 region at the beginning of the next block.

	 The later two are taken care of by squeezing out all the notes.  */

      /* ???  A throw/catch edge (or any abnormal edge) should be rarely
	 executed and we may want to treat blocks which have two out
	 edges, one normal, one abnormal as only having one edge for
	 block merging purposes.  */

      for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      c_has_outgoing_fallthru = (tmp_edge != NULL);

      for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)
	  break;
      b_has_incoming_fallthru = (tmp_edge != NULL);

      /* If B does not have an incoming fallthru, and the exception regions
	 match, then it can be moved immediately before C without introducing
	 or modifying jumps.

	 C can not be the first block, so we do not have to worry about
	 accessing a non-existent block.  */
      d = BASIC_BLOCK (c->index - 1);
      if (! b_has_incoming_fallthru
	  && d->eh_end == b->eh_beg
	  && b->eh_end == c->eh_beg)
	return merge_blocks_move_predecessor_nojumps (b, c);

      /* Otherwise, we're going to try to move C after B.  Make sure the
	 exception regions match.

	 If B is the last basic block, then we must not try to access the
	 block structure for block B + 1.  Luckily in that case we do not
	 need to worry about matching exception regions.  */
      d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
      if (b->eh_end == c->eh_beg
	  && (d == NULL || c->eh_end == d->eh_beg))
	{
	  /* If C does not have an outgoing fallthru, then it can be moved
	     immediately after B without introducing or modifying jumps.  */
	  if (! c_has_outgoing_fallthru)
	    return merge_blocks_move_successor_nojumps (b, c);

	  /* Otherwise, we'll need to insert an extra jump, and possibly
	     a new block to contain it.  */
	  /* ??? Not implemented yet.  */
	}

      return 0;
    }
}
2623
2624 /* Top level driver for merge_blocks. */
2625
2626 static void
2627 try_merge_blocks ()
2628 {
2629 int i;
2630
2631 /* Attempt to merge blocks as made possible by edge removal. If a block
2632 has only one successor, and the successor has only one predecessor,
2633 they may be combined. */
2634
2635 for (i = 0; i < n_basic_blocks;)
2636 {
2637 basic_block c, b = BASIC_BLOCK (i);
2638 edge s;
2639
2640 /* A loop because chains of blocks might be combineable. */
2641 while ((s = b->succ) != NULL
2642 && s->succ_next == NULL
2643 && (s->flags & EDGE_EH) == 0
2644 && (c = s->dest) != EXIT_BLOCK_PTR
2645 && c->pred->pred_next == NULL
2646 /* If the jump insn has side effects, we can't kill the edge. */
2647 && (GET_CODE (b->end) != JUMP_INSN
2648 || onlyjump_p (b->end))
2649 && merge_blocks (s, b, c))
2650 continue;
2651
2652 /* Don't get confused by the index shift caused by deleting blocks. */
2653 i = b->index + 1;
2654 }
2655 }
2656
2657 /* The given edge should potentially be a fallthru edge. If that is in
2658 fact true, delete the jump and barriers that are in the way. */
2659
void
tidy_fallthru_edge (e, b, c)
     edge e;
     basic_block b, c;
{
  rtx q;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastized and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
    return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = b->end;
  if (GET_CODE (q) == JUMP_INSN
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
	  || (b->succ == e && e->succ_next == NULL)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
	q = PREV_INSN (q);
#endif

      if (b->head == q)
	{
	  /* The jump is the block's only insn; convert it in place to a
	     deleted note so the block does not become empty.  */
	  PUT_CODE (q, NOTE);
	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
	  NOTE_SOURCE_FILE (q) = 0;
	}
      else
	/* Back up over the jump; it will be unlinked below.  */
	q = PREV_INSN (q);

      b->end = q;
    }

  /* Selectively unlink the sequence between B's new end and C's head,
     if anything remains there.  */
  if (q != PREV_INSN (c->head))
    flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));

  e->flags |= EDGE_FALLTHRU;
}
2714
2715 /* Fix up edges that now fall through, or rather should now fall through
2716 but previously required a jump around now deleted blocks. Simplify
2717 the search by only examining blocks numerically adjacent, since this
2718 is how find_basic_blocks created them. */
2719
2720 static void
2721 tidy_fallthru_edges ()
2722 {
2723 int i;
2724
2725 for (i = 1; i < n_basic_blocks; ++i)
2726 {
2727 basic_block b = BASIC_BLOCK (i - 1);
2728 basic_block c = BASIC_BLOCK (i);
2729 edge s;
2730
2731 /* We care about simple conditional or unconditional jumps with
2732 a single successor.
2733
2734 If we had a conditional branch to the next instruction when
2735 find_basic_blocks was called, then there will only be one
2736 out edge for the block which ended with the conditional
2737 branch (since we do not create duplicate edges).
2738
2739 Furthermore, the edge will be marked as a fallthru because we
2740 merge the flags for the duplicate edges. So we do not want to
2741 check that the edge is not a FALLTHRU edge. */
2742 if ((s = b->succ) != NULL
2743 && s->succ_next == NULL
2744 && s->dest == c
2745 /* If the jump insn has side effects, we can't tidy the edge. */
2746 && (GET_CODE (b->end) != JUMP_INSN
2747 || onlyjump_p (b->end)))
2748 tidy_fallthru_edge (s, b, c);
2749 }
2750 }
2751 \f
2752 /* Perform data flow analysis.
2753 F is the first insn of the function; FLAGS is a set of PROP_* flags
2754 to be used in accumulating flow info. */
2755
void
life_analysis (f, file, flags)
     rtx f;
     FILE *file;
     int flags;
{
#ifdef ELIMINABLE_REGS
  register int i;
  static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);

  /* The post-reload life analysis has to have (on a global basis) the
     same registers live as was computed by reload itself.  Otherwise
     elimination offsets and such may be incorrect.

     Reload will make some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  free_basic_block_vars (1);
}
2834
2835 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2836 Search for REGNO. If found, abort if it is not wider than word_mode. */
2837
2838 static int
2839 verify_wide_reg_1 (px, pregno)
2840 rtx *px;
2841 void *pregno;
2842 {
2843 rtx x = *px;
2844 unsigned int regno = *(int *) pregno;
2845
2846 if (GET_CODE (x) == REG && REGNO (x) == regno)
2847 {
2848 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2849 abort ();
2850 return 1;
2851 }
2852 return 0;
2853 }
2854
2855 /* A subroutine of verify_local_live_at_start. Search through insns
2856 between HEAD and END looking for register REGNO. */
2857
2858 static void
2859 verify_wide_reg (regno, head, end)
2860 int regno;
2861 rtx head, end;
2862 {
2863 while (1)
2864 {
2865 if (INSN_P (head)
2866 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2867 return;
2868 if (head == end)
2869 break;
2870 head = NEXT_INSN (head);
2871 }
2872
2873 /* We didn't find the register at all. Something's way screwy. */
2874 if (rtl_dump_file)
2875 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
2876 print_rtl_and_abort ();
2877 }
2878
2879 /* A subroutine of update_life_info. Verify that there are no untoward
2880 changes in live_at_start during a local update. */
2881
static void
verify_local_live_at_start (new_live_at_start, bb)
     regset new_live_at_start;
     basic_block bb;
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       "live_at_start mismatch in bb %d, aborting\n",
		       bb->index);
	      debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
	      debug_bitmap_file (rtl_dump_file, new_live_at_start);
	    }
	  print_rtl_and_abort ();
	}
    }
  else
    {
      int i;

      /* Find the set of changed registers.  NEW_LIVE_AT_START becomes
	 the symmetric difference of the two regsets.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
	  /* No registers should die.  (A changed register that was set
	     in the old live-at-start went dead.)  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    {
	      if (rtl_dump_file)
		fprintf (rtl_dump_file,
			 "Register %d died unexpectedly in block %d\n", i,
			 bb->index);
	      print_rtl_and_abort ();
	    }

	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb->head, bb->end);
	});
    }
}
2928
2929 /* Updates life information starting with the basic blocks set in BLOCKS.
2930 If BLOCKS is null, consider it to be the universal set.
2931
2932 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2933 we are only expecting local modifications to basic blocks. If we find
2934 extra registers live at the beginning of a block, then we either killed
2935 useful data, or we have a broken split that wants data not provided.
2936 If we find registers removed from live_at_start, that means we have
2937 a broken peephole that is killing a register it shouldn't.
2938
2939 ??? This is not true in one situation -- when a pre-reload splitter
2940 generates subregs of a multi-word pseudo, current life analysis will
2941 lose the kill. So we _can_ have a pseudo go live. How irritating.
2942
2943 Including PROP_REG_INFO does not properly refresh regs_ever_live
2944 unless the caller resets it to zero. */
2945
void
update_life_info (blocks, extent, prop_flags)
     sbitmap blocks;
     enum update_life_extent extent;
     int prop_flags;
{
  regset tmp;
  regset_head tmp_head;
  int i;

  tmp = INITIALIZE_REG_SET (tmp_head);

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      calculate_global_regs_live (blocks, blocks,
				  prop_flags & PROP_SCAN_DEAD_CODE);

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  if (blocks)
    {
      /* Re-propagate only the blocks named in BLOCKS.  */
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);

	  /* Propagate backward from live-at-end; TMP ends up holding
	     the recomputed live-at-start set for BB.  */
	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	});
    }
  else
    {
      /* No block set given: process every block, last to first.  */
      for (i = n_basic_blocks - 1; i >= 0; --i)
	{
	  basic_block bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	}
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i,
				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i,
				 {
				   if (regno_reg_rtx[i] != 0)
				     {
				       REG_LIVE_LENGTH (i) = -1;
				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
				     }
				 });
    }
}
3027
3028 /* Free the variables allocated by find_basic_blocks.
3029
3030 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3031
3032 void
3033 free_basic_block_vars (keep_head_end_p)
3034 int keep_head_end_p;
3035 {
3036 if (basic_block_for_insn)
3037 {
3038 VARRAY_FREE (basic_block_for_insn);
3039 basic_block_for_insn = NULL;
3040 }
3041
3042 if (! keep_head_end_p)
3043 {
3044 clear_edges ();
3045 VARRAY_FREE (basic_block_info);
3046 n_basic_blocks = 0;
3047
3048 ENTRY_BLOCK_PTR->aux = NULL;
3049 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3050 EXIT_BLOCK_PTR->aux = NULL;
3051 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3052 }
3053 }
3054
3055 /* Return nonzero if the destination of SET equals the source. */
3056
3057 static int
3058 set_noop_p (set)
3059 rtx set;
3060 {
3061 rtx src = SET_SRC (set);
3062 rtx dst = SET_DEST (set);
3063
3064 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
3065 {
3066 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
3067 return 0;
3068 src = SUBREG_REG (src);
3069 dst = SUBREG_REG (dst);
3070 }
3071
3072 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
3073 && REGNO (src) == REGNO (dst));
3074 }
3075
3076 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3077 value to itself. */
3078
3079 static int
3080 noop_move_p (insn)
3081 rtx insn;
3082 {
3083 rtx pat = PATTERN (insn);
3084
3085 /* Insns carrying these notes are useful later on. */
3086 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3087 return 0;
3088
3089 if (GET_CODE (pat) == SET && set_noop_p (pat))
3090 return 1;
3091
3092 if (GET_CODE (pat) == PARALLEL)
3093 {
3094 int i;
3095 /* If nothing but SETs of registers to themselves,
3096 this insn can also be deleted. */
3097 for (i = 0; i < XVECLEN (pat, 0); i++)
3098 {
3099 rtx tem = XVECEXP (pat, 0, i);
3100
3101 if (GET_CODE (tem) == USE
3102 || GET_CODE (tem) == CLOBBER)
3103 continue;
3104
3105 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3106 return 0;
3107 }
3108
3109 return 1;
3110 }
3111 return 0;
3112 }
3113
3114 /* Delete any insns that copy a register to itself. */
3115
3116 static void
3117 delete_noop_moves (f)
3118 rtx f;
3119 {
3120 rtx insn;
3121 for (insn = f; insn; insn = NEXT_INSN (insn))
3122 {
3123 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3124 {
3125 PUT_CODE (insn, NOTE);
3126 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3127 NOTE_SOURCE_FILE (insn) = 0;
3128 }
3129 }
3130 }
3131
3132 /* Determine if the stack pointer is constant over the life of the function.
3133 Only useful before prologues have been emitted. */
3134
3135 static void
3136 notice_stack_pointer_modification_1 (x, pat, data)
3137 rtx x;
3138 rtx pat ATTRIBUTE_UNUSED;
3139 void *data ATTRIBUTE_UNUSED;
3140 {
3141 if (x == stack_pointer_rtx
3142 /* The stack pointer is only modified indirectly as the result
3143 of a push until later in flow. See the comments in rtl.texi
3144 regarding Embedded Side-Effects on Addresses. */
3145 || (GET_CODE (x) == MEM
3146 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
3147 || GET_CODE (XEXP (x, 0)) == PRE_INC
3148 || GET_CODE (XEXP (x, 0)) == POST_DEC
3149 || GET_CODE (XEXP (x, 0)) == POST_INC)
3150 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3151 current_function_sp_is_unchanging = 0;
3152 }
3153
3154 static void
3155 notice_stack_pointer_modification (f)
3156 rtx f;
3157 {
3158 rtx insn;
3159
3160 /* Assume that the stack pointer is unchanging if alloca hasn't
3161 been used. */
3162 current_function_sp_is_unchanging = !current_function_calls_alloca;
3163 if (! current_function_sp_is_unchanging)
3164 return;
3165
3166 for (insn = f; insn; insn = NEXT_INSN (insn))
3167 {
3168 if (INSN_P (insn))
3169 {
3170 /* Check if insn modifies the stack pointer. */
3171 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3172 NULL);
3173 if (! current_function_sp_is_unchanging)
3174 return;
3175 }
3176 }
3177 }
3178
3179 /* Mark a register in SET. Hard registers in large modes get all
3180 of their component registers set as well. */
3181
3182 static void
3183 mark_reg (reg, xset)
3184 rtx reg;
3185 void *xset;
3186 {
3187 regset set = (regset) xset;
3188 int regno = REGNO (reg);
3189
3190 if (GET_MODE (reg) == BLKmode)
3191 abort ();
3192
3193 SET_REGNO_REG_SET (set, regno);
3194 if (regno < FIRST_PSEUDO_REGISTER)
3195 {
3196 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3197 while (--n > 0)
3198 SET_REGNO_REG_SET (set, regno + n);
3199 }
3200 }
3201
3202 /* Mark those regs which are needed at the end of the function as live
3203 at the end of the last basic block. */
3204
static void
mark_regs_live_at_end (set)
     regset set;
{
  int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && reload_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  /* Mark all call-saved registers that we actually used.  These must
     be restored by the epilogue, so they are live until then.  */
  if (HAVE_epilogue && reload_completed)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
	  SET_REGNO_REG_SET (set, i);
    }

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
3265
3266 /* Callback function for for_each_successor_phi. DATA is a regset.
3267 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3268 INSN, in the regset. */
3269
3270 static int
3271 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3272 rtx insn ATTRIBUTE_UNUSED;
3273 int dest_regno ATTRIBUTE_UNUSED;
3274 int src_regno;
3275 void *data;
3276 {
3277 regset live = (regset) data;
3278 SET_REGNO_REG_SET (live, src_regno);
3279 return 0;
3280 }
3281
3282 /* Propagate global life info around the graph of basic blocks. Begin
3283 considering blocks with their corresponding bit set in BLOCKS_IN.
3284 If BLOCKS_IN is null, consider it the universal set.
3285
3286 BLOCKS_OUT is set for every block that was changed. */
3287
static void
calculate_global_regs_live (blocks_in, blocks_out, flags)
     sbitmap blocks_in, blocks_out;
     int flags;
{
  /* Circular worklist of blocks awaiting (re)processing.  */
  basic_block *queue, *qhead, *qtail, *qend;
  regset tmp, new_live_at_end;
  regset_head tmp_head;
  regset_head new_live_at_end_head;
  int i;

  tmp = INITIALIZE_REG_SET (tmp_head);
  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Clear out the garbage that might be hanging out in bb->aux.  */
  for (i = n_basic_blocks - 1; i >= 0; --i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  *--qhead = bb;
	  bb->aux = bb;
	});
    }
  else
    {
      for (i = 0; i < n_basic_blocks; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  *--qhead = bb;
	  bb->aux = bb;
	}
    }

  if (blocks_out)
    sbitmap_zero (blocks_out);

  /* Iterate to a fixed point: pop a block, recompute its liveness, and
     requeue its predecessors whenever its live_at_start changed.  */
  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);
      for (e = bb->succ; e; e = e->succ_next)
	{
	  basic_block sb = e->dest;
	  IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
	}

      /* Force the stack pointer to be live -- which might not already be
	 the case for blocks within infinite loops.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Similarly for the frame pointer before reload.  Any reference
	 to any pseudo before reload is a potential reference of the
	 frame pointer.  */
      if (! reload_completed)
	SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

      /* Regs used in phi nodes are not included in
	 global_live_at_start, since they are live only along a
	 particular edge.  Set those regs that are live because of a
	 phi node alternative corresponding to this particular block.  */
      if (in_ssa_form)
	for_each_successor_phi (bb, &set_phi_alternative_reg,
				new_live_at_end);

      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* If any of the registers in the new live_at_end set are
		 conditionally set in this basic block, we must rescan.
		 This is because conditional lifetimes at the end of the
		 block do not just take the live_at_end set into account,
		 but also the liveness at the start of each successor
		 block.  We can miss changes in those sets if we only
		 compare the new live_at_end against the previous one.  */
	      CLEAR_REG_SET (tmp);
	      rescan = bitmap_operation (tmp, new_live_at_end,
					 bb->cond_local_set, BITMAP_AND);
	    }

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      if (blocks_out)
	SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, bb->local_set,
			   bb->cond_local_set, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);

  /* Free the per-block local sets allocated above, for exactly the
     blocks we processed.  */
  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	});
    }
  else
    {
      for (i = n_basic_blocks - 1; i >= 0; --i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	}
    }

  free (queue);
}
3508 \f
3509 /* Subroutines of life analysis. */
3510
3511 /* Allocate the permanent data structures that represent the results
3512 of life analysis. Not static since used also for stupid life analysis. */
3513
3514 static void
3515 allocate_bb_life_data ()
3516 {
3517 register int i;
3518
3519 for (i = 0; i < n_basic_blocks; i++)
3520 {
3521 basic_block bb = BASIC_BLOCK (i);
3522
3523 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3524 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3525 }
3526
3527 ENTRY_BLOCK_PTR->global_live_at_end
3528 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3529 EXIT_BLOCK_PTR->global_live_at_start
3530 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3531
3532 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3533 }
3534
3535 void
3536 allocate_reg_life_data ()
3537 {
3538 int i;
3539
3540 max_regno = max_reg_num ();
3541
3542 /* Recalculate the register space, in case it has grown. Old style
3543 vector oriented regsets would set regset_{size,bytes} here also. */
3544 allocate_reg_info (max_regno, FALSE, FALSE);
3545
3546 /* Reset all the data we'll collect in propagate_block and its
3547 subroutines. */
3548 for (i = 0; i < max_regno; i++)
3549 {
3550 REG_N_SETS (i) = 0;
3551 REG_N_REFS (i) = 0;
3552 REG_N_DEATHS (i) = 0;
3553 REG_N_CALLS_CROSSED (i) = 0;
3554 REG_LIVE_LENGTH (i) = 0;
3555 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3556 }
3557 }
3558
3559 /* Delete dead instructions for propagate_block. */
3560
3561 static void
3562 propagate_block_delete_insn (bb, insn)
3563 basic_block bb;
3564 rtx insn;
3565 {
3566 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3567
3568 /* If the insn referred to a label, and that label was attached to
3569 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3570 pretty much mandatory to delete it, because the ADDR_VEC may be
3571 referencing labels that no longer exist. */
3572
3573 if (inote)
3574 {
3575 rtx label = XEXP (inote, 0);
3576 rtx next;
3577
3578 if (LABEL_NUSES (label) == 1
3579 && (next = next_nonnote_insn (label)) != NULL
3580 && GET_CODE (next) == JUMP_INSN
3581 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3582 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3583 {
3584 rtx pat = PATTERN (next);
3585 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3586 int len = XVECLEN (pat, diff_vec_p);
3587 int i;
3588
3589 for (i = 0; i < len; i++)
3590 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3591
3592 flow_delete_insn (next);
3593 }
3594 }
3595
3596 if (bb->end == insn)
3597 bb->end = PREV_INSN (insn);
3598 flow_delete_insn (insn);
3599 }
3600
3601 /* Delete dead libcalls for propagate_block. Return the insn
3602 before the libcall. */
3603
3604 static rtx
3605 propagate_block_delete_libcall (bb, insn, note)
3606 basic_block bb;
3607 rtx insn, note;
3608 {
3609 rtx first = XEXP (note, 0);
3610 rtx before = PREV_INSN (first);
3611
3612 if (insn == bb->end)
3613 bb->end = before;
3614
3615 flow_delete_insn_chain (first, insn);
3616 return before;
3617 }
3618
3619 /* Update the life-status of regs for one insn. Return the previous insn. */
3620
rtx
propagate_one_insn (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
  int i;

  /* Notes, barriers, etc. carry no liveness information.  */
  if (! INSN_P (insn))
    return prev;

  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
				  REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
			 && libcall_dead_p (pbi, note, insn));
    }

  /* We almost certainly don't want to delete prologue or epilogue
     instructions.  Warn about probable compiler lossage.  */
  if (insn_is_dead
      && reload_completed
      && (((HAVE_epilogue || HAVE_prologue)
	   && prologue_epilogue_contains (insn))
	  || (HAVE_sibcall_epilogue
	      && sibcall_epilogue_contains (insn)))
      && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
    {
      if (flags & PROP_KILL_DEAD_CODE)
	{
	  warning ("ICE: would have deleted prologue/epilogue insn");
	  if (!inhibit_warnings)
	    debug_rtx (insn);
	}
      libcall_is_dead = insn_is_dead = 0;
    }

  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
    {
      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* CC0 is now known to be dead.  Either this insn used it,
	 in which case it doesn't anymore, or clobbered it,
	 so the next insn can't use it.  */
      pbi->cc0_live = 0;

      if (libcall_is_dead)
	{
	  /* Delete the entire libcall sequence and resume scanning
	     just before it.  */
	  prev = propagate_block_delete_libcall (pbi->bb, insn, note);
	  insn = NEXT_INSN (prev);
	}
      else
	propagate_block_delete_insn (pbi->bb, insn);

      return prev;
    }

  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
    register rtx x = single_set (insn);

    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
	&& x != 0
	&& GET_CODE (SET_DEST (x)) == REG
	&& (GET_CODE (SET_SRC (x)) == PLUS
	    || GET_CODE (SET_SRC (x)) == MINUS)
	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	/* Ok, look for a following memory ref we can combine with.
	   If one is found, change the memory ref to a PRE_INC
	   or PRE_DEC, cancel this insn, and return 1.
	   Return 0 if nothing has been done.  */
	&& try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */

  CLEAR_REG_SET (pbi->new_set);

  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
    {
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* Skip over the whole libcall sequence.  */
      insn = XEXP (note, 0);
      return PREV_INSN (insn);
    }
  else if (GET_CODE (PATTERN (insn)) == SET
	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
    /* We have an insn to pop a constant amount off the stack.
       (Such insns use PLUS regardless of the direction of the stack,
       and any insn to adjust the stack by a constant is always a pop.)
       These insns, if not dead stores, have no effect on life.  */
    ;
  else
    {
      /* Any regs live at the time of a call instruction must not go
	 in a register clobbered by calls.  Find all regs now live and
	 record this for them.  */

      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
				   { REG_N_CALLS_CROSSED (i)++; });

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      if (GET_CODE (insn) == CALL_INSN)
	{
	  register int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Non-constant calls clobber memory.  */
	  if (! CONST_CALL_P (insn))
	    free_EXPR_LIST_list (&pbi->mem_set_list);

	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
			  cond, insn, pbi->flags);

	  /* Calls change all call-used and global registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] && ! global_regs[i]
		&& ! fixed_regs[i])
	      {
		/* We do not want REG_UNUSED notes for these registers.  */
		mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
			    cond, insn,
			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
	      }
	}

      /* If an insn doesn't use CC0, it becomes dead since we assume
	 that every insn clobbers it.  So show it dead here;
	 mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;

      /* Record uses.  */
      if (! insn_is_dead)
	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);

      /* Sometimes we may have inserted something before INSN (such as a move)
	 when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif

      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
	{
	  register int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Calls use their arguments.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == USE)
	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
			      cond, insn);

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);

	  /* Calls may also reference any of the global registers,
	     so they are made live.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
			     cond, insn);
	}
    }

  /* On final pass, update counts of how many insns in which each reg
     is live.  */
  if (flags & PROP_REG_INFO)
    EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
			       { REG_LIVE_LENGTH (i)++; });

  return prev;
}
3833
3834 /* Initialize a propagate_block_info struct for public consumption.
3835 Note that the structure itself is opaque to this file, but that
3836 the user can use the regsets provided here. */
3837
struct propagate_block_info *
init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
     basic_block bb;
     regset live, local_set, cond_local_set;
     int flags;
{
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));

  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;

  /* reg_next_use is only needed when computing log links or
     auto-increment opportunities.  */
  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
  else
    pbi->reg_next_use = NULL;

  pbi->new_set = BITMAP_XMALLOC ();

#ifdef HAVE_conditional_execution
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
				       free_reg_cond_life_info);
  pbi->reg_cond_reg = BITMAP_XMALLOC ();

  /* If this block ends in a conditional branch, for each register live
     from one side of the branch and not the other, record the register
     as conditionally dead.  */
  if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
      && GET_CODE (bb->end) == JUMP_INSN
      && any_condjump_p (bb->end))
    {
      regset_head diff_head;
      regset diff = INITIALIZE_REG_SET (diff_head);
      basic_block bb_true, bb_false;
      rtx cond_true, cond_false, set_src;
      int i;

      /* Identify the successor blocks.  */
      bb_true = bb->succ->dest;
      if (bb->succ->succ_next != NULL)
	{
	  bb_false = bb->succ->succ_next->dest;

	  /* Arrange for bb_true to be the branch-taken target.  */
	  if (bb->succ->flags & EDGE_FALLTHRU)
	    {
	      basic_block t = bb_false;
	      bb_false = bb_true;
	      bb_true = t;
	    }
	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
	    abort ();
	}
      else
	{
	  /* This can happen with a conditional jump to the next insn.  */
	  if (JUMP_LABEL (bb->end) != bb_true->head)
	    abort ();

	  /* Simplest way to do nothing.  */
	  bb_false = bb_true;
	}

      /* Extract the condition from the branch.  */
      set_src = SET_SRC (pc_set (bb->end));
      cond_true = XEXP (set_src, 0);
      cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
				   GET_MODE (cond_true), XEXP (cond_true, 0),
				   XEXP (cond_true, 1));
      /* If the branch falls through on the true condition, swap so that
	 cond_true really guards the taken edge.  */
      if (GET_CODE (XEXP (set_src, 1)) == PC)
	{
	  rtx t = cond_false;
	  cond_false = cond_true;
	  cond_true = t;
	}

      /* Compute which registers lead different lives in the successors.  */
      if (bitmap_operation (diff, bb_true->global_live_at_start,
			    bb_false->global_live_at_start, BITMAP_XOR))
	{
	  rtx reg = XEXP (cond_true, 0);

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (GET_CODE (reg) != REG)
	    abort ();

	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));

	  /* For each such register, mark it conditionally dead.  */
	  EXECUTE_IF_SET_IN_REG_SET
	    (diff, 0, i,
	     {
	       struct reg_cond_life_info *rcli;
	       rtx cond;

	       rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));

	       if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
		 cond = cond_false;
	       else
		 cond = cond_true;
	       rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);

	       splay_tree_insert (pbi->reg_cond_dead, i,
				  (splay_tree_value) rcli);
	     });
	}

      FREE_REG_SET (diff);
    }
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	    && (TYPE_RETURNS_STACK_DEPRESSED
		(TREE_TYPE (current_function_decl))))
      && (flags & PROP_SCAN_DEAD_CODE)
      && (bb->succ == NULL
	  || (bb->succ->succ_next == NULL
	      && bb->succ->dest == EXIT_BLOCK_PTR)))
    {
      rtx insn;
      for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SET
	    && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
	  {
	    rtx mem = SET_DEST (PATTERN (insn));

	    if (XEXP (mem, 0) == frame_pointer_rtx
		|| (GET_CODE (XEXP (mem, 0)) == PLUS
		    && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
		    && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
	      {
#ifdef AUTO_INC_DEC
		/* Store a copy of mem, otherwise the address may be scrogged
		   by find_auto_inc.  This matters because insn_dead_p uses
		   an rtx_equal_p check to determine if two addresses are
		   the same.  This works before find_auto_inc, but fails
		   after find_auto_inc, causing discrepancies between the
		   set of live registers calculated during the
		   calculate_global_regs_live phase and what actually exists
		   after flow completes, leading to aborts.  */
		if (flags & PROP_AUTOINC)
		  mem = shallow_copy_rtx (mem);
#endif
		pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
	      }
	  }
    }

  return pbi;
}
4000
4001 /* Release a propagate_block_info struct. */
4002
4003 void
4004 free_propagate_block_info (pbi)
4005 struct propagate_block_info *pbi;
4006 {
4007 free_EXPR_LIST_list (&pbi->mem_set_list);
4008
4009 BITMAP_XFREE (pbi->new_set);
4010
4011 #ifdef HAVE_conditional_execution
4012 splay_tree_delete (pbi->reg_cond_dead);
4013 BITMAP_XFREE (pbi->reg_cond_reg);
4014 #endif
4015
4016 if (pbi->reg_next_use)
4017 free (pbi->reg_next_use);
4018
4019 free (pbi);
4020 }
4021
4022 /* Compute the registers live at the beginning of a basic block BB from
4023 those live at the end.
4024
4025 When called, REG_LIVE contains those live at the end. On return, it
4026 contains those live at the beginning.
4027
4028 LOCAL_SET, if non-null, will be set with all registers killed
4029 unconditionally by this basic block.
4030 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4031 killed conditionally by this basic block. If there is any unconditional
4032 set of a register, then the corresponding bit will be set in LOCAL_SET
4033 and cleared in COND_LOCAL_SET.
4034 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4035 case, the resulting set will be equal to the union of the two sets that
4036 would otherwise be computed. */
4037
4038 void
4039 propagate_block (bb, live, local_set, cond_local_set, flags)
4040 basic_block bb;
4041 regset live;
4042 regset local_set;
4043 regset cond_local_set;
4044 int flags;
4045 {
4046 struct propagate_block_info *pbi;
4047 rtx insn, prev;
4048
4049 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4050
4051 if (flags & PROP_REG_INFO)
4052 {
4053 register int i;
4054
4055 /* Process the regs live at the end of the block.
4056 Mark them as not local to any one basic block. */
4057 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4058 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4059 }
4060
4061 /* Scan the block an insn at a time from end to beginning. */
4062
4063 for (insn = bb->end;; insn = prev)
4064 {
4065 /* If this is a call to `setjmp' et al, warn if any
4066 non-volatile datum is live. */
4067 if ((flags & PROP_REG_INFO)
4068 && GET_CODE (insn) == NOTE
4069 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4070 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4071
4072 prev = propagate_one_insn (pbi, insn);
4073
4074 if (insn == bb->head)
4075 break;
4076 }
4077
4078 free_propagate_block_info (pbi);
4079 }
4080 \f
4081 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4082 (SET expressions whose destinations are registers dead after the insn).
4083 NEEDED is the regset that says which regs are alive after the insn.
4084
4085 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4086
4087 If X is the entire body of an insn, NOTES contains the reg notes
4088 pertaining to the insn. */
4089
static int
insn_dead_p (pbi, x, call_ok, notes)
     struct propagate_block_info *pbi;
     rtx x;
     int call_ok;
     rtx notes ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (x);

#ifdef AUTO_INC_DEC
  /* If flow is invoked after reload, we must take existing AUTO_INC
     expressions into account.  */
  if (reload_completed)
    {
      for (; notes; notes = XEXP (notes, 1))
	{
	  if (REG_NOTE_KIND (notes) == REG_INC)
	    {
	      int regno = REGNO (XEXP (notes, 0));

	      /* Don't delete insns to set global regs.  */
	      if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		  || REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;
	    }
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
	return ! pbi->cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;

      if (GET_CODE (r) == MEM)
	{
	  rtx temp;

	  if (MEM_VOLATILE_P (r))
	    return 0;

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  */
	  temp = pbi->mem_set_list;
	  while (temp)
	    {
	      rtx mem = XEXP (temp, 0);

	      if (rtx_equal_p (mem, r))
		return 1;
#ifdef AUTO_INC_DEC
	      /* Check if memory reference matches an auto increment.  Only
		 post increment/decrement or modify are valid.  */
	      if (GET_MODE (mem) == GET_MODE (r)
		  && (GET_CODE (XEXP (mem, 0)) == POST_DEC
		      || GET_CODE (XEXP (mem, 0)) == POST_INC
		      || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
		  && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
		  && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
		return 1;
#endif
	      temp = XEXP (temp, 1);
	    }
	}
      else
	{
	  /* Strip any part-of-register wrappers to reach the REG
	     itself before consulting the live set.  */
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);

	  if (GET_CODE (r) == REG)
	    {
	      int regno = REGNO (r);

	      /* Obvious.  */
	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;

	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  int n = HARD_REGNO_NREGS (regno, GET_MODE (r));

		  while (--n > 0)
		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		      return 0;
		}

	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;

	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;

	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
	      /* Before reload, do not allow sets of the pic register
		 to be deleted.  Reload can insert references to
		 constant pool memory anywhere in the function, making
		 the PIC register live where it wasn't before.  */
	      if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
		  && ! reload_completed)
		return 0;
#endif

	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers.  */
  else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
	   && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	   && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
    return 1;

  /* We do not check other CLOBBER or USE here.  An insn consisting of just
     a CLOBBER or just a USE should not be deleted.  */
  return 0;
}
4270
4271 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4272 return 1 if the entire library call is dead.
4273 This is true if INSN copies a register (hard or pseudo)
4274 and if the hard return reg of the call insn is dead.
4275 (The caller should have tested the destination of the SET inside
4276 INSN already for death.)
4277
4278 If this insn doesn't just copy a register, then we don't
4279 have an ordinary libcall. In that case, cse could not have
4280 managed to substitute the source for the dest later on,
4281 so we can assume the libcall is dead.
4282
4283 PBI is the block info giving pseudoregs live before this insn.
4284 NOTE is the REG_RETVAL note of the insn. */
4285
static int
libcall_dead_p (pbi, note, insn)
     struct propagate_block_info *pbi;
     rtx note;
     rtx insn;
{
  rtx x = single_set (insn);

  if (x)
    {
      register rtx r = SET_SRC (x);
      if (GET_CODE (r) == REG)
	{
	  rtx call = XEXP (note, 0);
	  rtx call_pat;
	  register int i;

	  /* Find the call insn.  NOTE points at the REG_LIBCALL
	     partner insn; scan forward from it.  */
	  while (call != insn && GET_CODE (call) != CALL_INSN)
	    call = NEXT_INSN (call);

	  /* If there is none, do nothing special,
	     since ordinary death handling can understand these insns.  */
	  if (call == insn)
	    return 0;

	  /* See if the hard reg holding the value is dead.
	     If this is a PARALLEL, find the call within it.  */
	  call_pat = PATTERN (call);
	  if (GET_CODE (call_pat) == PARALLEL)
	    {
	      for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
		if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
		    && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
		  break;

	      /* This may be a library call that is returning a value
		 via invisible pointer.  Do nothing special, since
		 ordinary death handling can understand these insns.  */
	      if (i < 0)
		return 0;

	      call_pat = XVECEXP (call_pat, 0, i);
	    }

	  return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
	}
    }
  /* INSN is not a plain register copy; per the commentary above, the
     libcall is assumed dead in that case.  */
  return 1;
}
4336
4337 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4338 live at function entry. Don't count global register variables, variables
4339 in registers that can be used for function arg passing, or variables in
4340 fixed hard registers. */
4341
4342 int
4343 regno_uninitialized (regno)
4344 int regno;
4345 {
4346 if (n_basic_blocks == 0
4347 || (regno < FIRST_PSEUDO_REGISTER
4348 && (global_regs[regno]
4349 || fixed_regs[regno]
4350 || FUNCTION_ARG_REGNO_P (regno))))
4351 return 0;
4352
4353 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4354 }
4355
4356 /* 1 if register REGNO was alive at a place where `setjmp' was called
4357 and was set more than once or is an argument.
4358 Such regs may be clobbered by `longjmp'. */
4359
4360 int
4361 regno_clobbered_at_setjmp (regno)
4362 int regno;
4363 {
4364 if (n_basic_blocks == 0)
4365 return 0;
4366
4367 return ((REG_N_SETS (regno) > 1
4368 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4369 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4370 }
4371 \f
4372 /* INSN references memory, possibly using autoincrement addressing modes.
4373 Find any entries on the mem_set_list that need to be invalidated due
4374 to an address change. */
4375
4376 static void
4377 invalidate_mems_from_autoinc (pbi, insn)
4378 struct propagate_block_info *pbi;
4379 rtx insn;
4380 {
4381 rtx note = REG_NOTES (insn);
4382 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4383 {
4384 if (REG_NOTE_KIND (note) == REG_INC)
4385 {
4386 rtx temp = pbi->mem_set_list;
4387 rtx prev = NULL_RTX;
4388 rtx next;
4389
4390 while (temp)
4391 {
4392 next = XEXP (temp, 1);
4393 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4394 {
4395 /* Splice temp out of list. */
4396 if (prev)
4397 XEXP (prev, 1) = next;
4398 else
4399 pbi->mem_set_list = next;
4400 free_EXPR_LIST_node (temp);
4401 }
4402 else
4403 prev = temp;
4404 temp = next;
4405 }
4406 }
4407 }
4408 }
4409
/* EXP is either a MEM or a REG.  Remove any dependent entries
   from pbi->mem_set_list.  */
4412
4413 static void
4414 invalidate_mems_from_set (pbi, exp)
4415 struct propagate_block_info *pbi;
4416 rtx exp;
4417 {
4418 rtx temp = pbi->mem_set_list;
4419 rtx prev = NULL_RTX;
4420 rtx next;
4421
4422 while (temp)
4423 {
4424 next = XEXP (temp, 1);
4425 if ((GET_CODE (exp) == MEM
4426 && output_dependence (XEXP (temp, 0), exp))
4427 || (GET_CODE (exp) == REG
4428 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4429 {
4430 /* Splice this entry out of the list. */
4431 if (prev)
4432 XEXP (prev, 1) = next;
4433 else
4434 pbi->mem_set_list = next;
4435 free_EXPR_LIST_node (temp);
4436 }
4437 else
4438 prev = temp;
4439 temp = next;
4440 }
4441 }
4442
/* Process the registers that are set within X.  Each register set here
   is recorded as dead prior to this insn with respect to the backwards
   scan over the block.

   If INSN is nonzero, it is the insn being processed.

   The operations to perform are taken from PBI->flags.  */
4449
static void
mark_set_regs (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x, insn;
{
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;

  /* Registers modified by embedded side effects (REG_INC notes) count
     as sets too.  If the whole pattern is conditionally executed, the
     side effect is conditional on the same test.  */
  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
	if (REG_NOTE_KIND (link) == REG_INC)
	  mark_set_1 (pbi, SET, XEXP (link, 0),
		      (GET_CODE (x) == COND_EXEC
		       ? COND_EXEC_TEST (x) : NULL_RTX),
		      insn, pbi->flags);
      }
 retry:
  switch (code = GET_CODE (x))
    {
    case SET:
    case CLOBBER:
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
      return;

    case COND_EXEC:
      /* Record the condition and reprocess the guarded body.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PARALLEL:
      {
	register int i;
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  {
	    rtx sub = XVECEXP (x, 0, i);
	    switch (code = GET_CODE (sub))
	      {
	      case COND_EXEC:
		/* A COND_EXEC element inside an already-conditional
		   pattern is not expected.  NOTE(review): COND is not
		   cleared after this element, so any SET/CLOBBER
		   elements processed later in this loop are also
		   treated as conditional.  */
		if (cond != NULL_RTX)
		  abort ();

		cond = COND_EXEC_TEST (sub);
		sub = COND_EXEC_CODE (sub);
		if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
		  break;
		/* Fall through.  */

	      case SET:
	      case CLOBBER:
		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
		break;

	      default:
		break;
	      }
	  }
	break;
      }

    default:
      break;
    }
}
4515
/* Process a single SET or CLOBBER rtx whose destination is REG.  CODE
   is SET or CLOBBER, COND is the condition under which the set occurs
   (or NULL_RTX if unconditional), INSN is the containing insn, and
   FLAGS is the set of PROP_* operations to perform.  */
4517
static void
mark_set_1 (pbi, code, reg, cond, insn, flags)
     struct propagate_block_info *pbi;
     enum rtx_code code;
     rtx reg, cond, insn;
     int flags;
{
  int regno_first = -1, regno_last = -1;
  int not_dead = 0;
  int i;

  /* Some targets place small structures in registers for
     return values of functions.  We have to detect this
     case specially here to get correct flow information.  */
  if (GET_CODE (reg) == PARALLEL
      && GET_MODE (reg) == BLKmode)
    {
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
      return;
    }

  /* Modifying just one hardware register of a multi-reg value or just a
     byte field of a register does not mean the value from before this insn
     is now dead.  Of course, if it was dead to begin with, it remains
     unused afterward.  */

  switch (GET_CODE (reg))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
      do
	reg = XEXP (reg, 0);
      while (GET_CODE (reg) == SUBREG
	     || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART);
      if (GET_CODE (reg) == MEM)
	break;
      /* A partial store does not kill the previous value if the
	 register was live.  */
      not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
      /* Fall through.  */

    case REG:
      regno_last = regno_first = REGNO (reg);
      if (regno_first < FIRST_PSEUDO_REGISTER)
	regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (reg)) == REG)
	{
	  enum machine_mode outer_mode = GET_MODE (reg);
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));

	  /* Identify the range of registers affected.  This is moderately
	     tricky for hard registers.  See alter_subreg.  */

	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
	  if (regno_first < FIRST_PSEUDO_REGISTER)
	    {
#ifdef ALTER_HARD_SUBREG
	      regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
					       inner_mode, regno_first);
#else
	      regno_first += SUBREG_WORD (reg);
#endif
	      regno_last = (regno_first
			    + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);

	      /* Since we've just adjusted the register number ranges, make
		 sure REG matches.  Otherwise some_was_live will be clear
		 when it shouldn't have been, and we'll create incorrect
		 REG_UNUSED notes.  */
	      reg = gen_rtx_REG (outer_mode, regno_first);
	    }
	  else
	    {
	      /* If the number of words in the subreg is less than the number
		 of words in the full register, we have a well-defined partial
		 set.  Otherwise the high bits are undefined.

		 This is only really applicable to pseudos, since we just took
		 care of multi-word hard registers.  */
	      if (((GET_MODE_SIZE (outer_mode)
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  < ((GET_MODE_SIZE (inner_mode)
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
		not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);

	      reg = SUBREG_REG (reg);
	    }
	}
      else
	reg = SUBREG_REG (reg);
      break;

    default:
      break;
    }

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (optimize && (flags & PROP_SCAN_DEAD_CODE))
    {
      if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
	invalidate_mems_from_set (pbi, reg);

      /* If the memory reference had embedded side effects (autoincrement
	 address modes), then we may need to kill some entries on the
	 memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
	invalidate_mems_from_autoinc (pbi, insn);

      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
	  /* ??? With more effort we could track conditional memory life.  */
	  && ! cond
	  /* We do not know the size of a BLKmode store, so we do not track
	     them for redundant store elimination.  */
	  && GET_MODE (reg) != BLKmode
	  /* There are no REG_INC notes for SP, so we can't assume we'll see
	     everything that invalidates it.  To be safe, don't eliminate any
	     stores though SP; none of them should be redundant anyway.  */
	  && ! reg_mentioned_p (stack_pointer_rtx, reg))
	{
#ifdef AUTO_INC_DEC
	  /* Store a copy of mem, otherwise the address may be
	     scrogged by find_auto_inc.  */
	  if (flags & PROP_AUTOINC)
	    reg = shallow_copy_rtx (reg);
#endif
	  pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
	}
    }

  /* Pointers used for function state are never marked dead; setting
     them must be preserved.  */
  if (GET_CODE (reg) == REG
      && ! (regno_first == FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
#endif
      )
    {
      int some_was_live = 0, some_was_dead = 0;

      for (i = regno_first; i <= regno_last; ++i)
	{
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
	  if (pbi->local_set)
	    {
	      /* Order of the set operation matters here since both
		 sets may be the same.  */
	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
	      if (cond != NULL_RTX
		  && ! REGNO_REG_SET_P (pbi->local_set, i))
		SET_REGNO_REG_SET (pbi->cond_local_set, i);
	      else
		SET_REGNO_REG_SET (pbi->local_set, i);
	    }
	  if (code != CLOBBER)
	    SET_REGNO_REG_SET (pbi->new_set, i);

	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}

#ifdef HAVE_conditional_execution
      /* Consider conditional death in deciding that the register needs
	 a death note.  */
      if (some_was_live && ! not_dead
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! mark_regno_cond_dead (pbi, i, cond))
	      not_dead = 1;
	}
#endif

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  register rtx y;
	  register int blocknum = pbi->bb->index;

	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    {
	      y = pbi->reg_next_use[regno_first];

	      /* The next use is no longer next, since a store intervenes.  */
	      for (i = regno_first; i <= regno_last; ++i)
		pbi->reg_next_use[i] = 0;
	    }

	  if (flags & PROP_REG_INFO)
	    {
	      for (i = regno_first; i <= regno_last; ++i)
		{
		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (i) += 1;
		  REG_N_REFS (i) += (optimize_size ? 1
				     : pbi->bb->loop_depth + 1);

		  /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (i) += 1;
		}

	      /* If this is a hard reg, record this function uses the reg.  */
	      if (regno_first < FIRST_PSEUDO_REGISTER)
		{
		  for (i = regno_first; i <= regno_last; i++)
		    regs_ever_live[i] = 1;
		}
	      else
		{
		  /* Keep track of which basic blocks each reg appears in.  */
		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno_first) = blocknum;
		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
		}
	    }

	  if (! some_was_dead)
	    {
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno_first >= FIRST_PSEUDO_REGISTER
			  || asm_noperands (PATTERN (y)) < 0))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (not_dead)
	    ;
	  else if (! some_was_live)
	    {
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (regno_first) += 1;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  for (i = regno_first; i <= regno_last; ++i)
		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
		      REG_NOTES (insn)
			= alloc_EXPR_LIST (REG_UNUSED,
					   gen_rtx_REG (reg_raw_mode[i], i),
					   REG_NOTES (insn));
		}
	    }
	}

      /* Mark the register as being dead.  */
      if (some_was_live
	  && ! not_dead
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    CLEAR_REGNO_REG_SET (pbi->reg_live, i);
	}
    }
  else if (GET_CODE (reg) == REG)
    {
      /* One of the special pointer registers above: just invalidate
	 the next-use record.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	pbi->reg_next_use[regno_first] = 0;
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
4839 \f
4840 #ifdef HAVE_conditional_execution
4841 /* Mark REGNO conditionally dead.
4842 Return true if the register is now unconditionally dead. */
4843
static int
mark_regno_cond_dead (pbi, regno, cond)
     struct propagate_block_info *pbi;
     int regno;
     rtx cond;
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
	 It may have been conditionally used, or there may be a
	 subsequent set with a complementary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
	{
	  /* The register was unconditionally live previously.
	     Record the current condition as the condition under
	     which it is dead.  */
	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	  rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	  /* Not unconditionally dead.  */
	  return 0;
	}
      else
	{
	  /* The register was conditionally live previously.
	     Add the new condition to the old.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  ncond = rcli->condition;
	  ncond = ior_reg_cond (ncond, cond);

	  /* If the register is now unconditionally dead,
	     remove the entry in the splay_tree.  */
	  if (ncond == const1_rtx)
	    splay_tree_remove (pbi->reg_cond_dead, regno);
	  else
	    {
	      rcli->condition = ncond;

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	      /* Not unconditionally dead.  */
	      return 0;
	    }
	}
    }

  return 1;
}
4914
4915 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4916
4917 static void
4918 free_reg_cond_life_info (value)
4919 splay_tree_value value;
4920 {
4921 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4922 free_EXPR_LIST_list (&rcli->condition);
4923 free (rcli);
4924 }
4925
4926 /* Helper function for flush_reg_cond_reg. */
4927
static int
flush_reg_cond_reg_1 (node, data)
     splay_tree_node node;
     void *data;
{
  struct reg_cond_life_info *rcli;
  int *xdata = (int *) data;
  unsigned int regno = xdata[0];
  rtx c, *prev;

  /* Don't need to search if last flushed value was farther on in
     the in-order traversal.  */
  if (xdata[1] >= (int) node->key)
    return 0;

  /* Splice out portions of the expression that refer to regno.  */
  rcli = (struct reg_cond_life_info *) node->value;
  c = *(prev = &rcli->condition);
  while (c)
    {
      /* Each list element wraps a condition of the form (eq reg 0);
	 compare its register against the one being flushed.  */
      if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
	{
	  rtx next = XEXP (c, 1);
	  free_EXPR_LIST_node (c);
	  c = *prev = next;
	}
      else
	c = *(prev = &XEXP (c, 1));
    }

  /* If the entire condition is now NULL, signal the node to be removed.  */
  if (! rcli->condition)
    {
      xdata[1] = node->key;
      return -1;
    }
  else
    return 0;
}
4967
4968 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4969
4970 static void
4971 flush_reg_cond_reg (pbi, regno)
4972 struct propagate_block_info *pbi;
4973 int regno;
4974 {
4975 int pair[2];
4976
4977 pair[0] = regno;
4978 pair[1] = -1;
4979 while (splay_tree_foreach (pbi->reg_cond_dead,
4980 flush_reg_cond_reg_1, pair) == -1)
4981 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4982
4983 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4984 }
4985
4986 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4987 We actually use EXPR_LIST to chain the sub-expressions together
4988 instead of IOR because it's easier to manipulate and we have
4989 the lists.c functions to reuse nodes.
4990
4991 Return a new rtl expression as appropriate. */
4992
static rtx
ior_reg_cond (old, x)
     rtx old, x;
{
  enum rtx_code x_code;
  rtx x_reg;
  rtx c;

  /* We expect these conditions to be of the form (eq reg 0).  */
  x_code = GET_CODE (x);
  if (GET_RTX_CLASS (x_code) != '<'
      || GET_CODE (x_reg = XEXP (x, 0)) != REG
      || XEXP (x, 1) != const0_rtx)
    abort ();

  /* Search the expression for an existing sub-expression of X_REG.  */
  for (c = old; c; c = XEXP (c, 1))
    {
      rtx y = XEXP (c, 0);
      if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
	{
	  /* If we find X already present in OLD, we need do nothing.  */
	  if (GET_CODE (y) == x_code)
	    return old;

	  /* If we find X being a complement of a condition in OLD,
	     then the entire condition is true.  */
	  if (GET_CODE (y) == reverse_condition (x_code))
	    return const1_rtx;
	}
    }

  /* Otherwise just add to the chain.  */
  return alloc_EXPR_LIST (0, x, old);
}
5028
5029 static rtx
5030 not_reg_cond (x)
5031 rtx x;
5032 {
5033 enum rtx_code x_code;
5034 rtx x_reg;
5035
5036 /* We expect these conditions to be of the form (eq reg 0). */
5037 x_code = GET_CODE (x);
5038 if (GET_RTX_CLASS (x_code) != '<'
5039 || GET_CODE (x_reg = XEXP (x, 0)) != REG
5040 || XEXP (x, 1) != const0_rtx)
5041 abort ();
5042
5043 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
5044 VOIDmode, x_reg, const0_rtx),
5045 NULL_RTX);
5046 }
5047
static rtx
nand_reg_cond (old, x)
     rtx old, x;
{
  enum rtx_code x_code;
  rtx x_reg;
  rtx c, *prev;

  /* We expect these conditions to be of the form (eq reg 0).  */
  x_code = GET_CODE (x);
  if (GET_RTX_CLASS (x_code) != '<'
      || GET_CODE (x_reg = XEXP (x, 0)) != REG
      || XEXP (x, 1) != const0_rtx)
    abort ();

  /* Search the expression for an existing sub-expression of X_REG.  */

  for (c = *(prev = &old); c; c = *(prev = &XEXP (c, 1)))
    {
      rtx y = XEXP (c, 0);
      if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
	{
	  /* If we find X already present in OLD, then we need to
	     splice it out.  Note PREV points either at OLD itself or
	     at the link field of the previous node, so the assignment
	     below unlinks C in both cases.  */
	  if (GET_CODE (y) == x_code)
	    {
	      *prev = XEXP (c, 1);
	      free_EXPR_LIST_node (c);
	      return old ? old : const0_rtx;
	    }

	  /* If we find X being a complement of a condition in OLD,
	     then we need do nothing.  */
	  if (GET_CODE (y) == reverse_condition (x_code))
	    return old;
	}
    }

  /* Otherwise, by implication, the register in question is now live for
     the inverse of the condition X.  */
  return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
					     VOIDmode, x_reg, const0_rtx),
			  old);
}
5092 #endif /* HAVE_conditional_execution */
5093 \f
5094 #ifdef AUTO_INC_DEC
5095
5096 /* Try to substitute the auto-inc expression INC as the address inside
5097 MEM which occurs in INSN. Currently, the address of MEM is an expression
5098 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5099 that has a single set whose source is a PLUS of INCR_REG and something
5100 else. */
5101
static void
attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
     struct propagate_block_info *pbi;
     rtx inc, insn, mem, incr, incr_reg;
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Which operand of the PLUS in Y is the incremented register.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (GET_CODE (q) == REG
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* Keep the insn-to-block mapping up to date for the new copy.  */
      if (basic_block_for_insn)
	for (temp = insns; temp; temp = NEXT_INSN (temp))
	  set_block_for_insn (temp, pbi->bb);

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insns_before (insns, insn);

      if (pbi->bb->head == insn)
	pbi->bb->head = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (GET_CODE (PREV_INSN (insn)) == INSN
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      incr_reg = q;
      regno = REGNO (q);

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (GET_CODE (temp) == CALL_INSN)
	  REG_N_CALLS_CROSSED (regno)++;
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
    abort ();

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	}

      /* Turn the increment insn into a deleted-insn note in place.  */
      PUT_CODE (incr, NOTE);
      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (incr) = 0;
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
5243
5244 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5245 reference. */
5246
static void
find_auto_inc (pbi, x, insn)
     struct propagate_block_info *pbi;
     rtx x;
     rtx insn;
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  /* Adding a side effect to a JUMP_INSN would be unsafe; see the
     comment in try_pre_increment.  */
  if (GET_CODE (insn) == JUMP_INSN)
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  /* Strip a constant displacement: (plus REG CONST_INT).  OFFSET is what
     distinguishes the pre- forms from the post- forms below.  */
  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  /* Only a bare register (possibly plus a constant) can be turned into
     an auto-inc address.  */
  if (GET_CODE (addr) != REG)
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment? */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* The increment must add something to the register used in the
     address; find the addend (INC_VAL).  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* A constant addend whose magnitude matches the access size (with
	 the matching displacement) maps onto one of the simple auto-inc
	 forms; otherwise a zero-displacement use may still be handled by
	 POST_MODIFY with an arbitrary constant.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
  else if (GET_CODE (inc_val) == REG
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      /* A register addend can only be expressed with POST_MODIFY, and
	 only when that register is unchanged between INSN and INCR.  */
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
5330
5331 #endif /* AUTO_INC_DEC */
5332 \f
/* Record a use of register REG by INSN, possibly governed by COND (the
   COND_EXEC test, when conditional execution is in use).  Updates the
   next-use chain, per-register statistics, REG_DEAD notes, the local
   live-register set, and the conditional-death tree.  */

static void
mark_used_reg (pbi, reg, cond, insn)
     struct propagate_block_info *pbi;
     rtx reg;
     rtx cond ATTRIBUTE_UNUSED;
     rtx insn;
{
  int regno = REGNO (reg);
  int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
  int some_was_dead = ! some_was_live;
  int some_not_set;
  int n;

  /* A hard reg in a wide mode may really be multiple registers.
     If so, mark all of them just like the first.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	{
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}
    }

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	    {
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
	      do
		regs_ever_live[regno + --n] = 1;
	      while (n > 0);
	    }
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  register int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno) = blocknum;
	  else if (REG_BASIC_BLOCK (regno) != blocknum)
	    REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  Uses in deeper
	     loops weigh more, unless optimizing for size.  */
	  REG_N_REFS (regno) += (optimize_size ? 1
				 : pbi->bb->loop_depth + 1);
	}
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno < FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
	{
	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
	  while (--n >= 0)
	    some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
	}

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
	      && ! find_regno_note (insn, REG_DEAD, regno))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */

	  n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
	  for (; n >= regno; n--)
	    if (! REGNO_REG_SET_P (pbi->reg_live, n)
		&& ! dead_or_set_regno_p (insn, n))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   gen_rtx_REG (reg_raw_mode[n], n),
				   REG_NOTES (insn));
	}
    }

  /* Mark the register (and every word of a multi-word hard reg) live
     above this insn.  */
  SET_REGNO_REG_SET (pbi->reg_live, regno);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	SET_REGNO_REG_SET (pbi->reg_live, regno + n);
    }

#ifdef HAVE_conditional_execution
  /* If this is a conditional use, record that fact.  If it is later
     conditionally set, we'll know to kill the register.  */
  if (cond != NULL_RTX)
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      if (some_was_live)
	{
	  node = splay_tree_lookup (pbi->reg_cond_dead, regno);
	  if (node == NULL)
	    {
	      /* The register was unconditionally live previously.
		 No need to do anything.  */
	    }
	  else
	    {
	      /* The register was conditionally live previously.
		 Subtract the new life cond from the old death cond.  */
	      rcli = (struct reg_cond_life_info *) node->value;
	      ncond = rcli->condition;
	      ncond = nand_reg_cond (ncond, cond);

	      /* If the register is now unconditionally live, remove the
		 entry in the splay_tree.  */
	      if (ncond == const0_rtx)
		{
		  rcli->condition = NULL_RTX;
		  splay_tree_remove (pbi->reg_cond_dead, regno);
		}
	      else
		{
		  rcli->condition = ncond;
		  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
		}
	    }
	}
      else
	{
	  /* The register was not previously live at all.  Record
	     the condition under which it is still dead.  */
	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
	  rcli->condition = not_reg_cond (cond);
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	}
    }
  else if (some_was_live)
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node != NULL)
	{
	  /* The register was conditionally live previously, but is now
	     unconditionally so.  Remove it from the conditionally dead
	     list, so that a conditional set won't cause us to think
	     it dead.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  rcli->condition = NULL_RTX;
	  splay_tree_remove (pbi->reg_cond_dead, regno);
	}
    }

#endif
}
5542
5543 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5544 This is done assuming the registers needed from X are those that
5545 have 1-bits in PBI->REG_LIVE.
5546
5547 INSN is the containing instruction. If INSN is dead, this function
5548 is not called. */
5549
static void
mark_used_regs (pbi, x, cond, insn)
     struct propagate_block_info *pbi;
     rtx x, cond, insn;
{
  register RTX_CODE code;
  register int regno;
  int flags = pbi->flags;

 retry:
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These contain no register uses.  */
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
	 final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_CODE))
	{
	  /* Invalidate the data for the last MEM stored, but only if MEM is
	     something that can be stored into.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	    /* Needn't clear the memory set list.  */
	    ;
	  else
	    {
	      /* This load may read from a remembered store; remove any
		 store on the list it anti-depends on.  */
	      rtx temp = pbi->mem_set_list;
	      rtx prev = NULL_RTX;
	      rtx next;

	      while (temp)
		{
		  next = XEXP (temp, 1);
		  if (anti_dependence (XEXP (temp, 0), x))
		    {
		      /* Splice temp out of the list.  */
		      if (prev)
			XEXP (prev, 1) = next;
		      else
			pbi->mem_set_list = next;
		      free_EXPR_LIST_node (temp);
		    }
		  else
		    prev = temp;
		  temp = next;
		}
	    }

	  /* If the memory reference had embedded side effects
	     (autoincrement address modes), then we may need to kill
	     some entries on the memory set list.  */
	  if (insn)
	    invalidate_mems_from_autoinc (pbi, insn);
	}

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
	find_auto_inc (pbi, x, insn);
#endif
      break;

    case SUBREG:
#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
	  && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
					 GET_MODE (SUBREG_REG (x))))
	REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (GET_CODE (x) != REG)
	goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
	register rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
#ifdef AUTO_INC_DEC
	    if (flags & PROP_AUTOINC)
	      find_auto_inc (pbi, testreg, insn);
#endif
	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
#ifdef CLASS_CANNOT_CHANGE_MODE
	    if (GET_CODE (testreg) == SUBREG
		&& GET_CODE (SUBREG_REG (testreg)) == REG
		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
		&& CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
					       GET_MODE (testreg)))
	      REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
#endif

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register, recursively scan the
	   value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (GET_CODE (testreg) == REG
		&& (regno = REGNO (testreg),
		    ! (regno == FRAME_POINTER_REGNUM
		       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& ! (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  {
	    if (mark_dest)
	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   ?!? Unfortunately, marking all hard registers as live causes massive
	   problems for the register allocator and marking all pseudos as live
	   creates mountains of uninitialized variable warnings.

	   So for now, just clear the memory set list and mark any regs
	   we can find in ASM_OPERANDS as used.  */
	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
	  free_EXPR_LIST_list (&pbi->mem_set_list);

	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
	   We can not just fall through here since then we would be confused
	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	   traditional asms unlike their normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
	  }
	break;
      }

    case COND_EXEC:
      /* Nested COND_EXECs are not expected.  */
      if (cond != NULL_RTX)
	abort ();

      /* The test itself is an unconditional use.  */
      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      /* Scan the guarded body under its governing condition.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PHI:
      /* We _do_not_ want to scan operands of phi nodes.  Operands of
	 a phi function are evaluated only when control reaches this
	 block along a particular edge.  Therefore, regs that appear
	 as arguments to phi should not be added to the global live at
	 start.  */
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);
    register int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    register int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
	  }
      }
  }
}
5816 \f
5817 #ifdef AUTO_INC_DEC
5818
/* INSN increments or decrements a register.  If the register's next use
   is in the same basic block and can absorb the change as pre-increment
   or pre-decrement addressing, rewrite that use and delete INSN.
   Return nonzero if the increment was folded away.

   NOTE(review): assumes single_set (INSN) yields a SET whose source has
   a CONST_INT second operand; a non-PLUS source is treated as a
   decrement -- presumably callers only pass PLUS/MINUS forms; verify
   against the caller.  */

static int
try_pre_increment_1 (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = pbi->reg_next_use[regno];
  if (y != 0
      && SET_DEST (x) != stack_pointer_rtx
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment and already changed
	 insn Y to do it.  So flush this increment instruction.  */
      propagate_block_delete_insn (pbi->bb, insn);

      /* Count a reference to this reg for the increment insn we are
	 deleting.  When a reg is incremented, spilling it is worse,
	 so we want to make that less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_N_REFS (regno) += (optimize_size ? 1
				 : pbi->bb->loop_depth + 1);
	  REG_N_SETS (regno)++;
	}

      /* Flush any remembered memories depending on the value of
	 the incremented register.  */
      invalidate_mems_from_set (pbi, SET_DEST (x));

      return 1;
    }
  return 0;
}
5861
5862 /* Try to change INSN so that it does pre-increment or pre-decrement
5863 addressing on register REG in order to add AMOUNT to REG.
5864 AMOUNT is negative for pre-decrement.
5865 Returns 1 if the change could be made.
5866 This checks all about the validity of the result of modifying INSN. */
5867
static int
try_pre_increment (insn, reg, amount)
     rtx insn, reg;
     HOST_WIDE_INT amount;
{
  register rtx use;

  /* Nonzero if we can try to make a pre-increment or pre-decrement.
     For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
  int pre_ok = 0;
  /* Nonzero if we can try to make a post-increment or post-decrement.
     For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
     It is possible for both PRE_OK and POST_OK to be nonzero if the machine
     supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
  int post_ok = 0;

  /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
  int do_post = 0;

  /* From the sign of increment, see which possibilities are conceivable
     on this target machine.  */
  if (HAVE_PRE_INCREMENT && amount > 0)
    pre_ok = 1;
  if (HAVE_POST_INCREMENT && amount > 0)
    post_ok = 1;

  if (HAVE_PRE_DECREMENT && amount < 0)
    pre_ok = 1;
  if (HAVE_POST_DECREMENT && amount < 0)
    post_ok = 1;

  if (! (pre_ok || post_ok))
    return 0;

  /* It is not safe to add a side effect to a jump insn
     because if the incremented register is spilled and must be reloaded
     there would be no way to store the incremented value back in memory.  */

  if (GET_CODE (insn) == JUMP_INSN)
    return 0;

  /* Look for a use of REG as a plain address (pre form), then as an
     address displaced by -AMOUNT (post form).  */
  use = 0;
  if (pre_ok)
    use = find_use_as_address (PATTERN (insn), reg, 0);
  if (post_ok && (use == 0 || use == (rtx) 1))
    {
      use = find_use_as_address (PATTERN (insn), reg, -amount);
      do_post = 1;
    }

  /* find_use_as_address returns (rtx) 1 when REG is used some other way
     or more than once; either case rules out the transformation.  */
  if (use == 0 || use == (rtx) 1)
    return 0;

  /* The access size must equal the increment amount exactly.  */
  if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
    return 0;

  /* See if this combination of instruction and addressing mode exists.  */
  if (! validate_change (insn, &XEXP (use, 0),
			 gen_rtx_fmt_e (amount > 0
					? (do_post ? POST_INC : PRE_INC)
					: (do_post ? POST_DEC : PRE_DEC),
					Pmode, reg), 0))
    return 0;

  /* Record that this insn now has an implicit side effect on X.  */
  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
  return 1;
}
5936
5937 #endif /* AUTO_INC_DEC */
5938 \f
5939 /* Find the place in the rtx X where REG is used as a memory address.
5940 Return the MEM rtx that so uses it.
5941 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5942 (plus REG (const_int PLUSCONST)).
5943
5944 If such an address does not appear, return 0.
5945 If REG appears more than once, or is used other than in such an address,
5946 return (rtx)1. */
5947
rtx
find_use_as_address (x, reg, plusconst)
     register rtx x;
     rtx reg;
     HOST_WIDE_INT plusconst;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  register int i;
  register rtx value = 0;
  register rtx tem;

  /* (mem (reg)) matches when no displacement is wanted.  */
  if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
    return x;

  /* (mem (plus (reg) (const_int PLUSCONST))) matches otherwise.  */
  if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == reg
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
    return x;

  if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
    {
      /* If REG occurs inside a MEM used in a bit-field reference,
	 that is unacceptable.  */
      if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
	return (rtx) (HOST_WIDE_INT) 1;
    }

  /* A use of REG itself, outside a memory address, disqualifies it.  */
  if (x == reg)
    return (rtx) (HOST_WIDE_INT) 1;

  /* Recurse over the operands.  Exactly one match is allowed overall;
     a second match, or any disqualifying use, yields (rtx) 1.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
	  if (value == 0)
	    value = tem;
	  else if (tem != 0)
	    return (rtx) (HOST_WIDE_INT) 1;
	}
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
	      if (value == 0)
		value = tem;
	      else if (tem != 0)
		return (rtx) (HOST_WIDE_INT) 1;
	    }
	}
    }

  return value;
}
6006 \f
6007 /* Write information about registers and basic blocks into FILE.
6008 This is part of making a debugging dump. */
6009
6010 void
6011 dump_regset (r, outf)
6012 regset r;
6013 FILE *outf;
6014 {
6015 int i;
6016 if (r == NULL)
6017 {
6018 fputs (" (nil)", outf);
6019 return;
6020 }
6021
6022 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6023 {
6024 fprintf (outf, " %d", i);
6025 if (i < FIRST_PSEUDO_REGISTER)
6026 fprintf (outf, " [%s]",
6027 reg_names[i]);
6028 });
6029 }
6030
6031 void
6032 debug_regset (r)
6033 regset r;
6034 {
6035 dump_regset (r, stderr);
6036 putc ('\n', stderr);
6037 }
6038
/* Write per-pseudo-register statistics and the CFG (blocks, edges and
   live register sets) into FILE, as part of a debugging dump.  */

void
dump_flow_info (file)
     FILE *file;
{
  register int i;
  static const char * const reg_class_names[] = REG_CLASS_NAMES;

  fprintf (file, "%d registers.\n", max_regno);
  /* Report statistics for every pseudo that is referenced at all.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REG_N_REFS (i))
      {
	enum reg_class class, altclass;
	fprintf (file, "\nRegister %d used %d times across %d insns",
		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
	if (REG_BASIC_BLOCK (i) >= 0)
	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
	if (REG_N_SETS (i))
	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
		   (REG_N_SETS (i) == 1) ? "" : "s");
	if (REG_USERVAR_P (regno_reg_rtx[i]))
	  fprintf (file, "; user var");
	if (REG_N_DEATHS (i) != 1)
	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
	if (REG_N_CALLS_CROSSED (i) == 1)
	  fprintf (file, "; crosses 1 call");
	else if (REG_N_CALLS_CROSSED (i))
	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
	/* Show preferred/alternate register classes unless they are the
	   uninteresting defaults (GENERAL_REGS / ALL_REGS).  */
	class = reg_preferred_class (i);
	altclass = reg_alternate_class (i);
	if (class != GENERAL_REGS || altclass != ALL_REGS)
	  {
	    if (altclass == ALL_REGS || class == ALL_REGS)
	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
	    else if (altclass == NO_REGS)
	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
	    else
	      fprintf (file, "; pref %s, else %s",
		       reg_class_names[(int) class],
		       reg_class_names[(int) altclass]);
	  }
	if (REGNO_POINTER_FLAG (i))
	  fprintf (file, "; pointer");
	fprintf (file, ".\n");
      }

  fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
  /* Dump each basic block: boundaries, edges, and live register sets.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      register basic_block bb = BASIC_BLOCK (i);
      register edge e;

      fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
	       i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);

      fprintf (file, "Predecessors: ");
      for (e = bb->pred; e; e = e->pred_next)
	dump_edge_info (file, e, 0);

      fprintf (file, "\nSuccessors: ");
      for (e = bb->succ; e; e = e->succ_next)
	dump_edge_info (file, e, 1);

      fprintf (file, "\nRegisters live at start:");
      dump_regset (bb->global_live_at_start, file);

      fprintf (file, "\nRegisters live at end:");
      dump_regset (bb->global_live_at_end, file);

      putc ('\n', file);
    }

  putc ('\n', file);
}
6114
/* Dump flow information to stderr; convenience wrapper for calling
   from a debugger.  */

void
debug_flow_info ()
{
  dump_flow_info (stderr);
}
6120
6121 static void
6122 dump_edge_info (file, e, do_succ)
6123 FILE *file;
6124 edge e;
6125 int do_succ;
6126 {
6127 basic_block side = (do_succ ? e->dest : e->src);
6128
6129 if (side == ENTRY_BLOCK_PTR)
6130 fputs (" ENTRY", file);
6131 else if (side == EXIT_BLOCK_PTR)
6132 fputs (" EXIT", file);
6133 else
6134 fprintf (file, " %d", side->index);
6135
6136 if (e->count)
6137 fprintf (file, " count:%d", e->count);
6138
6139 if (e->flags)
6140 {
6141 static const char * const bitnames[] = {
6142 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6143 };
6144 int comma = 0;
6145 int i, flags = e->flags;
6146
6147 fputc (' ', file);
6148 fputc ('(', file);
6149 for (i = 0; flags; i++)
6150 if (flags & (1 << i))
6151 {
6152 flags &= ~(1 << i);
6153
6154 if (comma)
6155 fputc (',', file);
6156 if (i < (int) ARRAY_SIZE (bitnames))
6157 fputs (bitnames[i], file);
6158 else
6159 fprintf (file, "%d", i);
6160 comma = 1;
6161 }
6162 fputc (')', file);
6163 }
6164 }
6165 \f
6166 /* Print out one basic block with live information at start and end. */
6167
6168 void
6169 dump_bb (bb, outf)
6170 basic_block bb;
6171 FILE *outf;
6172 {
6173 rtx insn;
6174 rtx last;
6175 edge e;
6176
6177 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6178 bb->index, bb->loop_depth, bb->count);
6179 if (bb->eh_beg != -1 || bb->eh_end != -1)
6180 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
6181 putc ('\n', outf);
6182
6183 fputs (";; Predecessors: ", outf);
6184 for (e = bb->pred; e; e = e->pred_next)
6185 dump_edge_info (outf, e, 0);
6186 putc ('\n', outf);
6187
6188 fputs (";; Registers live at start:", outf);
6189 dump_regset (bb->global_live_at_start, outf);
6190 putc ('\n', outf);
6191
6192 for (insn = bb->head, last = NEXT_INSN (bb->end);
6193 insn != last;
6194 insn = NEXT_INSN (insn))
6195 print_rtl_single (outf, insn);
6196
6197 fputs (";; Registers live at end:", outf);
6198 dump_regset (bb->global_live_at_end, outf);
6199 putc ('\n', outf);
6200
6201 fputs (";; Successors: ", outf);
6202 for (e = bb->succ; e; e = e->succ_next)
6203 dump_edge_info (outf, e, 1);
6204 putc ('\n', outf);
6205 }
6206
/* Print basic block BB to stderr; convenience wrapper for calling
   from a debugger.  */

void
debug_bb (bb)
     basic_block bb;
{
  dump_bb (bb, stderr);
}
6213
6214 void
6215 debug_bb_n (n)
6216 int n;
6217 {
6218 dump_bb (BASIC_BLOCK (n), stderr);
6219 }
6220
6221 /* Like print_rtl, but also print out live information for the start of each
6222 basic block. */
6223
void
print_rtl_with_bb (outf, rtx_first)
     FILE *outf;
     rtx rtx_first;
{
  register rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      int i;
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Indexed by insn uid: the block starting/ending at that insn,
	 and whether the insn belongs to zero, one, or several blocks.  */
      basic_block *start = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      basic_block *end = (basic_block *)
	xcalloc (max_uid, sizeof (basic_block));
      enum bb_state *in_bb_p = (enum bb_state *)
	xcalloc (max_uid, sizeof (enum bb_state));

      /* First pass: record block boundaries and per-insn membership.  */
      for (i = n_basic_blocks - 1; i >= 0; i--)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  rtx x;

	  start[INSN_UID (bb->head)] = bb;
	  end[INSN_UID (bb->end)] = bb;
	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      /* An insn already seen in another block is in multiple
		 blocks.  */
	      enum bb_state state = IN_MULTIPLE_BB;
	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == bb->end)
		break;
	    }
	}

      /* Second pass: print each insn, with block annotations around it.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;
	  basic_block bb;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && GET_CODE (tmp_rtx) != NOTE
	      && GET_CODE (tmp_rtx) != BARRIER)
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  /* Insns queued on the epilogue delay list are not in the main chain;
     print them separately.  */
  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
6311
6312 /* Dump the rtl into the current debugging dump file, then abort. */
6313 static void
6314 print_rtl_and_abort ()
6315 {
6316 if (rtl_dump_file)
6317 {
6318 print_rtl_with_bb (rtl_dump_file, get_insns ());
6319 fclose (rtl_dump_file);
6320 }
6321 abort ();
6322 }
6323
/* Recompute register set/reference counts immediately prior to register
   allocation.

   This avoids problems with set/reference counts changing to/from values
   which have special meanings to the register allocators.

   Additionally, the reference counts are the primary component used by the
   register allocators to prioritize pseudos for allocation to hard regs.
   More accurate reference counts generally lead to better register allocation.

   F is the first insn to be scanned.

   LOOP_STEP denotes how much loop_depth should be incremented per
   loop nesting level in order to increase the ref count more for
   references in a loop.

   It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
   possibly other information which is used by the register allocators.  */

void
recompute_reg_usage (f, loop_step)
     rtx f ATTRIBUTE_UNUSED;
     int loop_step ATTRIBUTE_UNUSED;
{
  /* Both parameters are unused here: the life-update pass below is
     asked (via PROP_REG_INFO) to recompute the register info itself
     over all blocks (NULL block set, local update).  */
  allocate_reg_life_data ();
  update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
}
6351
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
   blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
   of the number of registers that died.  */

int
count_or_remove_death_notes (blocks, kill)
     sbitmap blocks;
     int kill;
{
  int i, count = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb;
      rtx insn;

      /* Skip blocks outside the requested set.  */
      if (blocks && ! TEST_BIT (blocks, i))
	continue;

      bb = BASIC_BLOCK (i);

      for (insn = bb->head;; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      /* PPREV trails LINK so a note can be spliced out of the
		 list in place when KILL is set.  */
	      rtx *pprev = &REG_NOTES (insn);
	      rtx link = *pprev;

	      while (link)
		{
		  switch (REG_NOTE_KIND (link))
		    {
		    case REG_DEAD:
		      if (GET_CODE (XEXP (link, 0)) == REG)
			{
			  rtx reg = XEXP (link, 0);
			  int n;

			  /* A death of a multi-word hard register
			     counts once per constituent register.  */
			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			    n = 1;
			  else
			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
			  count += n;
			}
		      /* Fall through.  */

		    case REG_UNUSED:
		      if (kill)
			{
			  /* Unlink and free this note; LINK advances
			     to the successor via *PPREV.  */
			  rtx next = XEXP (link, 1);
			  free_EXPR_LIST_node (link);
			  *pprev = link = next;
			  break;
			}
		      /* Fall through.  */

		    default:
		      pprev = &XEXP (link, 1);
		      link = *pprev;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}
    }

  return count;
}
6423
6424
6425 /* Update insns block within BB. */
6426
6427 void
6428 update_bb_for_insn (bb)
6429 basic_block bb;
6430 {
6431 rtx insn;
6432
6433 if (! basic_block_for_insn)
6434 return;
6435
6436 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6437 {
6438 set_block_for_insn (insn, bb);
6439
6440 if (insn == bb->end)
6441 break;
6442 }
6443 }
6444
6445
/* Record INSN's block as BB.  Grows the uid-to-block map on demand
   so callers need not worry about newly created insns.  */

void
set_block_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  size_t uid = INSN_UID (insn);
  if (uid >= basic_block_for_insn->num_elements)
    {
      int new_size;

      /* Add one-eighth the size so we don't keep calling xrealloc.  */
      new_size = uid + (uid + 7) / 8;

      VARRAY_GROW (basic_block_for_insn, new_size);
    }
  VARRAY_BB (basic_block_for_insn, uid) = bb;
}
6465
/* Record INSN's block number as BB.  */
/* ??? This has got to go.  */

void
set_block_num (insn, bb)
     rtx insn;
     int bb;
{
  /* Just translate the block number into a basic_block pointer and
     delegate to set_block_for_insn.  */
  set_block_for_insn (insn, BASIC_BLOCK (bb));
}
6476 \f
/* Verify the CFG consistency.  This function checks some CFG
   invariants and aborts when something is wrong.  Hopefully this
   function will help to convert many optimization passes to ones
   that preserve CFG consistency.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - edge list correctness
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers

   In the future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */
6496
void
verify_flow_info ()
{
  const int max_uid = get_max_uid ();
  const rtx rtx_first = get_insns ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int i, last_bb_num_seen, num_bb_notes, err = 0;

  /* Map from insn uid to its containing basic block; filled in by
     the first walk below and consulted by the final pass.  */
  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));

  /* First pass: walk the blocks from last to first, verifying that
     each block's head and end insns appear in the insn chain and
     that no insn belongs to more than one block.  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx head = bb->head;
      rtx end = bb->end;

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;
      if (!x)
	{
	  error ("End insn %d for block %d not found in the insn stream.",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("Insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }
	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("Head insn %d for block %d not found in the insn stream.",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.)  */
  for (i = n_basic_blocks - 1; i >= 0; i--)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* Check correctness of edge lists.  */
      edge e;

      /* Each successor edge must cite BB as its source and be linked
	 in its destination's predecessor list.  */
      e = bb->succ;
      while (e)
	{
	  if (e->src != bb)
	    {
	      fprintf (stderr,
		       "verify_flow_info: Basic block %d succ edge is corrupted\n",
		       bb->index);
	      fprintf (stderr, "Predecessor: ");
	      dump_edge_info (stderr, e, 0);
	      fprintf (stderr, "\nSuccessor: ");
	      dump_edge_info (stderr, e, 1);
	      fflush (stderr);
	      err = 1;
	    }
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      edge e2 = e->dest->pred;
	      while (e2 && e2 != e)
		e2 = e2->pred_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->succ_next;
	}

      /* Likewise each predecessor edge must cite BB as its
	 destination and be linked in its source's successor list.  */
      e = bb->pred;
      while (e)
	{
	  if (e->dest != bb)
	    {
	      error ("Basic block %d pred edge is corrupted", bb->index);
	      fputs ("Predecessor: ", stderr);
	      dump_edge_info (stderr, e, 0);
	      fputs ("\nSuccessor: ", stderr);
	      dump_edge_info (stderr, e, 1);
	      fputc ('\n', stderr);
	      err = 1;
	    }
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      edge e2 = e->src->succ;
	      while (e2 && e2 != e)
		e2 = e2->succ_next;
	      if (!e2)
		{
		  error ("Basic block %i edge lists are corrupted", bb->index);
		  err = 1;
		}
	    }
	  e = e->pred_next;
	}

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = bb->head;
      if (GET_CODE (x) == CODE_LABEL)
	{
	  if (bb->end == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }
	  x = NEXT_INSN (x);
	}
      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
		 bb->index);
	  err = 1;
	}

      if (bb->end == x)
	{
	  /* Do checks for empty blocks here.  */
	}
      else
	{
	  /* Scan the block body: no stray basic block notes, and no
	     flow control insns before the block's end.  */
	  x = NEXT_INSN (x);
	  while (x)
	    {
	      if (NOTE_INSN_BASIC_BLOCK_P (x))
		{
		  error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
			 INSN_UID (x), bb->index);
		  err = 1;
		}

	      if (x == bb->end)
		break;

	      if (GET_CODE (x) == JUMP_INSN
		  || GET_CODE (x) == CODE_LABEL
		  || GET_CODE (x) == BARRIER)
		{
		  error ("In basic block %d:", bb->index);
		  fatal_insn ("Flow control insn inside a basic block", x);
		}

	      x = NEXT_INSN (x);
	    }
	}
    }

  /* Final pass over the whole insn chain: block notes must be
     numbered consecutively, insns outside any block must be of an
     allowed kind, and unconditional returns must be followed by a
     barrier.  */
  last_bb_num_seen = -1;
  num_bb_notes = 0;
  x = rtx_first;
  while (x)
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  basic_block bb = NOTE_BASIC_BLOCK (x);
	  num_bb_notes++;
	  if (bb->index != last_bb_num_seen + 1)
	    fatal ("Basic blocks not numbered consecutively");
	  last_bb_num_seen = bb->index;
	}

      if (!bb_info[INSN_UID (x)])
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any block.  */
	      if (NEXT_INSN (x)
		  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		{
		  x = NEXT_INSN (x);
		}

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("Insn outside basic block", x);
	    }
	}

      if (INSN_P (x)
	  && GET_CODE (x) == JUMP_INSN
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
	fatal_insn ("Return not followed by barrier", x);

      x = NEXT_INSN (x);
    }

  if (num_bb_notes != n_basic_blocks)
    fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
	   num_bb_notes, n_basic_blocks);

  if (err)
    abort ();

  /* Clean up.  */
  free (bb_info);
}
6729 \f
6730 /* Functions to access an edge list with a vector representation.
6731 Enough data is kept such that given an index number, the
6732 pred and succ that edge represents can be determined, or
6733 given a pred and a succ, its index number can be returned.
6734 This allows algorithms which consume a lot of memory to
6735 represent the normally full matrix of edge (pred,succ) with a
6736 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6737 wasted space in the client code due to sparse flow graphs. */
6738
6739 /* This functions initializes the edge list. Basically the entire
6740 flowgraph is processed, and all edges are assigned a number,
6741 and the data structure is filled in. */
6742
6743 struct edge_list *
6744 create_edge_list ()
6745 {
6746 struct edge_list *elist;
6747 edge e;
6748 int num_edges;
6749 int x;
6750 int block_count;
6751
6752 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6753
6754 num_edges = 0;
6755
6756 /* Determine the number of edges in the flow graph by counting successor
6757 edges on each basic block. */
6758 for (x = 0; x < n_basic_blocks; x++)
6759 {
6760 basic_block bb = BASIC_BLOCK (x);
6761
6762 for (e = bb->succ; e; e = e->succ_next)
6763 num_edges++;
6764 }
6765 /* Don't forget successors of the entry block. */
6766 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6767 num_edges++;
6768
6769 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6770 elist->num_blocks = block_count;
6771 elist->num_edges = num_edges;
6772 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6773
6774 num_edges = 0;
6775
6776 /* Follow successors of the entry block, and register these edges. */
6777 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6778 {
6779 elist->index_to_edge[num_edges] = e;
6780 num_edges++;
6781 }
6782
6783 for (x = 0; x < n_basic_blocks; x++)
6784 {
6785 basic_block bb = BASIC_BLOCK (x);
6786
6787 /* Follow all successors of blocks, and register these edges. */
6788 for (e = bb->succ; e; e = e->succ_next)
6789 {
6790 elist->index_to_edge[num_edges] = e;
6791 num_edges++;
6792 }
6793 }
6794 return elist;
6795 }
6796
6797 /* This function free's memory associated with an edge list. */
6798
6799 void
6800 free_edge_list (elist)
6801 struct edge_list *elist;
6802 {
6803 if (elist)
6804 {
6805 free (elist->index_to_edge);
6806 free (elist);
6807 }
6808 }
6809
6810 /* This function provides debug output showing an edge list. */
6811
6812 void
6813 print_edge_list (f, elist)
6814 FILE *f;
6815 struct edge_list *elist;
6816 {
6817 int x;
6818 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6819 elist->num_blocks - 2, elist->num_edges);
6820
6821 for (x = 0; x < elist->num_edges; x++)
6822 {
6823 fprintf (f, " %-4d - edge(", x);
6824 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6825 fprintf (f, "entry,");
6826 else
6827 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6828
6829 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6830 fprintf (f, "exit)\n");
6831 else
6832 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6833 }
6834 }
6835
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

void
verify_edge_list (f, elist)
     FILE *f;
     struct edge_list *elist;
{
  int x, pred, succ, index;
  edge e;

  /* Every edge on a basic block's successor list must have an index,
     and the index must map back to the same source/destination.  */
  for (x = 0; x < n_basic_blocks; x++)
    {
      basic_block bb = BASIC_BLOCK (x);

      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }
	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }
  /* The entry block's successors are not covered above; check them
     the same way.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      pred = e->src->index;
      succ = e->dest->index;
      index = EDGE_INDEX (elist, e->src, e->dest);
      if (index == EDGE_INDEX_NO_EDGE)
	{
	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	  continue;
	}
      if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
      if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
    }
  /* We've verified that all the edges are in the list, now let's make
     sure there are no spurious edges in the list.  */

  for (pred = 0; pred < n_basic_blocks; pred++)
    for (succ = 0; succ < n_basic_blocks; succ++)
      {
	basic_block p = BASIC_BLOCK (pred);
	basic_block s = BASIC_BLOCK (succ);

	int found_edge = 0;

	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }
	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   pred, succ);
	if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
					   BASIC_BLOCK (succ)));
      }
  /* Repeat the spurious-edge check for edges leaving the entry
     block.  */
  for (succ = 0; succ < n_basic_blocks; succ++)
    {
      basic_block p = ENTRY_BLOCK_PTR;
      basic_block s = BASIC_BLOCK (succ);

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
		 succ);
      if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
		 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
				   BASIC_BLOCK (succ)));
    }
  /* And for edges entering the exit block.  */
  for (pred = 0; pred < n_basic_blocks; pred++)
    {
      basic_block p = BASIC_BLOCK (pred);
      basic_block s = EXIT_BLOCK_PTR;

      int found_edge = 0;

      for (e = p->succ; e; e = e->succ_next)
	if (e->dest == s)
	  {
	    found_edge = 1;
	    break;
	  }
      for (e = s->pred; e; e = e->pred_next)
	if (e->src == p)
	  {
	    found_edge = 1;
	    break;
	  }
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  == EDGE_INDEX_NO_EDGE && found_edge != 0)
	fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
		 pred);
      if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
	  != EDGE_INDEX_NO_EDGE && found_edge == 0)
	fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
		 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
				   EXIT_BLOCK_PTR));
    }
}
6979
6980 /* This routine will determine what, if any, edge there is between
6981 a specified predecessor and successor. */
6982
6983 int
6984 find_edge_index (edge_list, pred, succ)
6985 struct edge_list *edge_list;
6986 basic_block pred, succ;
6987 {
6988 int x;
6989 for (x = 0; x < NUM_EDGES (edge_list); x++)
6990 {
6991 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6992 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6993 return x;
6994 }
6995 return (EDGE_INDEX_NO_EDGE);
6996 }
6997
6998 /* This function will remove an edge from the flow graph. */
6999
7000 void
7001 remove_edge (e)
7002 edge e;
7003 {
7004 edge last_pred = NULL;
7005 edge last_succ = NULL;
7006 edge tmp;
7007 basic_block src, dest;
7008 src = e->src;
7009 dest = e->dest;
7010 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7011 last_succ = tmp;
7012
7013 if (!tmp)
7014 abort ();
7015 if (last_succ)
7016 last_succ->succ_next = e->succ_next;
7017 else
7018 src->succ = e->succ_next;
7019
7020 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7021 last_pred = tmp;
7022
7023 if (!tmp)
7024 abort ();
7025 if (last_pred)
7026 last_pred->pred_next = e->pred_next;
7027 else
7028 dest->pred = e->pred_next;
7029
7030 n_edges--;
7031 free (e);
7032 }
7033
7034 /* This routine will remove any fake successor edges for a basic block.
7035 When the edge is removed, it is also removed from whatever predecessor
7036 list it is in. */
7037
7038 static void
7039 remove_fake_successors (bb)
7040 basic_block bb;
7041 {
7042 edge e;
7043 for (e = bb->succ; e;)
7044 {
7045 edge tmp = e;
7046 e = e->succ_next;
7047 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7048 remove_edge (tmp);
7049 }
7050 }
7051
7052 /* This routine will remove all fake edges from the flow graph. If
7053 we remove all fake successors, it will automatically remove all
7054 fake predecessors. */
7055
7056 void
7057 remove_fake_edges ()
7058 {
7059 int x;
7060
7061 for (x = 0; x < n_basic_blocks; x++)
7062 remove_fake_successors (BASIC_BLOCK (x));
7063
7064 /* We've handled all successors except the entry block's. */
7065 remove_fake_successors (ENTRY_BLOCK_PTR);
7066 }
7067
7068 /* This function will add a fake edge between any block which has no
7069 successors, and the exit block. Some data flow equations require these
7070 edges to exist. */
7071
7072 void
7073 add_noreturn_fake_exit_edges ()
7074 {
7075 int x;
7076
7077 for (x = 0; x < n_basic_blocks; x++)
7078 if (BASIC_BLOCK (x)->succ == NULL)
7079 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7080 }
7081
7082 /* This function adds a fake edge between any infinite loops to the
7083 exit block. Some optimizations require a path from each node to
7084 the exit node.
7085
7086 See also Morgan, Figure 3.10, pp. 82-83.
7087
7088 The current implementation is ugly, not attempting to minimize the
7089 number of inserted fake edges. To reduce the number of fake edges
7090 to insert, add fake edges from _innermost_ loops containing only
7091 nodes not reachable from the exit block. */
7092
7093 void
7094 connect_infinite_loops_to_exit ()
7095 {
7096 basic_block unvisited_block;
7097
7098 /* Perform depth-first search in the reverse graph to find nodes
7099 reachable from the exit block. */
7100 struct depth_first_search_dsS dfs_ds;
7101
7102 flow_dfs_compute_reverse_init (&dfs_ds);
7103 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7104
7105 /* Repeatedly add fake edges, updating the unreachable nodes. */
7106 while (1)
7107 {
7108 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7109 if (!unvisited_block)
7110 break;
7111 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7112 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7113 }
7114
7115 flow_dfs_compute_reverse_finish (&dfs_ds);
7116
7117 return;
7118 }
7119
7120 /* Redirect an edge's successor from one block to another. */
7121
7122 void
7123 redirect_edge_succ (e, new_succ)
7124 edge e;
7125 basic_block new_succ;
7126 {
7127 edge *pe;
7128
7129 /* Disconnect the edge from the old successor block. */
7130 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7131 continue;
7132 *pe = (*pe)->pred_next;
7133
7134 /* Reconnect the edge to the new successor block. */
7135 e->pred_next = new_succ->pred;
7136 new_succ->pred = e;
7137 e->dest = new_succ;
7138 }
7139
7140 /* Redirect an edge's predecessor from one block to another. */
7141
7142 void
7143 redirect_edge_pred (e, new_pred)
7144 edge e;
7145 basic_block new_pred;
7146 {
7147 edge *pe;
7148
7149 /* Disconnect the edge from the old predecessor block. */
7150 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7151 continue;
7152 *pe = (*pe)->succ_next;
7153
7154 /* Reconnect the edge to the new predecessor block. */
7155 e->succ_next = new_pred->succ;
7156 new_pred->succ = e;
7157 e->src = new_pred;
7158 }
7159 \f
/* Dump the list of basic blocks in the bitmap NODES to FILE, prefixed
   by STR.  A null NODES is silently ignored.  */

static void
flow_nodes_print (str, nodes, file)
     const char *str;
     const sbitmap nodes;
     FILE *file;
{
  int node;

  if (! nodes)
    return;

  fprintf (file, "%s { ", str);
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
  fputs ("}\n", file);
}
7177
7178
7179 /* Dump the list of edges in the array EDGE_LIST. */
7180
7181 static void
7182 flow_edge_list_print (str, edge_list, num_edges, file)
7183 const char *str;
7184 const edge *edge_list;
7185 int num_edges;
7186 FILE *file;
7187 {
7188 int i;
7189
7190 if (! edge_list)
7191 return;
7192
7193 fprintf (file, "%s { ", str);
7194 for (i = 0; i < num_edges; i++)
7195 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7196 edge_list[i]->dest->index);
7197 fputs ("}\n", file);
7198 }
7199
7200
/* Dump loop related CFG information.  */

static void
flow_loops_cfg_dump (loops, file)
     const struct loops *loops;
     FILE *file;
{
  int i;

  /* Nothing useful to dump without loops, a stream, and dominator
     information.  */
  if (! loops->num || ! file || ! loops->cfg.dom)
    return;

  /* For each block, print its successors and its dominator set.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      edge succ;

      fprintf (file, ";; %d succs { ", i);
      for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
	fprintf (file, "%d ", succ->dest->index);
      flow_nodes_print ("} dom", loops->cfg.dom[i], file);
    }

  /* Dump the DFS node order.  */
  if (loops->cfg.dfs_order)
    {
      fputs (";; DFS order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.dfs_order[i]);
      fputs ("\n", file);
    }
  /* Dump the reverse completion node order.  */
  if (loops->cfg.rc_order)
    {
      fputs (";; RC order: ", file);
      for (i = 0; i < n_basic_blocks; i++)
	fprintf (file, "%d ", loops->cfg.rc_order[i]);
      fputs ("\n", file);
    }
}
7240
/* Return non-zero if the nodes of LOOP are a subset of OUTER,
   i.e. LOOP is nested inside OUTER.  */

static int
flow_loop_nested_p (outer, loop)
     struct loop *outer;
     struct loop *loop;
{
  return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
}
7250
7251
/* Dump the loop information specified by LOOP to the stream FILE
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */
void
flow_loop_dump (loop, file, loop_dump_aux, verbose)
     const struct loop *loop;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  /* Nothing to dump for a missing or headerless loop.  */
  if (! loop || ! loop->header)
    return;

  fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
	   loop->num, INSN_UID (loop->first->head),
	   INSN_UID (loop->last->end),
	   loop->shared ? " shared" : "",
	   loop->invalid ? " invalid" : "");
  fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
	   loop->header->index, loop->latch->index,
	   loop->pre_header ? loop->pre_header->index : -1,
	   loop->first->index, loop->last->index);
  fprintf (file, ";; depth %d, level %d, outer %ld\n",
	   loop->depth, loop->level,
	   (long) (loop->outer ? loop->outer->num : -1));

  if (loop->pre_header_edges)
    flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
			  loop->num_pre_header_edges, file);
  flow_edge_list_print (";; entry edges", loop->entry_edges,
			loop->num_entries, file);
  fprintf (file, ";; %d", loop->num_nodes);
  flow_nodes_print (" nodes", loop->nodes, file);
  flow_edge_list_print (";; exit edges", loop->exit_edges,
			loop->num_exits, file);
  if (loop->exits_doms)
    flow_nodes_print (";; exit doms", loop->exits_doms, file);
  /* Let the client dump any pass-specific details.  */
  if (loop_dump_aux)
    loop_dump_aux (loop, file, verbose);
}
7291
7292
/* Dump the loop information specified by LOOPS to the stream FILE,
   using auxiliary dump callback function LOOP_DUMP_AUX if non null.  */
void
flow_loops_dump (loops, file, loop_dump_aux, verbose)
     const struct loops *loops;
     FILE *file;
     void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
     int verbose;
{
  int i;
  int num_loops;

  num_loops = loops->num;
  if (! num_loops || ! file)
    return;

  fprintf (file, ";; %d loops found, %d levels\n",
	   num_loops, loops->levels);

  for (i = 0; i < num_loops; i++)
    {
      struct loop *loop = &loops->array[i];

      flow_loop_dump (loop, file, loop_dump_aux, verbose);

      /* If this loop shares its header with an earlier loop, report
	 whether the two loops are disjoint or nested.  */
      if (loop->shared)
	{
	  int j;

	  for (j = 0; j < i; j++)
	    {
	      struct loop *oloop = &loops->array[j];

	      if (loop->header == oloop->header)
		{
		  int disjoint;
		  int smaller;

		  smaller = loop->num_nodes < oloop->num_nodes;

		  /* If the union of LOOP and OLOOP is different than
		     the larger of LOOP and OLOOP then LOOP and OLOOP
		     must be disjoint.  */
		  disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
						   smaller ? oloop : loop);
		  fprintf (file,
			   ";; loop header %d shared by loops %d, %d %s\n",
			   loop->header->index, i, j,
			   disjoint ? "disjoint" : "nested");
		}
	    }
	}
    }

  /* Optionally dump the underlying CFG data (dominators, orders).  */
  if (verbose)
    flow_loops_cfg_dump (loops, file);
}
7350
7351
7352 /* Free all the memory allocated for LOOPS. */
7353
7354 void
7355 flow_loops_free (loops)
7356 struct loops *loops;
7357 {
7358 if (loops->array)
7359 {
7360 int i;
7361
7362 if (! loops->num)
7363 abort ();
7364
7365 /* Free the loop descriptors. */
7366 for (i = 0; i < loops->num; i++)
7367 {
7368 struct loop *loop = &loops->array[i];
7369
7370 if (loop->pre_header_edges)
7371 free (loop->pre_header_edges);
7372 if (loop->nodes)
7373 sbitmap_free (loop->nodes);
7374 if (loop->entry_edges)
7375 free (loop->entry_edges);
7376 if (loop->exit_edges)
7377 free (loop->exit_edges);
7378 if (loop->exits_doms)
7379 sbitmap_free (loop->exits_doms);
7380 }
7381 free (loops->array);
7382 loops->array = NULL;
7383
7384 if (loops->cfg.dom)
7385 sbitmap_vector_free (loops->cfg.dom);
7386 if (loops->cfg.dfs_order)
7387 free (loops->cfg.dfs_order);
7388
7389 if (loops->shared_headers)
7390 sbitmap_free (loops->shared_headers);
7391 }
7392 }
7393
7394
7395 /* Find the entry edges into the loop with header HEADER and nodes
7396 NODES and store in ENTRY_EDGES array. Return the number of entry
7397 edges from the loop. */
7398
7399 static int
7400 flow_loop_entry_edges_find (header, nodes, entry_edges)
7401 basic_block header;
7402 const sbitmap nodes;
7403 edge **entry_edges;
7404 {
7405 edge e;
7406 int num_entries;
7407
7408 *entry_edges = NULL;
7409
7410 num_entries = 0;
7411 for (e = header->pred; e; e = e->pred_next)
7412 {
7413 basic_block src = e->src;
7414
7415 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7416 num_entries++;
7417 }
7418
7419 if (! num_entries)
7420 abort ();
7421
7422 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
7423
7424 num_entries = 0;
7425 for (e = header->pred; e; e = e->pred_next)
7426 {
7427 basic_block src = e->src;
7428
7429 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7430 (*entry_edges)[num_entries++] = e;
7431 }
7432
7433 return num_entries;
7434 }
7435
7436
7437 /* Find the exit edges from the loop using the bitmap of loop nodes
7438 NODES and store in EXIT_EDGES array. Return the number of
7439 exit edges from the loop. */
7440
7441 static int
7442 flow_loop_exit_edges_find (nodes, exit_edges)
7443 const sbitmap nodes;
7444 edge **exit_edges;
7445 {
7446 edge e;
7447 int node;
7448 int num_exits;
7449
7450 *exit_edges = NULL;
7451
7452 /* Check all nodes within the loop to see if there are any
7453 successors not in the loop. Note that a node may have multiple
7454 exiting edges ????? A node can have one jumping edge and one fallthru
7455 edge so only one of these can exit the loop. */
7456 num_exits = 0;
7457 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7458 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7459 {
7460 basic_block dest = e->dest;
7461
7462 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7463 num_exits++;
7464 }
7465 });
7466
7467 if (! num_exits)
7468 return 0;
7469
7470 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7471
7472 /* Store all exiting edges into an array. */
7473 num_exits = 0;
7474 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7475 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7476 {
7477 basic_block dest = e->dest;
7478
7479 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7480 (*exit_edges)[num_exits++] = e;
7481 }
7482 });
7483
7484 return num_exits;
7485 }
7486
7487
7488 /* Find the nodes contained within the loop with header HEADER and
7489 latch LATCH and store in NODES. Return the number of nodes within
7490 the loop. */
7491
7492 static int
7493 flow_loop_nodes_find (header, latch, nodes)
7494 basic_block header;
7495 basic_block latch;
7496 sbitmap nodes;
7497 {
7498 basic_block *stack;
7499 int sp;
7500 int num_nodes = 0;
7501
7502 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7503 sp = 0;
7504
7505 /* Start with only the loop header in the set of loop nodes. */
7506 sbitmap_zero (nodes);
7507 SET_BIT (nodes, header->index);
7508 num_nodes++;
7509 header->loop_depth++;
7510
7511 /* Push the loop latch on to the stack. */
7512 if (! TEST_BIT (nodes, latch->index))
7513 {
7514 SET_BIT (nodes, latch->index);
7515 latch->loop_depth++;
7516 num_nodes++;
7517 stack[sp++] = latch;
7518 }
7519
7520 while (sp)
7521 {
7522 basic_block node;
7523 edge e;
7524
7525 node = stack[--sp];
7526 for (e = node->pred; e; e = e->pred_next)
7527 {
7528 basic_block ancestor = e->src;
7529
7530 /* If each ancestor not marked as part of loop, add to set of
7531 loop nodes and push on to stack. */
7532 if (ancestor != ENTRY_BLOCK_PTR
7533 && ! TEST_BIT (nodes, ancestor->index))
7534 {
7535 SET_BIT (nodes, ancestor->index);
7536 ancestor->loop_depth++;
7537 num_nodes++;
7538 stack[sp++] = ancestor;
7539 }
7540 }
7541 }
7542 free (stack);
7543 return num_nodes;
7544 }
7545
/* Compute the depth first search order and store in the array
   DFS_ORDER if non-zero, marking the nodes visited in VISITED.  If
   RC_ORDER is non-zero, return the reverse completion number for each
   node.  Returns the number of nodes visited.  A depth first search
   tries to get as far away from the starting point as quickly as
   possible.

   The walk is iterative: each stack slot holds the edge whose
   successor is to be explored next for a partially-explored node.
   DFS_ORDER is filled in pre-order (at first visit); RC_ORDER is
   filled from the high end downwards as nodes complete, giving the
   reverse completion (reverse post-order) numbering.  Aborts if the
   CFG contains unreachable blocks.  */

static int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;
     int *rc_order;
{
  edge *stack;
  int sp;
  int dfsnum = 0;			/* Next pre-order number.  */
  int rcnum = n_basic_blocks - 1;	/* Next reverse-completion slot.  */
  sbitmap visited;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (n_basic_blocks);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum++] = dest->index;

	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    {
	      /* There are no successors for the DEST node so assign
		 its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = dest->index;
	    }
	}
      else
	{
	  /* DEST was already visited (or is the exit block), so this
	     edge is done; if it was SRC's last edge, SRC itself is
	     complete.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    {
	      /* There are no more successors for the SRC node
		 so assign its reverse completion number.  */
	      if (rc_order)
		rc_order[rcnum--] = src->index;
	    }

	  /* Advance to SRC's next outgoing edge, or pop SRC.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();
  return dfsnum;
}
7641
7642 /* Compute the depth first search order on the _reverse_ graph and
7643 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7644 Returns the number of nodes visited.
7645
7646 The computation is split into three pieces:
7647
7648 flow_dfs_compute_reverse_init () creates the necessary data
7649 structures.
7650
7651 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7652 structures. The block will start the search.
7653
7654 flow_dfs_compute_reverse_execute () continues (or starts) the
7655 search using the block on the top of the stack, stopping when the
7656 stack is empty.
7657
7658 flow_dfs_compute_reverse_finish () destroys the necessary data
7659 structures.
7660
7661 Thus, the user will probably call ..._init(), call ..._add_bb() to
7662 add a beginning basic block to the stack, call ..._execute(),
7663 possibly add another bb to the stack and again call ..._execute(),
7664 ..., and finally call _finish(). */
7665
7666 /* Initialize the data structures used for depth-first search on the
7667 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7668 added to the basic block stack. DATA is the current depth-first
7669 search context. If INITIALIZE_STACK is non-zero, there is an
7670 element on the stack. */
7671
7672 static void
7673 flow_dfs_compute_reverse_init (data)
7674 depth_first_search_ds data;
7675 {
7676 /* Allocate stack for back-tracking up CFG. */
7677 data->stack =
7678 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7679 * sizeof (basic_block));
7680 data->sp = 0;
7681
7682 /* Allocate bitmap to track nodes that have been visited. */
7683 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7684
7685 /* None of the nodes in the CFG have been visited yet. */
7686 sbitmap_zero (data->visited_blocks);
7687
7688 return;
7689 }
7690
7691 /* Add the specified basic block to the top of the dfs data
7692 structures. When the search continues, it will start at the
7693 block. */
7694
7695 static void
7696 flow_dfs_compute_reverse_add_bb (data, bb)
7697 depth_first_search_ds data;
7698 basic_block bb;
7699 {
7700 data->stack[data->sp++] = bb;
7701 return;
7702 }
7703
7704 /* Continue the depth-first search through the reverse graph starting
7705 with the block at the stack's top and ending when the stack is
7706 empty. Visited nodes are marked. Returns an unvisited basic
7707 block, or NULL if there is none available. */
7708
7709 static basic_block
7710 flow_dfs_compute_reverse_execute (data)
7711 depth_first_search_ds data;
7712 {
7713 basic_block bb;
7714 edge e;
7715 int i;
7716
7717 while (data->sp > 0)
7718 {
7719 bb = data->stack[--data->sp];
7720
7721 /* Mark that we have visited this node. */
7722 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7723 {
7724 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7725
7726 /* Perform depth-first search on adjacent vertices. */
7727 for (e = bb->pred; e; e = e->pred_next)
7728 flow_dfs_compute_reverse_add_bb (data, e->src);
7729 }
7730 }
7731
7732 /* Determine if there are unvisited basic blocks. */
7733 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7734 if (!TEST_BIT (data->visited_blocks, i))
7735 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7736 return NULL;
7737 }
7738
7739 /* Destroy the data structures needed for depth-first search on the
7740 reverse graph. */
7741
7742 static void
7743 flow_dfs_compute_reverse_finish (data)
7744 depth_first_search_ds data;
7745 {
7746 free (data->stack);
7747 sbitmap_free (data->visited_blocks);
7748 return;
7749 }
7750
7751
7752 /* Find the root node of the loop pre-header extended basic block and
7753 the edges along the trace from the root node to the loop header. */
7754
7755 static void
7756 flow_loop_pre_header_scan (loop)
7757 struct loop *loop;
7758 {
7759 int num = 0;
7760 basic_block ebb;
7761
7762 loop->num_pre_header_edges = 0;
7763
7764 if (loop->num_entries != 1)
7765 return;
7766
7767 ebb = loop->entry_edges[0]->src;
7768
7769 if (ebb != ENTRY_BLOCK_PTR)
7770 {
7771 edge e;
7772
7773 /* Count number of edges along trace from loop header to
7774 root of pre-header extended basic block. Usually this is
7775 only one or two edges. */
7776 num++;
7777 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
7778 {
7779 ebb = ebb->pred->src;
7780 num++;
7781 }
7782
7783 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
7784 loop->num_pre_header_edges = num;
7785
7786 /* Store edges in order that they are followed. The source
7787 of the first edge is the root node of the pre-header extended
7788 basic block and the destination of the last last edge is
7789 the loop header. */
7790 for (e = loop->entry_edges[0]; num; e = e->src->pred)
7791 {
7792 loop->pre_header_edges[--num] = e;
7793 }
7794 }
7795 }
7796
7797
7798 /* Return the block for the pre-header of the loop with header
7799 HEADER where DOM specifies the dominator information. Return NULL if
7800 there is no pre-header. */
7801
7802 static basic_block
7803 flow_loop_pre_header_find (header, dom)
7804 basic_block header;
7805 const sbitmap *dom;
7806 {
7807 basic_block pre_header;
7808 edge e;
7809
7810 /* If block p is a predecessor of the header and is the only block
7811 that the header does not dominate, then it is the pre-header. */
7812 pre_header = NULL;
7813 for (e = header->pred; e; e = e->pred_next)
7814 {
7815 basic_block node = e->src;
7816
7817 if (node != ENTRY_BLOCK_PTR
7818 && ! TEST_BIT (dom[node->index], header->index))
7819 {
7820 if (pre_header == NULL)
7821 pre_header = node;
7822 else
7823 {
7824 /* There are multiple edges into the header from outside
7825 the loop so there is no pre-header block. */
7826 pre_header = NULL;
7827 break;
7828 }
7829 }
7830 }
7831 return pre_header;
7832 }
7833
7834 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7835 previously added. The insertion algorithm assumes that the loops
7836 are added in the order found by a depth first search of the CFG. */
7837
7838 static void
7839 flow_loop_tree_node_add (prevloop, loop)
7840 struct loop *prevloop;
7841 struct loop *loop;
7842 {
7843
7844 if (flow_loop_nested_p (prevloop, loop))
7845 {
7846 prevloop->inner = loop;
7847 loop->outer = prevloop;
7848 return;
7849 }
7850
7851 while (prevloop->outer)
7852 {
7853 if (flow_loop_nested_p (prevloop->outer, loop))
7854 {
7855 prevloop->next = loop;
7856 loop->outer = prevloop->outer;
7857 return;
7858 }
7859 prevloop = prevloop->outer;
7860 }
7861
7862 prevloop->next = loop;
7863 loop->outer = NULL;
7864 }
7865
7866 /* Build the loop hierarchy tree for LOOPS. */
7867
7868 static void
7869 flow_loops_tree_build (loops)
7870 struct loops *loops;
7871 {
7872 int i;
7873 int num_loops;
7874
7875 num_loops = loops->num;
7876 if (! num_loops)
7877 return;
7878
7879 /* Root the loop hierarchy tree with the first loop found.
7880 Since we used a depth first search this should be the
7881 outermost loop. */
7882 loops->tree = &loops->array[0];
7883 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7884
7885 /* Add the remaining loops to the tree. */
7886 for (i = 1; i < num_loops; i++)
7887 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7888 }
7889
7890 /* Helper function to compute loop nesting depth and enclosed loop level
7891 for the natural loop specified by LOOP at the loop depth DEPTH.
7892 Returns the loop level. */
7893
7894 static int
7895 flow_loop_level_compute (loop, depth)
7896 struct loop *loop;
7897 int depth;
7898 {
7899 struct loop *inner;
7900 int level = 1;
7901
7902 if (! loop)
7903 return 0;
7904
7905 /* Traverse loop tree assigning depth and computing level as the
7906 maximum level of all the inner loops of this loop. The loop
7907 level is equivalent to the height of the loop in the loop tree
7908 and corresponds to the number of enclosed loop levels (including
7909 itself). */
7910 for (inner = loop->inner; inner; inner = inner->next)
7911 {
7912 int ilevel;
7913
7914 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7915
7916 if (ilevel > level)
7917 level = ilevel;
7918 }
7919 loop->level = level;
7920 loop->depth = depth;
7921 return level;
7922 }
7923
7924 /* Compute the loop nesting depth and enclosed loop level for the loop
7925 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7926 level. */
7927
7928 static int
7929 flow_loops_level_compute (loops)
7930 struct loops *loops;
7931 {
7932 struct loop *loop;
7933 int level;
7934 int levels = 0;
7935
7936 /* Traverse all the outer level loops. */
7937 for (loop = loops->tree; loop; loop = loop->next)
7938 {
7939 level = flow_loop_level_compute (loop, 1);
7940 if (level > levels)
7941 levels = level;
7942 }
7943 return levels;
7944 }
7945
7946
/* Find all the natural loops in the function and save in LOOPS structure
   and recalculate loop_depth information in basic block structures.
   FLAGS controls which loop information is collected.
   Return the number of natural loops found.

   The algorithm: compute dominators, count back edges (edges whose
   source is dominated by their destination) to size the loop array,
   then walk blocks in reverse completion order so that outer loops
   are recorded before inner ones, filling in the per-loop node sets
   and optional edge/pre-header information.  */

int
flow_loops_find (loops, flags)
     struct loops *loops;
     int flags;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;
  int *rc_order;

  /* This function cannot be repeatedly called with different
     flags to build up the loop information.  The loop tree
     must always be built if this function is called.  */
  if (! (flags & LOOP_TREE))
    abort ();

  memset (loops, 0, sizeof (*loops));

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  dfs_order = NULL;
  rc_order = NULL;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  calculate_dominance_info (NULL, dom, CDI_DOMINATORS);

  /* Count the number of loop edges (back edges).  This should be the
     same as the number of natural loops.  */

  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      basic_block header;

      header = BASIC_BLOCK (b);
      /* Reset the depth; it is recomputed by flow_loop_nodes_find.  */
      header->loop_depth = 0;

      for (e = header->pred; e; e = e->pred_next)
	{
	  basic_block latch = e->src;

	  /* Look for back edges where a predecessor is dominated
	     by this block.  A natural loop has a single entry
	     node (header) that dominates all the nodes in the
	     loop.  It also has single back edge to the header
	     from a latch node.  Note that multiple natural loops
	     may share the same header.  */
	  if (b != header->index)
	    abort ();

	  if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
	    num_loops++;
	}
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
	 natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order, rc_order);

      /* Allocate loop structures.  */
      loops->array
	= (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
	 in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
	{
	  basic_block header;

	  /* Search the nodes of the CFG in reverse completion order
	     so that we can find outer loops first.  */
	  header = BASIC_BLOCK (rc_order[b]);

	  /* Look for all the possible latch blocks for this header.  */
	  for (e = header->pred; e; e = e->pred_next)
	    {
	      basic_block latch = e->src;

	      /* Look for back edges where a predecessor is dominated
		 by this block.  A natural loop has a single entry
		 node (header) that dominates all the nodes in the
		 loop.  It also has single back edge to the header
		 from a latch node.  Note that multiple natural loops
		 may share the same header.  */
	      if (latch != ENTRY_BLOCK_PTR
		  && TEST_BIT (dom[latch->index], header->index))
		{
		  struct loop *loop;

		  loop = loops->array + num_loops;

		  loop->header = header;
		  loop->latch = latch;
		  loop->num = num_loops;

		  num_loops++;
		}
	    }
	}

      /* Second pass: fill in node sets and optional information for
	 each recorded loop.  */
      for (i = 0; i < num_loops; i++)
	{
	  struct loop *loop = &loops->array[i];
	  int j;

	  /* Keep track of blocks that are loop headers so
	     that we can tell which loops should be merged.  */
	  if (TEST_BIT (headers, loop->header->index))
	    SET_BIT (loops->shared_headers, loop->header->index);
	  SET_BIT (headers, loop->header->index);

	  /* Find nodes contained within the loop.  */
	  loop->nodes = sbitmap_alloc (n_basic_blocks);
	  loop->num_nodes
	    = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

	  /* Compute first and last blocks within the loop.
	     These are often the same as the loop header and
	     loop latch respectively, but this is not always
	     the case.  */
	  loop->first
	    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
	  loop->last
	    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

	  if (flags & LOOP_EDGES)
	    {
	      /* Find edges which enter the loop header.
		 Note that the entry edges should only
		 enter the header of a natural loop.  */
	      loop->num_entries
		= flow_loop_entry_edges_find (loop->header,
					      loop->nodes,
					      &loop->entry_edges);

	      /* Find edges which exit the loop.  */
	      loop->num_exits
		= flow_loop_exit_edges_find (loop->nodes,
					     &loop->exit_edges);

	      /* Determine which loop nodes dominate all the exits
		 of the loop.  */
	      loop->exits_doms = sbitmap_alloc (n_basic_blocks);
	      sbitmap_copy (loop->exits_doms, loop->nodes);
	      for (j = 0; j < loop->num_exits; j++)
		sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
				 dom[loop->exit_edges[j]->src->index]);

	      /* The header of a natural loop must dominate
		 all exits.  */
	      if (! TEST_BIT (loop->exits_doms, loop->header->index))
		abort ();
	    }

	  if (flags & LOOP_PRE_HEADER)
	    {
	      /* Look to see if the loop has a pre-header node.  */
	      loop->pre_header
		= flow_loop_pre_header_find (loop->header, dom);

	      flow_loop_pre_header_scan (loop);
	    }
	}

      /* Natural loops with shared headers may either be disjoint or
	 nested.  Disjoint loops with shared headers cannot be inner
	 loops and should be merged.  For now just mark loops that share
	 headers.  */
      for (i = 0; i < num_loops; i++)
	if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
	  loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;
  loops->cfg.rc_order = rc_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
8162
8163
8164 /* Update the information regarding the loops in the CFG
8165 specified by LOOPS. */
8166 int
8167 flow_loops_update (loops, flags)
8168 struct loops *loops;
8169 int flags;
8170 {
8171 /* One day we may want to update the current loop data. For now
8172 throw away the old stuff and rebuild what we need. */
8173 if (loops->array)
8174 flow_loops_free (loops);
8175
8176 return flow_loops_find (loops, flags);
8177 }
8178
8179
8180 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8181
8182 int
8183 flow_loop_outside_edge_p (loop, e)
8184 const struct loop *loop;
8185 edge e;
8186 {
8187 if (e->dest != loop->header)
8188 abort ();
8189 return (e->src == ENTRY_BLOCK_PTR)
8190 || ! TEST_BIT (loop->nodes, e->src->index);
8191 }
8192
8193 /* Clear LOG_LINKS fields of insns in a chain.
8194 Also clear the global_live_at_{start,end} fields of the basic block
8195 structures. */
8196
8197 void
8198 clear_log_links (insns)
8199 rtx insns;
8200 {
8201 rtx i;
8202 int b;
8203
8204 for (i = insns; i; i = NEXT_INSN (i))
8205 if (INSN_P (i))
8206 LOG_LINKS (i) = 0;
8207
8208 for (b = 0; b < n_basic_blocks; b++)
8209 {
8210 basic_block bb = BASIC_BLOCK (b);
8211
8212 bb->global_live_at_start = NULL;
8213 bb->global_live_at_end = NULL;
8214 }
8215
8216 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8217 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8218 }
8219
/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */

void
reg_set_to_hard_reg_set (to, from)
     HARD_REG_SET *to;
     bitmap from;
{
  int i;

  /* NOTE: the RETURN inside the macro body exits the function as soon
     as a pseudo register number is seen; this presumably relies on
     EXECUTE_IF_SET_IN_BITMAP enumerating bits in increasing order, so
     every subsequent bit would also be a pseudo -- confirm against the
     bitmap iterator's contract before changing.  */
  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}
8240
8241 /* Called once at intialization time. */
8242
8243 void
8244 init_flow ()
8245 {
8246 static int initialized;
8247
8248 if (!initialized)
8249 {
8250 gcc_obstack_init (&flow_obstack);
8251 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8252 initialized = 1;
8253 }
8254 else
8255 {
8256 obstack_free (&flow_obstack, flow_firstobj);
8257 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8258 }
8259 }