1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86      which registers are set by each insn and which die there; a toy
        sketch of such a scan follows this comment.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
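
/* The paragraphs above describe the per-block liveness bookkeeping.  As a
   purely illustrative, self-contained sketch (not used anywhere in this
   pass, and deliberately independent of the real regset machinery), the
   scan can be pictured as follows: LIVE is a byte-per-register stand-in
   for the live-register bit vector, seeded from the block's
   global_live_at_start; applying one insn kills the registers that die
   there and marks the registers it sets.  Every name below (sketch_step
   and its parameters) is invented for this example only.  */

static void
sketch_step (live, set_regs, n_set, dead_regs, n_dead)
     unsigned char *live;
     int *set_regs;
     int n_set;
     int *dead_regs;
     int n_dead;
{
  int i;

  /* Registers whose last use is in this insn (REG_DEAD) are no longer
     live after it.  */
  for (i = 0; i < n_dead; i++)
    live[dead_regs[i]] = 0;

  /* Registers written by this insn hold a new value and are treated as
     live after it by this simple scan.  */
  for (i = 0; i < n_set; i++)
    live[set_regs[i]] = 1;
}

/* To know what is live just after insn N of a block, a caller of this
   sketch would copy the block's live-at-start vector into LIVE and apply
   sketch_step once per insn, up to and including N.  */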
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137 #include "expr.h"
138 #include "ssa.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 /* Not in basic-block.h, since it is private to this file. When set, it
172    causes us to keep REG_N_SETS up to date for original pseudo registers.  */
173 #define PROP_POSTRELOAD 64
174
175 /* The obstack on which the flow graph components are allocated. */
176
177 struct obstack flow_obstack;
178 static char *flow_firstobj;
179
180 /* Number of basic blocks in the current function. */
181
182 int n_basic_blocks;
183
184 /* Number of edges in the current function. */
185
186 int n_edges;
187
188 /* The basic block array. */
189
190 varray_type basic_block_info;
191
192 /* The special entry and exit blocks. */
193
194 struct basic_block_def entry_exit_blocks[2]
195 = {{NULL, /* head */
196 NULL, /* end */
197 NULL, /* pred */
198 NULL, /* succ */
199 NULL, /* local_set */
200 NULL, /* cond_local_set */
201 NULL, /* global_live_at_start */
202 NULL, /* global_live_at_end */
203 NULL, /* aux */
204 ENTRY_BLOCK, /* index */
205 0, /* loop_depth */
206 -1, -1, /* eh_beg, eh_end */
207 0 /* count */
208 },
209 {
210 NULL, /* head */
211 NULL, /* end */
212 NULL, /* pred */
213 NULL, /* succ */
214 NULL, /* local_set */
215 NULL, /* cond_local_set */
216 NULL, /* global_live_at_start */
217 NULL, /* global_live_at_end */
218 NULL, /* aux */
219 EXIT_BLOCK, /* index */
220 0, /* loop_depth */
221 -1, -1, /* eh_beg, eh_end */
222 0 /* count */
223 }
224 };
225
226 /* Nonzero if the second flow pass has completed. */
227 int flow2_completed;
228
229 /* Maximum register number used in this function, plus one. */
230
231 int max_regno;
232
233 /* Indexed by n, giving various register information */
234
235 varray_type reg_n_info;
236
237 /* Size of a regset for the current function,
238 in (1) bytes and (2) elements. */
239
240 int regset_bytes;
241 int regset_size;
242
243 /* Regset of regs live when calls to `setjmp'-like functions happen. */
244 /* ??? Does this exist only for the setjmp-clobbered warning message? */
245
246 regset regs_live_at_setjmp;
247
248 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
249 that have to go in the same hard reg.
250 The first two regs in the list are a pair, and the next two
251 are another pair, etc. */
252 rtx regs_may_share;
253
254 /* Callback that determines if it's ok for a function to have no
255 noreturn attribute. */
256 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
257
258 /* Set of registers that may be eliminable. These are handled specially
259 in updating regs_ever_live. */
260
261 static HARD_REG_SET elim_reg_set;
262
263 /* The basic block structure for every insn, indexed by uid. */
264
265 varray_type basic_block_for_insn;
266
267 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
268 /* ??? Should probably be using LABEL_NUSES instead. It would take a
269 bit of surgery to be able to use or co-opt the routines in jump. */
270
271 static rtx label_value_list;
272 static rtx tail_recursion_label_list;
273
274 /* Holds information for tracking conditional register life information. */
275 struct reg_cond_life_info
276 {
277 /* An EXPR_LIST of conditions under which a register is dead. */
278 rtx condition;
279
280 /* ??? Could store mask of bytes that are dead, so that we could finally
281 track lifetimes of multi-word registers accessed via subregs. */
282 };
283
284 /* For use in communicating between propagate_block and its subroutines.
285 Holds all information needed to compute life and def-use information. */
286
287 struct propagate_block_info
288 {
289 /* The basic block we're considering. */
290 basic_block bb;
291
292 /* Bit N is set if register N is conditionally or unconditionally live. */
293 regset reg_live;
294
295 /* Bit N is set if register N is set this insn. */
296 regset new_set;
297
298 /* Element N is the next insn that uses (hard or pseudo) register N
299 within the current basic block; or zero, if there is no such insn. */
300 rtx *reg_next_use;
301
302 /* Contains a list of all the MEMs we are tracking for dead store
303 elimination. */
304 rtx mem_set_list;
305
306 /* If non-null, record the set of registers set unconditionally in the
307 basic block. */
308 regset local_set;
309
310 /* If non-null, record the set of registers set conditionally in the
311 basic block. */
312 regset cond_local_set;
313
314 #ifdef HAVE_conditional_execution
315 /* Indexed by register number, holds a reg_cond_life_info for each
316 register that is not unconditionally live or dead. */
317 splay_tree reg_cond_dead;
318
319 /* Bit N is set if register N is in an expression in reg_cond_dead. */
320 regset reg_cond_reg;
321 #endif
322
323 /* Non-zero if the value of CC0 is live. */
324 int cc0_live;
325
326   /* Flags controlling the set of information propagate_block collects.  */
327 int flags;
328 };
329
330 /* Store the data structures necessary for depth-first search. */
331 struct depth_first_search_dsS {
332 /* stack for backtracking during the algorithm */
333 basic_block *stack;
334
335   /* number of basic blocks on the stack.  That is, positions 0, ...,
336      sp-1 hold basic blocks.  */
337 unsigned int sp;
338
339 /* record of basic blocks already seen by depth-first search */
340 sbitmap visited_blocks;
341 };
342 typedef struct depth_first_search_dsS *depth_first_search_ds;
343
344 /* Forward declarations */
345 static int count_basic_blocks PARAMS ((rtx));
346 static void find_basic_blocks_1 PARAMS ((rtx));
347 static rtx find_label_refs PARAMS ((rtx, rtx));
348 static void clear_edges PARAMS ((void));
349 static void make_edges PARAMS ((rtx));
350 static void make_label_edge PARAMS ((sbitmap *, basic_block,
351 rtx, int));
352 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
353 basic_block, rtx, int));
354 static void mark_critical_edges PARAMS ((void));
355 static void move_stray_eh_region_notes PARAMS ((void));
356 static void record_active_eh_regions PARAMS ((rtx));
357
358 static void commit_one_edge_insertion PARAMS ((edge));
359
360 static void delete_unreachable_blocks PARAMS ((void));
361 static void delete_eh_regions PARAMS ((void));
362 static int can_delete_note_p PARAMS ((rtx));
363 static void expunge_block PARAMS ((basic_block));
364 static int can_delete_label_p PARAMS ((rtx));
365 static int tail_recursion_label_p PARAMS ((rtx));
366 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
367 basic_block));
368 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
369 basic_block));
370 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
371 static void try_merge_blocks PARAMS ((void));
372 static void tidy_fallthru_edges PARAMS ((void));
373 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
374 static void verify_wide_reg PARAMS ((int, rtx, rtx));
375 static void verify_local_live_at_start PARAMS ((regset, basic_block));
376 static int set_noop_p PARAMS ((rtx));
377 static int noop_move_p PARAMS ((rtx));
378 static void delete_noop_moves PARAMS ((rtx));
379 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
380 static void notice_stack_pointer_modification PARAMS ((rtx));
381 static void mark_reg PARAMS ((rtx, void *));
382 static void mark_regs_live_at_end PARAMS ((regset));
383 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
384 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
385 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
386 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
387 static int insn_dead_p PARAMS ((struct propagate_block_info *,
388 rtx, int, rtx));
389 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
390 rtx, rtx));
391 static void mark_set_regs PARAMS ((struct propagate_block_info *,
392 rtx, rtx));
393 static void mark_set_1 PARAMS ((struct propagate_block_info *,
394 enum rtx_code, rtx, rtx,
395 rtx, int));
396 #ifdef HAVE_conditional_execution
397 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
398 int, rtx));
399 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
400 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
401 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
402 int));
403 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
404 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
405 static rtx not_reg_cond PARAMS ((rtx));
406 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
407 #endif
408 #ifdef AUTO_INC_DEC
409 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
410 rtx, rtx, rtx, rtx, rtx));
411 static void find_auto_inc PARAMS ((struct propagate_block_info *,
412 rtx, rtx));
413 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
414 rtx));
415 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
416 #endif
417 static void mark_used_reg PARAMS ((struct propagate_block_info *,
418 rtx, rtx, rtx));
419 static void mark_used_regs PARAMS ((struct propagate_block_info *,
420 rtx, rtx, rtx));
421 void dump_flow_info PARAMS ((FILE *));
422 void debug_flow_info PARAMS ((void));
423 static void dump_edge_info PARAMS ((FILE *, edge, int));
424 static void print_rtl_and_abort PARAMS ((void));
425
426 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
427 rtx));
428 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
429 rtx));
430 static void remove_fake_successors PARAMS ((basic_block));
431 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
432 FILE *));
433 static void flow_edge_list_print PARAMS ((const char *, const edge *,
434 int, FILE *));
435 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
436 FILE *));
437 static int flow_loop_nested_p PARAMS ((struct loop *,
438 struct loop *));
439 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
440 edge **));
441 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
442 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
443 static int flow_depth_first_order_compute PARAMS ((int *, int *));
444 static void flow_dfs_compute_reverse_init
445 PARAMS ((depth_first_search_ds));
446 static void flow_dfs_compute_reverse_add_bb
447 PARAMS ((depth_first_search_ds, basic_block));
448 static basic_block flow_dfs_compute_reverse_execute
449 PARAMS ((depth_first_search_ds));
450 static void flow_dfs_compute_reverse_finish
451 PARAMS ((depth_first_search_ds));
452 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
453 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
454 const sbitmap *));
455 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
456 static void flow_loops_tree_build PARAMS ((struct loops *));
457 static int flow_loop_level_compute PARAMS ((struct loop *, int));
458 static int flow_loops_level_compute PARAMS ((struct loops *));
459 static void allocate_bb_life_data PARAMS ((void));
460 \f
461 /* Find basic blocks of the current function.
462 F is the first insn of the function and NREGS the number of register
463 numbers in use. */
464
465 void
466 find_basic_blocks (f, nregs, file)
467 rtx f;
468 int nregs ATTRIBUTE_UNUSED;
469 FILE *file ATTRIBUTE_UNUSED;
470 {
471 int max_uid;
472
473 /* Flush out existing data. */
474 if (basic_block_info != NULL)
475 {
476 int i;
477
478 clear_edges ();
479
480 /* Clear bb->aux on all extant basic blocks. We'll use this as a
481 tag for reuse during create_basic_block, just in case some pass
482 copies around basic block notes improperly. */
483 for (i = 0; i < n_basic_blocks; ++i)
484 BASIC_BLOCK (i)->aux = NULL;
485
486 VARRAY_FREE (basic_block_info);
487 }
488
489 n_basic_blocks = count_basic_blocks (f);
490
491 /* Size the basic block table. The actual structures will be allocated
492 by find_basic_blocks_1, since we want to keep the structure pointers
493 stable across calls to find_basic_blocks. */
494 /* ??? This whole issue would be much simpler if we called find_basic_blocks
495 exactly once, and thereafter we don't have a single long chain of
496 instructions at all until close to the end of compilation when we
497 actually lay them out. */
498
499 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
500
501 find_basic_blocks_1 (f);
502
503 /* Record the block to which an insn belongs. */
504 /* ??? This should be done another way, by which (perhaps) a label is
505 tagged directly with the basic block that it starts. It is used for
506 more than that currently, but IMO that is the only valid use. */
507
508 max_uid = get_max_uid ();
509 #ifdef AUTO_INC_DEC
510 /* Leave space for insns life_analysis makes in some cases for auto-inc.
511 These cases are rare, so we don't need too much space. */
512 max_uid += max_uid / 10;
513 #endif
514
515 compute_bb_for_insn (max_uid);
516
517 /* Discover the edges of our cfg. */
518 record_active_eh_regions (f);
519 make_edges (label_value_list);
520
521 /* Do very simple cleanup now, for the benefit of code that runs between
522 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
523 tidy_fallthru_edges ();
524
525 mark_critical_edges ();
526
527 #ifdef ENABLE_CHECKING
528 verify_flow_info ();
529 #endif
530 }
531
532 void
533 check_function_return_warnings ()
534 {
535 if (warn_missing_noreturn
536 && !TREE_THIS_VOLATILE (cfun->decl)
537 && EXIT_BLOCK_PTR->pred == NULL
538 && (lang_missing_noreturn_ok_p
539 && !lang_missing_noreturn_ok_p (cfun->decl)))
540 warning ("function might be possible candidate for attribute `noreturn'");
541
542 /* If we have a path to EXIT, then we do return. */
543 if (TREE_THIS_VOLATILE (cfun->decl)
544 && EXIT_BLOCK_PTR->pred != NULL)
545 warning ("`noreturn' function does return");
546
547 /* If the clobber_return_insn appears in some basic block, then we
548 do reach the end without returning a value. */
549 else if (warn_return_type
550 && cfun->x_clobber_return_insn != NULL
551 && EXIT_BLOCK_PTR->pred != NULL)
552 {
553 int max_uid = get_max_uid ();
554
555 /* If clobber_return_insn was excised by jump1, then renumber_insns
556 can make max_uid smaller than the number still recorded in our rtx.
557 That's fine, since this is a quick way of verifying that the insn
558 is no longer in the chain. */
559 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
560 {
561 /* Recompute insn->block mapping, since the initial mapping is
562 set before we delete unreachable blocks. */
563 compute_bb_for_insn (max_uid);
564
565 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
566 warning ("control reaches end of non-void function");
567 }
568 }
569 }
570
571 /* Count the basic blocks of the function. */
572
573 static int
574 count_basic_blocks (f)
575 rtx f;
576 {
577 register rtx insn;
578 register RTX_CODE prev_code;
579 register int count = 0;
580 int eh_region = 0;
581 int call_had_abnormal_edge = 0;
582
583 prev_code = JUMP_INSN;
584 for (insn = f; insn; insn = NEXT_INSN (insn))
585 {
586 register RTX_CODE code = GET_CODE (insn);
587
588 if (code == CODE_LABEL
589 || (GET_RTX_CLASS (code) == 'i'
590 && (prev_code == JUMP_INSN
591 || prev_code == BARRIER
592 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
593 count++;
594
595 /* Record whether this call created an edge. */
596 if (code == CALL_INSN)
597 {
598 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
599 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
600
601 call_had_abnormal_edge = 0;
602
603 /* If there is an EH region or rethrow, we have an edge. */
604 if ((eh_region && region > 0)
605 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
606 call_had_abnormal_edge = 1;
607 else if (nonlocal_goto_handler_labels && region >= 0)
608 /* If there is a nonlocal goto label and the specified
609 region number isn't -1, we have an edge. (0 means
610 no throw, but might have a nonlocal goto). */
611 call_had_abnormal_edge = 1;
612 }
613
614 if (code != NOTE)
615 prev_code = code;
616 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
617 ++eh_region;
618 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
619 --eh_region;
620 }
621
622 /* The rest of the compiler works a bit smoother when we don't have to
623 check for the edge case of do-nothing functions with no basic blocks. */
624 if (count == 0)
625 {
626 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
627 count = 1;
628 }
629
630 return count;
631 }
632
633 /* Scan a list of insns for labels referred to other than by jumps.
634 This is used to scan the alternatives of a call placeholder. */
635 static rtx
636 find_label_refs (f, lvl)
637 rtx f;
638 rtx lvl;
639 {
640 rtx insn;
641
642 for (insn = f; insn; insn = NEXT_INSN (insn))
643 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
644 {
645 rtx note;
646
647 /* Make a list of all labels referred to other than by jumps
648 (which just don't have the REG_LABEL notes).
649
650 Make a special exception for labels followed by an ADDR*VEC,
651 as this would be a part of the tablejump setup code.
652
653 Make a special exception for the eh_return_stub_label, which
654 we know isn't part of any otherwise visible control flow.
655
656 Make a special exception to registers loaded with label
657 values just before jump insns that use them. */
658
659 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
660 if (REG_NOTE_KIND (note) == REG_LABEL)
661 {
662 rtx lab = XEXP (note, 0), next;
663
664 if (lab == eh_return_stub_label)
665 ;
666 else if ((next = next_nonnote_insn (lab)) != NULL
667 && GET_CODE (next) == JUMP_INSN
668 && (GET_CODE (PATTERN (next)) == ADDR_VEC
669 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
670 ;
671 else if (GET_CODE (lab) == NOTE)
672 ;
673 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
674 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
675 ;
676 else
677 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
678 }
679 }
680
681 return lvl;
682 }
683
684 /* Find all basic blocks of the function whose first insn is F.
685
686    Collect a list of labels whose addresses are taken and store it in
687    label_value_list, for use by make_edges in handling computed gotos.  */
688
689 static void
690 find_basic_blocks_1 (f)
691 rtx f;
692 {
693 register rtx insn, next;
694 int i = 0;
695 rtx bb_note = NULL_RTX;
696 rtx eh_list = NULL_RTX;
697 rtx lvl = NULL_RTX;
698 rtx trll = NULL_RTX;
699 rtx head = NULL_RTX;
700 rtx end = NULL_RTX;
701
702 /* We process the instructions in a slightly different way than we did
703 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
704 closed out the previous block, so that it gets attached at the proper
705 place. Since this form should be equivalent to the previous,
706 count_basic_blocks continues to use the old form as a check. */
707
708 for (insn = f; insn; insn = next)
709 {
710 enum rtx_code code = GET_CODE (insn);
711
712 next = NEXT_INSN (insn);
713
714 switch (code)
715 {
716 case NOTE:
717 {
718 int kind = NOTE_LINE_NUMBER (insn);
719
720 /* Keep a LIFO list of the currently active exception notes. */
721 if (kind == NOTE_INSN_EH_REGION_BEG)
722 eh_list = alloc_INSN_LIST (insn, eh_list);
723 else if (kind == NOTE_INSN_EH_REGION_END)
724 {
725 rtx t = eh_list;
726
727 eh_list = XEXP (eh_list, 1);
728 free_INSN_LIST_node (t);
729 }
730
731 /* Look for basic block notes with which to keep the
732 basic_block_info pointers stable. Unthread the note now;
733 we'll put it back at the right place in create_basic_block.
734 Or not at all if we've already found a note in this block. */
735 else if (kind == NOTE_INSN_BASIC_BLOCK)
736 {
737 if (bb_note == NULL_RTX)
738 bb_note = insn;
739 else
740 next = flow_delete_insn (insn);
741 }
742 break;
743 }
744
745 case CODE_LABEL:
746 /* A basic block starts at a label. If we've closed one off due
747 to a barrier or some such, no need to do it again. */
748 if (head != NULL_RTX)
749 {
750 /* While we now have edge lists with which other portions of
751 the compiler might determine a call ending a basic block
752 does not imply an abnormal edge, it will be a bit before
753 everything can be updated. So continue to emit a noop at
754 the end of such a block. */
755 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
756 {
757 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
758 end = emit_insn_after (nop, end);
759 }
760
761 create_basic_block (i++, head, end, bb_note);
762 bb_note = NULL_RTX;
763 }
764
765 head = end = insn;
766 break;
767
768 case JUMP_INSN:
769 /* A basic block ends at a jump. */
770 if (head == NULL_RTX)
771 head = insn;
772 else
773 {
774 /* ??? Make a special check for table jumps. The way this
775 happens is truly and amazingly gross. We are about to
776 create a basic block that contains just a code label and
777 an addr*vec jump insn. Worse, an addr_diff_vec creates
778 its own natural loop.
779
780 Prevent this bit of brain damage, pasting things together
781 correctly in make_edges.
782
783 The correct solution involves emitting the table directly
784 on the tablejump instruction as a note, or JUMP_LABEL. */
785
786 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
787 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
788 {
789 head = end = NULL;
790 n_basic_blocks--;
791 break;
792 }
793 }
794 end = insn;
795 goto new_bb_inclusive;
796
797 case BARRIER:
798 /* A basic block ends at a barrier. It may be that an unconditional
799 jump already closed the basic block -- no need to do it again. */
800 if (head == NULL_RTX)
801 break;
802
803 /* While we now have edge lists with which other portions of the
804 compiler might determine a call ending a basic block does not
805 imply an abnormal edge, it will be a bit before everything can
806 be updated. So continue to emit a noop at the end of such a
807 block. */
808 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
809 {
810 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
811 end = emit_insn_after (nop, end);
812 }
813 goto new_bb_exclusive;
814
815 case CALL_INSN:
816 {
817 /* Record whether this call created an edge. */
818 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
819 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
820 int call_has_abnormal_edge = 0;
821
822 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
823 {
824 /* Scan each of the alternatives for label refs. */
825 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
826 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
827 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
828 /* Record its tail recursion label, if any. */
829 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
830 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
831 }
832
833 /* If there is an EH region or rethrow, we have an edge. */
834 if ((eh_list && region > 0)
835 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
836 call_has_abnormal_edge = 1;
837 else if (nonlocal_goto_handler_labels && region >= 0)
838 /* If there is a nonlocal goto label and the specified
839 region number isn't -1, we have an edge. (0 means
840 no throw, but might have a nonlocal goto). */
841 call_has_abnormal_edge = 1;
842
843 /* A basic block ends at a call that can either throw or
844 do a non-local goto. */
845 if (call_has_abnormal_edge)
846 {
847 new_bb_inclusive:
848 if (head == NULL_RTX)
849 head = insn;
850 end = insn;
851
852 new_bb_exclusive:
853 create_basic_block (i++, head, end, bb_note);
854 head = end = NULL_RTX;
855 bb_note = NULL_RTX;
856 break;
857 }
858 }
859 /* Fall through. */
860
861 default:
862 if (GET_RTX_CLASS (code) == 'i')
863 {
864 if (head == NULL_RTX)
865 head = insn;
866 end = insn;
867 }
868 break;
869 }
870
871 if (GET_RTX_CLASS (code) == 'i'
872 && GET_CODE (insn) != JUMP_INSN)
873 {
874 rtx note;
875
876 /* Make a list of all labels referred to other than by jumps.
877
878 Make a special exception for labels followed by an ADDR*VEC,
879 as this would be a part of the tablejump setup code.
880
881 Make a special exception for the eh_return_stub_label, which
882 we know isn't part of any otherwise visible control flow.
883
884 Make a special exception to registers loaded with label
885 values just before jump insns that use them. */
886
887 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
888 if (REG_NOTE_KIND (note) == REG_LABEL)
889 {
890 rtx lab = XEXP (note, 0), next;
891
892 if (lab == eh_return_stub_label)
893 ;
894 else if ((next = next_nonnote_insn (lab)) != NULL
895 && GET_CODE (next) == JUMP_INSN
896 && (GET_CODE (PATTERN (next)) == ADDR_VEC
897 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
898 ;
899 else if (GET_CODE (lab) == NOTE)
900 ;
901 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
902 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
903 ;
904 else
905 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
906 }
907 }
908 }
909
910 if (head != NULL_RTX)
911 create_basic_block (i++, head, end, bb_note);
912 else if (bb_note)
913 flow_delete_insn (bb_note);
914
915 if (i != n_basic_blocks)
916 abort ();
917
918 label_value_list = lvl;
919 tail_recursion_label_list = trll;
920 }
921
922 /* Tidy the CFG by deleting unreachable code and whatnot. */
923
924 void
925 cleanup_cfg (f)
926 rtx f;
927 {
928 delete_unreachable_blocks ();
929 move_stray_eh_region_notes ();
930 record_active_eh_regions (f);
931 try_merge_blocks ();
932 mark_critical_edges ();
933
934 /* Kill the data we won't maintain. */
935 free_EXPR_LIST_list (&label_value_list);
936 free_EXPR_LIST_list (&tail_recursion_label_list);
937 }
938
939 /* Create a new basic block consisting of the instructions between
940 HEAD and END inclusive. Reuses the note and basic block struct
941 in BB_NOTE, if any. */
942
943 void
944 create_basic_block (index, head, end, bb_note)
945 int index;
946 rtx head, end, bb_note;
947 {
948 basic_block bb;
949
950 if (bb_note
951 && ! RTX_INTEGRATED_P (bb_note)
952 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
953 && bb->aux == NULL)
954 {
955 /* If we found an existing note, thread it back onto the chain. */
956
957 rtx after;
958
959 if (GET_CODE (head) == CODE_LABEL)
960 after = head;
961 else
962 {
963 after = PREV_INSN (head);
964 head = bb_note;
965 }
966
967 if (after != bb_note && NEXT_INSN (after) != bb_note)
968 reorder_insns (bb_note, bb_note, after);
969 }
970 else
971 {
972 /* Otherwise we must create a note and a basic block structure.
973 Since we allow basic block structs in rtl, give the struct
974 the same lifetime by allocating it off the function obstack
975 rather than using malloc. */
976
977 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
978 memset (bb, 0, sizeof (*bb));
979
980 if (GET_CODE (head) == CODE_LABEL)
981 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
982 else
983 {
984 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
985 head = bb_note;
986 }
987 NOTE_BASIC_BLOCK (bb_note) = bb;
988 }
989
990 /* Always include the bb note in the block. */
991 if (NEXT_INSN (end) == bb_note)
992 end = bb_note;
993
994 bb->head = head;
995 bb->end = end;
996 bb->index = index;
997 BASIC_BLOCK (index) = bb;
998
999 /* Tag the block so that we know it has been used when considering
1000 other basic block notes. */
1001 bb->aux = bb;
1002 }
1003 \f
1004 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1005 indexed by INSN_UID. MAX is the size of the array. */
1006
1007 void
1008 compute_bb_for_insn (max)
1009 int max;
1010 {
1011 int i;
1012
1013 if (basic_block_for_insn)
1014 VARRAY_FREE (basic_block_for_insn);
1015 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1016
1017 for (i = 0; i < n_basic_blocks; ++i)
1018 {
1019 basic_block bb = BASIC_BLOCK (i);
1020 rtx insn, end;
1021
1022 end = bb->end;
1023 insn = bb->head;
1024 while (1)
1025 {
1026 int uid = INSN_UID (insn);
1027 if (uid < max)
1028 VARRAY_BB (basic_block_for_insn, uid) = bb;
1029 if (insn == end)
1030 break;
1031 insn = NEXT_INSN (insn);
1032 }
1033 }
1034 }
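
/* A hypothetical use of the mapping built above, for illustration only:
   once compute_bb_for_insn has run, the block containing an insn can be
   looked up directly through BLOCK_FOR_INSN.  The helper name below is
   invented for this example.  */

static basic_block
sketch_block_of (insn)
     rtx insn;
{
  /* BLOCK_FOR_INSN consults the basic_block_for_insn varray that
     compute_bb_for_insn just filled in.  */
  return BLOCK_FOR_INSN (insn);
}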
1035
1036 /* Free the memory associated with the edge structures. */
1037
1038 static void
1039 clear_edges ()
1040 {
1041 int i;
1042 edge n, e;
1043
1044 for (i = 0; i < n_basic_blocks; ++i)
1045 {
1046 basic_block bb = BASIC_BLOCK (i);
1047
1048 for (e = bb->succ; e; e = n)
1049 {
1050 n = e->succ_next;
1051 free (e);
1052 }
1053
1054 bb->succ = 0;
1055 bb->pred = 0;
1056 }
1057
1058 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1059 {
1060 n = e->succ_next;
1061 free (e);
1062 }
1063
1064 ENTRY_BLOCK_PTR->succ = 0;
1065 EXIT_BLOCK_PTR->pred = 0;
1066
1067 n_edges = 0;
1068 }
1069
1070 /* Identify the edges between basic blocks.
1071
1072    LABEL_VALUE_LIST is a list of labels mentioned in non-jump rtl; blocks
1073    that are otherwise unreachable may be reachable through a computed
1074    jump to one of these labels.  The exception regions active at the
1075    ends of blocks are taken from each block's eh_beg and eh_end fields,
1076    as set up by record_active_eh_regions.  */
1077
1078 static void
1079 make_edges (label_value_list)
1080 rtx label_value_list;
1081 {
1082 int i;
1083 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1084 sbitmap *edge_cache = NULL;
1085
1086 /* Assume no computed jump; revise as we create edges. */
1087 current_function_has_computed_jump = 0;
1088
1089 /* Heavy use of computed goto in machine-generated code can lead to
1090 nearly fully-connected CFGs. In that case we spend a significant
1091 amount of time searching the edge lists for duplicates. */
1092 if (forced_labels || label_value_list)
1093 {
1094 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1095 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1096 }
1097
1098 /* By nature of the way these get numbered, block 0 is always the entry. */
1099 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1100
1101 for (i = 0; i < n_basic_blocks; ++i)
1102 {
1103 basic_block bb = BASIC_BLOCK (i);
1104 rtx insn, x;
1105 enum rtx_code code;
1106 int force_fallthru = 0;
1107
1108 /* Examine the last instruction of the block, and discover the
1109 ways we can leave the block. */
1110
1111 insn = bb->end;
1112 code = GET_CODE (insn);
1113
1114 /* A branch. */
1115 if (code == JUMP_INSN)
1116 {
1117 rtx tmp;
1118
1119 /* Recognize a non-local goto as a branch outside the
1120 current function. */
1121 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1122 ;
1123
1124 /* ??? Recognize a tablejump and do the right thing. */
1125 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1126 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1127 && GET_CODE (tmp) == JUMP_INSN
1128 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1129 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1130 {
1131 rtvec vec;
1132 int j;
1133
1134 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1135 vec = XVEC (PATTERN (tmp), 0);
1136 else
1137 vec = XVEC (PATTERN (tmp), 1);
1138
1139 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1140 make_label_edge (edge_cache, bb,
1141 XEXP (RTVEC_ELT (vec, j), 0), 0);
1142
1143 /* Some targets (eg, ARM) emit a conditional jump that also
1144 contains the out-of-range target. Scan for these and
1145 add an edge if necessary. */
1146 if ((tmp = single_set (insn)) != NULL
1147 && SET_DEST (tmp) == pc_rtx
1148 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1149 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1150 make_label_edge (edge_cache, bb,
1151 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1152
1153 #ifdef CASE_DROPS_THROUGH
1154 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1155 us naturally detecting fallthru into the next block. */
1156 force_fallthru = 1;
1157 #endif
1158 }
1159
1160 /* If this is a computed jump, then mark it as reaching
1161 everything on the label_value_list and forced_labels list. */
1162 else if (computed_jump_p (insn))
1163 {
1164 current_function_has_computed_jump = 1;
1165
1166 for (x = label_value_list; x; x = XEXP (x, 1))
1167 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1168
1169 for (x = forced_labels; x; x = XEXP (x, 1))
1170 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1171 }
1172
1173	  /* A return insn creates an edge out to the exit block.  */
1174 else if (returnjump_p (insn))
1175 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1176
1177 /* Otherwise, we have a plain conditional or unconditional jump. */
1178 else
1179 {
1180 if (! JUMP_LABEL (insn))
1181 abort ();
1182 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1183 }
1184 }
1185
1186 /* If this is a sibling call insn, then this is in effect a
1187 combined call and return, and so we need an edge to the
1188 exit block. No need to worry about EH edges, since we
1189 wouldn't have created the sibling call in the first place. */
1190
1191 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1192 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1193 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1194
1195 /* If this is a CALL_INSN, then mark it as reaching the active EH
1196 handler for this CALL_INSN. If we're handling asynchronous
1197 exceptions then any insn can reach any of the active handlers.
1198
1199 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1200
1201 else if (code == CALL_INSN || asynchronous_exceptions)
1202 {
1203 /* Add any appropriate EH edges. We do this unconditionally
1204 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1205 on the call, and this needn't be within an EH region. */
1206 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1207
1208 /* If we have asynchronous exceptions, do the same for *all*
1209 exception regions active in the block. */
1210 if (asynchronous_exceptions
1211 && bb->eh_beg != bb->eh_end)
1212 {
1213 if (bb->eh_beg >= 0)
1214 make_eh_edge (edge_cache, eh_nest_info, bb,
1215 NULL_RTX, bb->eh_beg);
1216
1217 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1218 if (GET_CODE (x) == NOTE
1219 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1220 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1221 {
1222 int region = NOTE_EH_HANDLER (x);
1223 make_eh_edge (edge_cache, eh_nest_info, bb,
1224 NULL_RTX, region);
1225 }
1226 }
1227
1228 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1229 {
1230 /* ??? This could be made smarter: in some cases it's possible
1231 to tell that certain calls will not do a nonlocal goto.
1232
1233 For example, if the nested functions that do the nonlocal
1234 gotos do not have their addresses taken, then only calls to
1235 those functions or to other nested functions that use them
1236 could possibly do nonlocal gotos. */
1237 /* We do know that a REG_EH_REGION note with a value less
1238 than 0 is guaranteed not to perform a non-local goto. */
1239 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1240 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1241 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1242 make_label_edge (edge_cache, bb, XEXP (x, 0),
1243 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1244 }
1245 }
1246
1247 /* We know something about the structure of the function __throw in
1248 libgcc2.c. It is the only function that ever contains eh_stub
1249 labels. It modifies its return address so that the last block
1250 returns to one of the eh_stub labels within it. So we have to
1251 make additional edges in the flow graph. */
1252 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1253 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1254
1255 /* Find out if we can drop through to the next block. */
1256 insn = next_nonnote_insn (insn);
1257 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1258 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1259 else if (i + 1 < n_basic_blocks)
1260 {
1261 rtx tmp = BLOCK_HEAD (i + 1);
1262 if (GET_CODE (tmp) == NOTE)
1263 tmp = next_nonnote_insn (tmp);
1264 if (force_fallthru || insn == tmp)
1265 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1266 }
1267 }
1268
1269 free_eh_nesting_info (eh_nest_info);
1270 if (edge_cache)
1271 sbitmap_vector_free (edge_cache);
1272 }
1273
1274 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1275 about the edge that is accumulated between calls. */
1276
1277 void
1278 make_edge (edge_cache, src, dst, flags)
1279 sbitmap *edge_cache;
1280 basic_block src, dst;
1281 int flags;
1282 {
1283 int use_edge_cache;
1284 edge e;
1285
1286 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1287 many edges to them, and we didn't allocate memory for it. */
1288 use_edge_cache = (edge_cache
1289 && src != ENTRY_BLOCK_PTR
1290 && dst != EXIT_BLOCK_PTR);
1291
1292 /* Make sure we don't add duplicate edges. */
1293 switch (use_edge_cache)
1294 {
1295 default:
1296       /* Quick test for non-existence of the edge.  */
1297 if (! TEST_BIT (edge_cache[src->index], dst->index))
1298 break;
1299
1300 /* The edge exists; early exit if no work to do. */
1301 if (flags == 0)
1302 return;
1303
1304 /* FALLTHRU */
1305 case 0:
1306 for (e = src->succ; e; e = e->succ_next)
1307 if (e->dest == dst)
1308 {
1309 e->flags |= flags;
1310 return;
1311 }
1312 break;
1313 }
1314
1315 e = (edge) xcalloc (1, sizeof (*e));
1316 n_edges++;
1317
1318 e->succ_next = src->succ;
1319 e->pred_next = dst->pred;
1320 e->src = src;
1321 e->dest = dst;
1322 e->flags = flags;
1323
1324 src->succ = e;
1325 dst->pred = e;
1326
1327 if (use_edge_cache)
1328 SET_BIT (edge_cache[src->index], dst->index);
1329 }
1330
1331 /* Create an edge from a basic block to a label. */
1332
1333 static void
1334 make_label_edge (edge_cache, src, label, flags)
1335 sbitmap *edge_cache;
1336 basic_block src;
1337 rtx label;
1338 int flags;
1339 {
1340 if (GET_CODE (label) != CODE_LABEL)
1341 abort ();
1342
1343 /* If the label was never emitted, this insn is junk, but avoid a
1344 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1345 as a result of a syntax error and a diagnostic has already been
1346 printed. */
1347
1348 if (INSN_UID (label) == 0)
1349 return;
1350
1351 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1352 }
1353
1354 /* Create the edges generated by INSN in REGION. */
1355
1356 static void
1357 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1358 sbitmap *edge_cache;
1359 eh_nesting_info *eh_nest_info;
1360 basic_block src;
1361 rtx insn;
1362 int region;
1363 {
1364 handler_info **handler_list;
1365 int num, is_call;
1366
1367 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1368 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1369 while (--num >= 0)
1370 {
1371 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1372 EDGE_ABNORMAL | EDGE_EH | is_call);
1373 }
1374 }
1375
1376 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1377 dangerous if we intend to move basic blocks around. Move such notes
1378 into the following block. */
1379
1380 static void
1381 move_stray_eh_region_notes ()
1382 {
1383 int i;
1384 basic_block b1, b2;
1385
1386 if (n_basic_blocks < 2)
1387 return;
1388
1389 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1390 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1391 {
1392 rtx insn, next, list = NULL_RTX;
1393
1394 b1 = BASIC_BLOCK (i);
1395 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1396 {
1397 next = NEXT_INSN (insn);
1398 if (GET_CODE (insn) == NOTE
1399 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1400 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1401 {
1402 /* Unlink from the insn chain. */
1403 NEXT_INSN (PREV_INSN (insn)) = next;
1404 PREV_INSN (next) = PREV_INSN (insn);
1405
1406 /* Queue it. */
1407 NEXT_INSN (insn) = list;
1408 list = insn;
1409 }
1410 }
1411
1412 if (list == NULL_RTX)
1413 continue;
1414
1415 /* Find where to insert these things. */
1416 insn = b2->head;
1417 if (GET_CODE (insn) == CODE_LABEL)
1418 insn = NEXT_INSN (insn);
1419
1420 while (list)
1421 {
1422 next = NEXT_INSN (list);
1423 add_insn_after (list, insn);
1424 list = next;
1425 }
1426 }
1427 }
1428
1429 /* Recompute eh_beg/eh_end for each basic block. */
1430
1431 static void
1432 record_active_eh_regions (f)
1433 rtx f;
1434 {
1435 rtx insn, eh_list = NULL_RTX;
1436 int i = 0;
1437 basic_block bb = BASIC_BLOCK (0);
1438
1439 for (insn = f; insn; insn = NEXT_INSN (insn))
1440 {
1441 if (bb->head == insn)
1442 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1443
1444 if (GET_CODE (insn) == NOTE)
1445 {
1446 int kind = NOTE_LINE_NUMBER (insn);
1447 if (kind == NOTE_INSN_EH_REGION_BEG)
1448 eh_list = alloc_INSN_LIST (insn, eh_list);
1449 else if (kind == NOTE_INSN_EH_REGION_END)
1450 {
1451 rtx t = XEXP (eh_list, 1);
1452 free_INSN_LIST_node (eh_list);
1453 eh_list = t;
1454 }
1455 }
1456
1457 if (bb->end == insn)
1458 {
1459 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1460 i += 1;
1461 if (i == n_basic_blocks)
1462 break;
1463 bb = BASIC_BLOCK (i);
1464 }
1465 }
1466 }
1467
1468 /* Identify critical edges and set the bits appropriately. */
1469
1470 static void
1471 mark_critical_edges ()
1472 {
1473 int i, n = n_basic_blocks;
1474 basic_block bb;
1475
1476 /* We begin with the entry block. This is not terribly important now,
1477 but could be if a front end (Fortran) implemented alternate entry
1478 points. */
1479 bb = ENTRY_BLOCK_PTR;
1480 i = -1;
1481
1482 while (1)
1483 {
1484 edge e;
1485
1486 /* (1) Critical edges must have a source with multiple successors. */
1487 if (bb->succ && bb->succ->succ_next)
1488 {
1489 for (e = bb->succ; e; e = e->succ_next)
1490 {
1491 /* (2) Critical edges must have a destination with multiple
1492 predecessors. Note that we know there is at least one
1493 predecessor -- the edge we followed to get here. */
1494 if (e->dest->pred->pred_next)
1495 e->flags |= EDGE_CRITICAL;
1496 else
1497 e->flags &= ~EDGE_CRITICAL;
1498 }
1499 }
1500 else
1501 {
1502 for (e = bb->succ; e; e = e->succ_next)
1503 e->flags &= ~EDGE_CRITICAL;
1504 }
1505
1506 if (++i >= n)
1507 break;
1508 bb = BASIC_BLOCK (i);
1509 }
1510 }
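
/* Read as a predicate on a single edge, the test applied above says: an
   edge is critical iff its source has more than one successor and its
   destination has more than one predecessor.  The helper below is only an
   illustration of that definition (the pass itself caches the result in
   the EDGE_CRITICAL flag rather than recomputing it); its name is
   invented here.  */

static int
sketch_edge_critical_p (e)
     edge e;
{
  return (e->src->succ != NULL
	  && e->src->succ->succ_next != NULL
	  && e->dest->pred != NULL
	  && e->dest->pred->pred_next != NULL);
}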
1511 \f
1512 /* Split a block BB after insn INSN creating a new fallthru edge.
1513 Return the new edge. Note that to keep other parts of the compiler happy,
1514 this function renumbers all the basic blocks so that the new
1515 one has a number one greater than the block split. */
1516
1517 edge
1518 split_block (bb, insn)
1519 basic_block bb;
1520 rtx insn;
1521 {
1522 basic_block new_bb;
1523 edge new_edge;
1524 edge e;
1525 rtx bb_note;
1526 int i, j;
1527
1528 /* There is no point splitting the block after its end. */
1529 if (bb->end == insn)
1530 return 0;
1531
1532 /* Create the new structures. */
1533 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1534 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1535 n_edges++;
1536
1537 memset (new_bb, 0, sizeof (*new_bb));
1538
1539 new_bb->head = NEXT_INSN (insn);
1540 new_bb->end = bb->end;
1541 bb->end = insn;
1542
1543 new_bb->succ = bb->succ;
1544 bb->succ = new_edge;
1545 new_bb->pred = new_edge;
1546 new_bb->count = bb->count;
1547 new_bb->loop_depth = bb->loop_depth;
1548
1549 new_edge->src = bb;
1550 new_edge->dest = new_bb;
1551 new_edge->flags = EDGE_FALLTHRU;
1552 new_edge->probability = REG_BR_PROB_BASE;
1553 new_edge->count = bb->count;
1554
1555 /* Redirect the src of the successor edges of bb to point to new_bb. */
1556 for (e = new_bb->succ; e; e = e->succ_next)
1557 e->src = new_bb;
1558
1559 /* Place the new block just after the block being split. */
1560 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1561
1562   /* Some parts of the compiler expect blocks to be numbered in
1563      sequential order, so insert the new block immediately after the
1564      block being split.  */
1565 j = bb->index;
1566 for (i = n_basic_blocks - 1; i > j + 1; --i)
1567 {
1568 basic_block tmp = BASIC_BLOCK (i - 1);
1569 BASIC_BLOCK (i) = tmp;
1570 tmp->index = i;
1571 }
1572
1573 BASIC_BLOCK (i) = new_bb;
1574 new_bb->index = i;
1575
1576 /* Create the basic block note. */
1577 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1578 new_bb->head);
1579 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1580 new_bb->head = bb_note;
1581
1582 update_bb_for_insn (new_bb);
1583
1584 if (bb->global_live_at_start)
1585 {
1586 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1587 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1588 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1589
1590 /* We now have to calculate which registers are live at the end
1591 of the split basic block and at the start of the new basic
1592 block. Start with those registers that are known to be live
1593 at the end of the original basic block and get
1594 propagate_block to determine which registers are live. */
1595 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1596 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1597 COPY_REG_SET (bb->global_live_at_end,
1598 new_bb->global_live_at_start);
1599 }
1600
1601 return new_edge;
1602 }
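
/* A hypothetical caller of split_block, shown only to illustrate the
   interface documented above: splitting BB after INSN returns the new
   fallthru edge, whose destination is the freshly created block holding
   the insns that followed INSN.  The helper name is invented for this
   example.  */

static basic_block
sketch_isolate_tail (bb, insn)
     basic_block bb;
     rtx insn;
{
  edge fallthru = split_block (bb, insn);

  /* split_block returns zero when INSN is already the last insn of BB,
     in which case there is nothing to isolate.  */
  return fallthru ? fallthru->dest : bb;
}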
1603
1604
1605 /* Split a (typically critical) edge. Return the new block.
1606 Abort on abnormal edges.
1607
1608 ??? The code generally expects to be called on critical edges.
1609 The case of a block ending in an unconditional jump to a
1610 block with multiple predecessors is not handled optimally. */
1611
1612 basic_block
1613 split_edge (edge_in)
1614 edge edge_in;
1615 {
1616 basic_block old_pred, bb, old_succ;
1617 edge edge_out;
1618 rtx bb_note;
1619 int i, j;
1620
1621 /* Abnormal edges cannot be split. */
1622 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1623 abort ();
1624
1625 old_pred = edge_in->src;
1626 old_succ = edge_in->dest;
1627
1628 /* Remove the existing edge from the destination's pred list. */
1629 {
1630 edge *pp;
1631 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1632 continue;
1633 *pp = edge_in->pred_next;
1634 edge_in->pred_next = NULL;
1635 }
1636
1637 /* Create the new structures. */
1638 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1639 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1640 n_edges++;
1641
1642 memset (bb, 0, sizeof (*bb));
1643
1644 /* ??? This info is likely going to be out of date very soon. */
1645 if (old_succ->global_live_at_start)
1646 {
1647 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1648 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1649 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1650 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1651 }
1652
1653 /* Wire them up. */
1654 bb->pred = edge_in;
1655 bb->succ = edge_out;
1656 bb->count = edge_in->count;
1657
1658 edge_in->dest = bb;
1659 edge_in->flags &= ~EDGE_CRITICAL;
1660
1661 edge_out->pred_next = old_succ->pred;
1662 edge_out->succ_next = NULL;
1663 edge_out->src = bb;
1664 edge_out->dest = old_succ;
1665 edge_out->flags = EDGE_FALLTHRU;
1666 edge_out->probability = REG_BR_PROB_BASE;
1667 edge_out->count = edge_in->count;
1668
1669 old_succ->pred = edge_out;
1670
1671 /* Tricky case -- if there existed a fallthru into the successor
1672 (and we're not it) we must add a new unconditional jump around
1673 the new block we're actually interested in.
1674
1675 Further, if that edge is critical, this means a second new basic
1676 block must be created to hold it. In order to simplify correct
1677 insn placement, do this before we touch the existing basic block
1678 ordering for the block we were really wanting. */
1679 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1680 {
1681 edge e;
1682 for (e = edge_out->pred_next; e; e = e->pred_next)
1683 if (e->flags & EDGE_FALLTHRU)
1684 break;
1685
1686 if (e)
1687 {
1688 basic_block jump_block;
1689 rtx pos;
1690
1691 if ((e->flags & EDGE_CRITICAL) == 0
1692 && e->src != ENTRY_BLOCK_PTR)
1693 {
1694	      /* Non-critical -- we can simply add a jump to the end
1695		 of the existing predecessor.  */
1696 jump_block = e->src;
1697 }
1698 else
1699 {
1700 /* We need a new block to hold the jump. The simplest
1701 way to do the bulk of the work here is to recursively
1702 call ourselves. */
1703 jump_block = split_edge (e);
1704 e = jump_block->succ;
1705 }
1706
1707 /* Now add the jump insn ... */
1708 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1709 jump_block->end);
1710 jump_block->end = pos;
1711 if (basic_block_for_insn)
1712 set_block_for_insn (pos, jump_block);
1713 emit_barrier_after (pos);
1714
1715 /* ... let jump know that label is in use, ... */
1716 JUMP_LABEL (pos) = old_succ->head;
1717 ++LABEL_NUSES (old_succ->head);
1718
1719 /* ... and clear fallthru on the outgoing edge. */
1720 e->flags &= ~EDGE_FALLTHRU;
1721
1722 /* Continue splitting the interesting edge. */
1723 }
1724 }
1725
1726 /* Place the new block just in front of the successor. */
1727 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1728 if (old_succ == EXIT_BLOCK_PTR)
1729 j = n_basic_blocks - 1;
1730 else
1731 j = old_succ->index;
1732 for (i = n_basic_blocks - 1; i > j; --i)
1733 {
1734 basic_block tmp = BASIC_BLOCK (i - 1);
1735 BASIC_BLOCK (i) = tmp;
1736 tmp->index = i;
1737 }
1738 BASIC_BLOCK (i) = bb;
1739 bb->index = i;
1740
1741 /* Create the basic block note.
1742
1743      Where we place the note can have a noticeable impact on the generated
1744 code. Consider this cfg:
1745
1746 E
1747 |
1748 0
1749 / \
1750 +->1-->2--->E
1751 | |
1752 +--+
1753
1754 If we need to insert an insn on the edge from block 0 to block 1,
1755 we want to ensure the instructions we insert are outside of any
1756 loop notes that physically sit between block 0 and block 1. Otherwise
1757 we confuse the loop optimizer into thinking the loop is a phony. */
1758 if (old_succ != EXIT_BLOCK_PTR
1759 && PREV_INSN (old_succ->head)
1760 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1761 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1762 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1763 PREV_INSN (old_succ->head));
1764 else if (old_succ != EXIT_BLOCK_PTR)
1765 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1766 else
1767 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1768 NOTE_BASIC_BLOCK (bb_note) = bb;
1769 bb->head = bb->end = bb_note;
1770
1771 /* Not quite simple -- for non-fallthru edges, we must adjust the
1772 predecessor's jump instruction to target our new block. */
1773 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1774 {
1775 rtx tmp, insn = old_pred->end;
1776 rtx old_label = old_succ->head;
1777 rtx new_label = gen_label_rtx ();
1778
1779 if (GET_CODE (insn) != JUMP_INSN)
1780 abort ();
1781
1782 /* ??? Recognize a tablejump and adjust all matching cases. */
1783 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1784 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1785 && GET_CODE (tmp) == JUMP_INSN
1786 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1787 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1788 {
1789 rtvec vec;
1790 int j;
1791
1792 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1793 vec = XVEC (PATTERN (tmp), 0);
1794 else
1795 vec = XVEC (PATTERN (tmp), 1);
1796
1797 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1798 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1799 {
1800 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1801 --LABEL_NUSES (old_label);
1802 ++LABEL_NUSES (new_label);
1803 }
1804
1805 /* Handle casesi dispatch insns */
1806 if ((tmp = single_set (insn)) != NULL
1807 && SET_DEST (tmp) == pc_rtx
1808 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1809 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1810 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1811 {
1812 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1813 new_label);
1814 --LABEL_NUSES (old_label);
1815 ++LABEL_NUSES (new_label);
1816 }
1817 }
1818 else
1819 {
1820 /* This would have indicated an abnormal edge. */
1821 if (computed_jump_p (insn))
1822 abort ();
1823
1824 /* A return instruction can't be redirected. */
1825 if (returnjump_p (insn))
1826 abort ();
1827
1828 /* If the insn doesn't go where we think, we're confused. */
1829 if (JUMP_LABEL (insn) != old_label)
1830 abort ();
1831
1832 redirect_jump (insn, new_label, 0);
1833 }
1834
1835 emit_label_before (new_label, bb_note);
1836 bb->head = new_label;
1837 }
1838
1839 return bb;
1840 }
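
#if 0
/* Illustrative sketch, not part of the original source: the invariants a
   caller may rely on after splitting edge E_IN.  The function name is an
   assumption made only for this example, and the block is disabled so it
   is never compiled.  */
static void
example_split_edge_invariants (e_in)
     edge e_in;
{
  basic_block new_bb = split_edge (e_in);

  /* E_IN now enters NEW_BB, and NEW_BB has exactly one outgoing
     fallthru edge leading to the old destination.  */
  if (e_in->dest != new_bb
      || new_bb->succ == NULL
      || new_bb->succ->succ_next != NULL
      || (new_bb->succ->flags & EDGE_FALLTHRU) == 0)
    abort ();
}
#endif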
1841
1842 /* Queue instructions for insertion on an edge between two basic blocks.
1843 The new instructions and basic blocks (if any) will not appear in the
1844 CFG until commit_edge_insertions is called. */
1845
1846 void
1847 insert_insn_on_edge (pattern, e)
1848 rtx pattern;
1849 edge e;
1850 {
1851 /* We cannot insert instructions on an abnormal critical edge.
1852 It will be easier to find the culprit if we die now. */
1853 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1854 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1855 abort ();
1856
1857 if (e->insns == NULL_RTX)
1858 start_sequence ();
1859 else
1860 push_to_sequence (e->insns);
1861
1862 emit_insn (pattern);
1863
1864 e->insns = get_insns ();
1865 end_sequence ();
1866 }
1867
1868 /* Update the CFG for the instructions queued on edge E. */
1869
1870 static void
1871 commit_one_edge_insertion (e)
1872 edge e;
1873 {
1874 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1875 basic_block bb;
1876
1877 /* Pull the insns off the edge now since the edge might go away. */
1878 insns = e->insns;
1879 e->insns = NULL_RTX;
1880
1881 /* Figure out where to put these things. If the destination has
1882 one predecessor, insert there. Except for the exit block. */
1883 if (e->dest->pred->pred_next == NULL
1884 && e->dest != EXIT_BLOCK_PTR)
1885 {
1886 bb = e->dest;
1887
1888 /* Get the location correct wrt a code label, and "nice" wrt
1889 a basic block note, and before everything else. */
1890 tmp = bb->head;
1891 if (GET_CODE (tmp) == CODE_LABEL)
1892 tmp = NEXT_INSN (tmp);
1893 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1894 tmp = NEXT_INSN (tmp);
1895 if (tmp == bb->head)
1896 before = tmp;
1897 else
1898 after = PREV_INSN (tmp);
1899 }
1900
1901 /* If the source has one successor and the edge is not abnormal,
1902 insert there. Except for the entry block. */
1903 else if ((e->flags & EDGE_ABNORMAL) == 0
1904 && e->src->succ->succ_next == NULL
1905 && e->src != ENTRY_BLOCK_PTR)
1906 {
1907 bb = e->src;
1908 /* It is possible to have a non-simple jump here. Consider a target
1909 where some forms of unconditional jumps clobber a register. This
1910 happens on the fr30 for example.
1911
1912 We know this block has a single successor, so we can just emit
1913 the queued insns before the jump. */
1914 if (GET_CODE (bb->end) == JUMP_INSN)
1915 {
1916 before = bb->end;
1917 }
1918 else
1919 {
1920 /* We'd better be fallthru, or we've lost track of what's what. */
1921 if ((e->flags & EDGE_FALLTHRU) == 0)
1922 abort ();
1923
1924 after = bb->end;
1925 }
1926 }
1927
1928 /* Otherwise we must split the edge. */
1929 else
1930 {
1931 bb = split_edge (e);
1932 after = bb->end;
1933 }
1934
1935 /* Now that we've found the spot, do the insertion. */
1936
1937 /* Set the new block number for these insns, if structure is allocated. */
1938 if (basic_block_for_insn)
1939 {
1940 rtx i;
1941 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1942 set_block_for_insn (i, bb);
1943 }
1944
1945 if (before)
1946 {
1947 emit_insns_before (insns, before);
1948 if (before == bb->head)
1949 bb->head = insns;
1950
1951 last = prev_nonnote_insn (before);
1952 }
1953 else
1954 {
1955 last = emit_insns_after (insns, after);
1956 if (after == bb->end)
1957 bb->end = last;
1958 }
1959
1960 if (returnjump_p (last))
1961 {
1962 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1963 This is not currently a problem because this only happens
1964 for the (single) epilogue, which already has a fallthru edge
1965 to EXIT. */
1966
1967 e = bb->succ;
1968 if (e->dest != EXIT_BLOCK_PTR
1969 || e->succ_next != NULL
1970 || (e->flags & EDGE_FALLTHRU) == 0)
1971 abort ();
1972 e->flags &= ~EDGE_FALLTHRU;
1973
1974 emit_barrier_after (last);
1975 bb->end = last;
1976
1977 if (before)
1978 flow_delete_insn (before);
1979 }
1980 else if (GET_CODE (last) == JUMP_INSN)
1981 abort ();
1982 }
1983
1984 /* Update the CFG for all queued instructions. */
1985
1986 void
1987 commit_edge_insertions ()
1988 {
1989 int i;
1990 basic_block bb;
1991
1992 #ifdef ENABLE_CHECKING
1993 verify_flow_info ();
1994 #endif
1995
1996 i = -1;
1997 bb = ENTRY_BLOCK_PTR;
1998 while (1)
1999 {
2000 edge e, next;
2001
2002 for (e = bb->succ; e; e = next)
2003 {
2004 next = e->succ_next;
2005 if (e->insns)
2006 commit_one_edge_insertion (e);
2007 }
2008
2009 if (++i >= n_basic_blocks)
2010 break;
2011 bb = BASIC_BLOCK (i);
2012 }
2013 }
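
#if 0
/* Illustrative sketch, not part of the original source: the intended usage
   pattern for insert_insn_on_edge and commit_edge_insertions.  A pass
   queues insns on edges and then commits them once; any edge splitting
   needed to hold the code happens at commit time.  The helper name and
   the use of gen_move_insn are assumptions made only for this example.  */
static void
example_copy_on_out_edges (bb, dest, src)
     basic_block bb;
     rtx dest, src;
{
  edge e;

  /* Queue a register-to-register copy on every outgoing edge of BB.
     Abnormal critical edges would be rejected by insert_insn_on_edge.  */
  for (e = bb->succ; e; e = e->succ_next)
    insert_insn_on_edge (gen_move_insn (dest, src), e);

  /* Materialize all queued insns and update the CFG in one pass.  */
  commit_edge_insertions ();
}
#endif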
2014 \f
2015 /* Delete all unreachable basic blocks. */
2016
2017 static void
2018 delete_unreachable_blocks ()
2019 {
2020 basic_block *worklist, *tos;
2021 int deleted_handler;
2022 edge e;
2023 int i, n;
2024
2025 n = n_basic_blocks;
2026 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2027
2028 /* Use basic_block->aux as a marker. Clear them all. */
2029
2030 for (i = 0; i < n; ++i)
2031 BASIC_BLOCK (i)->aux = NULL;
2032
2033 /* Add our starting points to the worklist. Almost always there will
2034 be only one. It isn't inconceivable that we might one day directly
2035 support Fortran alternate entry points. */
2036
2037 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2038 {
2039 *tos++ = e->dest;
2040
2041 /* Mark the block with a handy non-null value. */
2042 e->dest->aux = e;
2043 }
2044
2045 /* Iterate: find everything reachable from what we've already seen. */
2046
2047 while (tos != worklist)
2048 {
2049 basic_block b = *--tos;
2050
2051 for (e = b->succ; e; e = e->succ_next)
2052 if (!e->dest->aux)
2053 {
2054 *tos++ = e->dest;
2055 e->dest->aux = e;
2056 }
2057 }
2058
2059 /* Delete all unreachable basic blocks. Count down so that we don't
2060 interfere with the block renumbering that happens in flow_delete_block. */
2061
2062 deleted_handler = 0;
2063
2064 for (i = n - 1; i >= 0; --i)
2065 {
2066 basic_block b = BASIC_BLOCK (i);
2067
2068 if (b->aux != NULL)
2069 /* This block was found. Tidy up the mark. */
2070 b->aux = NULL;
2071 else
2072 deleted_handler |= flow_delete_block (b);
2073 }
2074
2075 tidy_fallthru_edges ();
2076
2077 /* If we deleted an exception handler, we may have EH region begin/end
2078 blocks to remove as well. */
2079 if (deleted_handler)
2080 delete_eh_regions ();
2081
2082 free (worklist);
2083 }
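
#if 0
/* Illustrative sketch, not part of the original source: the same
   worklist / aux-marking discipline used by delete_unreachable_blocks,
   reduced to a yes/no reachability query for an ordinary block.  The
   helper name is an assumption made only for this example.  */
static int
example_block_is_reachable (target)
     basic_block target;
{
  basic_block *worklist, *tos;
  edge e;
  int i, found = 0;

  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
					    * n_basic_blocks);

  /* Clear the aux markers, then seed the worklist from the entry block.  */
  for (i = 0; i < n_basic_blocks; ++i)
    BASIC_BLOCK (i)->aux = NULL;
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    if (e->dest != EXIT_BLOCK_PTR)
      {
	*tos++ = e->dest;
	e->dest->aux = e;
      }

  /* Pop blocks and push their unvisited successors until the target
     is seen or the worklist empties.  */
  while (tos != worklist && ! found)
    {
      basic_block b = *--tos;

      if (b == target)
	found = 1;
      for (e = b->succ; e; e = e->succ_next)
	if (e->dest != EXIT_BLOCK_PTR && ! e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  free (worklist);
  return found;
}
#endif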
2084
2085 /* Find EH regions for which there is no longer a handler, and delete them. */
2086
2087 static void
2088 delete_eh_regions ()
2089 {
2090 rtx insn;
2091
2092 update_rethrow_references ();
2093
2094 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2095 if (GET_CODE (insn) == NOTE)
2096 {
2097 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2098 || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2099 {
2100 int num = NOTE_EH_HANDLER (insn);
2101 /* A NULL handler indicates a region is no longer needed,
2102 as long as its rethrow label isn't used. */
2103 if (get_first_handler (num) == NULL && ! rethrow_used (num))
2104 {
2105 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2106 NOTE_SOURCE_FILE (insn) = 0;
2107 }
2108 }
2109 }
2110 }
2111
2112 /* Return true if NOTE is not one of the ones that must be kept paired,
2113 so that we may simply delete them. */
2114
2115 static int
2116 can_delete_note_p (note)
2117 rtx note;
2118 {
2119 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2120 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2121 }
2122
2123 /* Unlink a chain of insns between START and FINISH, leaving notes
2124 that must be paired. */
2125
2126 void
2127 flow_delete_insn_chain (start, finish)
2128 rtx start, finish;
2129 {
2130 /* Unchain the insns one by one. It would be quicker to delete all
2131 of these with a single unchaining, rather than one at a time, but
2132 we need to keep the NOTEs. */
2133
2134 rtx next;
2135
2136 while (1)
2137 {
2138 next = NEXT_INSN (start);
2139 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2140 ;
2141 else if (GET_CODE (start) == CODE_LABEL
2142 && ! can_delete_label_p (start))
2143 {
2144 const char *name = LABEL_NAME (start);
2145 PUT_CODE (start, NOTE);
2146 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2147 NOTE_SOURCE_FILE (start) = name;
2148 }
2149 else
2150 next = flow_delete_insn (start);
2151
2152 if (start == finish)
2153 break;
2154 start = next;
2155 }
2156 }
2157
2158 /* Delete the insns in a (non-live) block. We physically delete every
2159 non-deleted-note insn, and update the flow graph appropriately.
2160
2161 Return nonzero if we deleted an exception handler. */
2162
2163 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2164 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2165
2166 int
2167 flow_delete_block (b)
2168 basic_block b;
2169 {
2170 int deleted_handler = 0;
2171 rtx insn, end, tmp;
2172
2173 /* If the head of this block is a CODE_LABEL, then it might be the
2174 label for an exception handler which can't be reached.
2175
2176 We need to remove the label from the exception_handler_label list
2177 and remove the associated NOTE_INSN_EH_REGION_BEG and
2178 NOTE_INSN_EH_REGION_END notes. */
2179
2180 insn = b->head;
2181
2182 never_reached_warning (insn);
2183
2184 if (GET_CODE (insn) == CODE_LABEL)
2185 {
2186 rtx x, *prev = &exception_handler_labels;
2187
2188 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2189 {
2190 if (XEXP (x, 0) == insn)
2191 {
2192 /* Found a match, splice this label out of the EH label list. */
2193 *prev = XEXP (x, 1);
2194 XEXP (x, 1) = NULL_RTX;
2195 XEXP (x, 0) = NULL_RTX;
2196
2197 /* Remove the handler from all regions */
2198 remove_handler (insn);
2199 deleted_handler = 1;
2200 break;
2201 }
2202 prev = &XEXP (x, 1);
2203 }
2204 }
2205
2206 /* Include any jump table following the basic block. */
2207 end = b->end;
2208 if (GET_CODE (end) == JUMP_INSN
2209 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2210 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2211 && GET_CODE (tmp) == JUMP_INSN
2212 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2213 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2214 end = tmp;
2215
2216 /* Include any barrier that may follow the basic block. */
2217 tmp = next_nonnote_insn (end);
2218 if (tmp && GET_CODE (tmp) == BARRIER)
2219 end = tmp;
2220
2221 /* Selectively delete the entire chain. */
2222 flow_delete_insn_chain (insn, end);
2223
2224 /* Remove the edges into and out of this block. Note that there may
2225 indeed be edges in, if we are removing an unreachable loop. */
2226 {
2227 edge e, next, *q;
2228
2229 for (e = b->pred; e; e = next)
2230 {
2231 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2232 continue;
2233 *q = e->succ_next;
2234 next = e->pred_next;
2235 n_edges--;
2236 free (e);
2237 }
2238 for (e = b->succ; e; e = next)
2239 {
2240 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2241 continue;
2242 *q = e->pred_next;
2243 next = e->succ_next;
2244 n_edges--;
2245 free (e);
2246 }
2247
2248 b->pred = NULL;
2249 b->succ = NULL;
2250 }
2251
2252 /* Remove the basic block from the array, and compact behind it. */
2253 expunge_block (b);
2254
2255 return deleted_handler;
2256 }
2257
2258 /* Remove block B from the basic block array and compact behind it. */
2259
2260 static void
2261 expunge_block (b)
2262 basic_block b;
2263 {
2264 int i, n = n_basic_blocks;
2265
2266 for (i = b->index; i + 1 < n; ++i)
2267 {
2268 basic_block x = BASIC_BLOCK (i + 1);
2269 BASIC_BLOCK (i) = x;
2270 x->index = i;
2271 }
2272
2273 basic_block_info->num_elements--;
2274 n_basic_blocks--;
2275 }
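
#if 0
/* Illustrative sketch, not in the original source: after expunge_block
   compacts the array, the invariant BASIC_BLOCK (i)->index == i must hold
   for every remaining block.  The helper name is an assumption made only
   for this example.  */
static void
example_check_block_indices ()
{
  int i;

  for (i = 0; i < n_basic_blocks; ++i)
    if (BASIC_BLOCK (i)->index != i)
      abort ();
}
#endif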
2276
2277 /* Delete INSN by patching it out. Return the next insn. */
2278
2279 rtx
2280 flow_delete_insn (insn)
2281 rtx insn;
2282 {
2283 rtx prev = PREV_INSN (insn);
2284 rtx next = NEXT_INSN (insn);
2285 rtx note;
2286
2287 PREV_INSN (insn) = NULL_RTX;
2288 NEXT_INSN (insn) = NULL_RTX;
2289 INSN_DELETED_P (insn) = 1;
2290
2291 if (prev)
2292 NEXT_INSN (prev) = next;
2293 if (next)
2294 PREV_INSN (next) = prev;
2295 else
2296 set_last_insn (prev);
2297
2298 if (GET_CODE (insn) == CODE_LABEL)
2299 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2300
2301 /* If deleting a jump, decrement the use count of the label. Deleting
2302 the label itself should happen in the normal course of block merging. */
2303 if (GET_CODE (insn) == JUMP_INSN
2304 && JUMP_LABEL (insn)
2305 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2306 LABEL_NUSES (JUMP_LABEL (insn))--;
2307
2308 /* Also if deleting an insn that references a label. */
2309 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2310 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2311 LABEL_NUSES (XEXP (note, 0))--;
2312
2313 return next;
2314 }
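
#if 0
/* Illustrative sketch, not in the original source: flow_delete_insn keeps
   label reference counts consistent, so deleting a jump decrements the use
   count of its CODE_LABEL target.  The helper name is an assumption made
   only for this example.  */
static void
example_delete_jump (jump)
     rtx jump;
{
  rtx label = JUMP_LABEL (jump);
  int old_uses = 0;

  if (label && GET_CODE (label) == CODE_LABEL)
    old_uses = LABEL_NUSES (label);

  flow_delete_insn (jump);

  if (label && GET_CODE (label) == CODE_LABEL
      && LABEL_NUSES (label) != old_uses - 1)
    abort ();
}
#endif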
2315
2316 /* True if a given label can be deleted. */
2317
2318 static int
2319 can_delete_label_p (label)
2320 rtx label;
2321 {
2322 rtx x;
2323
2324 if (LABEL_PRESERVE_P (label))
2325 return 0;
2326
2327 for (x = forced_labels; x; x = XEXP (x, 1))
2328 if (label == XEXP (x, 0))
2329 return 0;
2330 for (x = label_value_list; x; x = XEXP (x, 1))
2331 if (label == XEXP (x, 0))
2332 return 0;
2333 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2334 if (label == XEXP (x, 0))
2335 return 0;
2336
2337 /* User declared labels must be preserved. */
2338 if (LABEL_NAME (label) != 0)
2339 return 0;
2340
2341 return 1;
2342 }
2343
2344 static int
2345 tail_recursion_label_p (label)
2346 rtx label;
2347 {
2348 rtx x;
2349
2350 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2351 if (label == XEXP (x, 0))
2352 return 1;
2353
2354 return 0;
2355 }
2356
2357 /* Blocks A and B are to be merged into a single block A. The insns
2358 are already contiguous, hence `nomove'. */
2359
2360 void
2361 merge_blocks_nomove (a, b)
2362 basic_block a, b;
2363 {
2364 edge e;
2365 rtx b_head, b_end, a_end;
2366 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2367 int b_empty = 0;
2368
2369 /* If there was a CODE_LABEL beginning B, delete it. */
2370 b_head = b->head;
2371 b_end = b->end;
2372 if (GET_CODE (b_head) == CODE_LABEL)
2373 {
2374 /* Detect basic blocks with nothing but a label. This can happen
2375 in particular at the end of a function. */
2376 if (b_head == b_end)
2377 b_empty = 1;
2378 del_first = del_last = b_head;
2379 b_head = NEXT_INSN (b_head);
2380 }
2381
2382 /* Delete the basic block note. */
2383 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2384 {
2385 if (b_head == b_end)
2386 b_empty = 1;
2387 if (! del_last)
2388 del_first = b_head;
2389 del_last = b_head;
2390 b_head = NEXT_INSN (b_head);
2391 }
2392
2393 /* If there was a jump out of A, delete it. */
2394 a_end = a->end;
2395 if (GET_CODE (a_end) == JUMP_INSN)
2396 {
2397 rtx prev;
2398
2399 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2400 if (GET_CODE (prev) != NOTE
2401 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2402 || prev == a->head)
2403 break;
2404
2405 del_first = a_end;
2406
2407 #ifdef HAVE_cc0
2408 /* If this was a conditional jump, we need to also delete
2409 the insn that set cc0. */
2410 if (prev && sets_cc0_p (prev))
2411 {
2412 rtx tmp = prev;
2413 prev = prev_nonnote_insn (prev);
2414 if (!prev)
2415 prev = a->head;
2416 del_first = tmp;
2417 }
2418 #endif
2419
2420 a_end = prev;
2421 }
2422 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2423 del_first = NEXT_INSN (a_end);
2424
2425 /* Delete everything marked above as well as crap that might be
2426 hanging out between the two blocks. */
2427 flow_delete_insn_chain (del_first, del_last);
2428
2429 /* Normally there should only be one successor of A and that is B, but
2430 partway through the merge of blocks for conditional_execution we'll
2431 be merging a TEST block with THEN and ELSE successors. Free the
2432 whole lot of them and hope the caller knows what they're doing. */
2433 while (a->succ)
2434 remove_edge (a->succ);
2435
2436 /* Adjust the edges out of B for the new owner. */
2437 for (e = b->succ; e; e = e->succ_next)
2438 e->src = a;
2439 a->succ = b->succ;
2440
2441 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2442 b->pred = b->succ = NULL;
2443
2444 /* Reassociate the insns of B with A. */
2445 if (!b_empty)
2446 {
2447 if (basic_block_for_insn)
2448 {
2449 BLOCK_FOR_INSN (b_head) = a;
2450 while (b_head != b_end)
2451 {
2452 b_head = NEXT_INSN (b_head);
2453 BLOCK_FOR_INSN (b_head) = a;
2454 }
2455 }
2456 a_end = b_end;
2457 }
2458 a->end = a_end;
2459
2460 expunge_block (b);
2461 }
2462
2463 /* Blocks A and B are to be merged into a single block. A has no incoming
2464 fallthru edge, so it can be moved before B without adding or modifying
2465 any jumps (aside from the jump from A to B). */
2466
2467 static int
2468 merge_blocks_move_predecessor_nojumps (a, b)
2469 basic_block a, b;
2470 {
2471 rtx start, end, barrier;
2472 int index;
2473
2474 start = a->head;
2475 end = a->end;
2476
2477 barrier = next_nonnote_insn (end);
2478 if (GET_CODE (barrier) != BARRIER)
2479 abort ();
2480 flow_delete_insn (barrier);
2481
2482 /* Move block and loop notes out of the chain so that we do not
2483 disturb their order.
2484
2485 ??? A better solution would be to squeeze out all the non-nested notes
2486 and adjust the block trees appropriately. Even better would be to have
2487 a tighter connection between block trees and rtl so that this is not
2488 necessary. */
2489 start = squeeze_notes (start, end);
2490
2491 /* Scramble the insn chain. */
2492 if (end != PREV_INSN (b->head))
2493 reorder_insns (start, end, PREV_INSN (b->head));
2494
2495 if (rtl_dump_file)
2496 {
2497 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2498 a->index, b->index);
2499 }
2500
2501 /* Swap the records for the two blocks around. Although we are deleting B,
2502 A is now where B was and we want to compact the BB array from where
2503 A used to be. */
2504 BASIC_BLOCK (a->index) = b;
2505 BASIC_BLOCK (b->index) = a;
2506 index = a->index;
2507 a->index = b->index;
2508 b->index = index;
2509
2510 /* Now blocks A and B are contiguous. Merge them. */
2511 merge_blocks_nomove (a, b);
2512
2513 return 1;
2514 }
2515
2516 /* Blocks A and B are to be merged into a single block. B has no outgoing
2517 fallthru edge, so it can be moved after A without adding or modifying
2518 any jumps (aside from the jump from A to B). */
2519
2520 static int
2521 merge_blocks_move_successor_nojumps (a, b)
2522 basic_block a, b;
2523 {
2524 rtx start, end, barrier;
2525
2526 start = b->head;
2527 end = b->end;
2528 barrier = NEXT_INSN (end);
2529
2530 /* Recognize a jump table following block B. */
2531 if (GET_CODE (barrier) == CODE_LABEL
2532 && NEXT_INSN (barrier)
2533 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2534 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2535 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2536 {
2537 end = NEXT_INSN (barrier);
2538 barrier = NEXT_INSN (end);
2539 }
2540
2541 /* There had better have been a barrier there. Delete it. */
2542 if (GET_CODE (barrier) != BARRIER)
2543 abort ();
2544 flow_delete_insn (barrier);
2545
2546 /* Move block and loop notes out of the chain so that we do not
2547 disturb their order.
2548
2549 ??? A better solution would be to squeeze out all the non-nested notes
2550 and adjust the block trees appropriately. Even better would be to have
2551 a tighter connection between block trees and rtl so that this is not
2552 necessary. */
2553 start = squeeze_notes (start, end);
2554
2555 /* Scramble the insn chain. */
2556 reorder_insns (start, end, a->end);
2557
2558 /* Now blocks A and B are contiguous. Merge them. */
2559 merge_blocks_nomove (a, b);
2560
2561 if (rtl_dump_file)
2562 {
2563 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2564 b->index, a->index);
2565 }
2566
2567 return 1;
2568 }
2569
2570 /* Attempt to merge basic blocks that are potentially non-adjacent.
2571 Return true iff the attempt succeeded. */
2572
2573 static int
2574 merge_blocks (e, b, c)
2575 edge e;
2576 basic_block b, c;
2577 {
2578 /* If C has a tail recursion label, do not merge. There is no
2579 edge recorded from the call_placeholder back to this label, as
2580 that would make optimize_sibling_and_tail_recursive_calls more
2581 complex for no gain. */
2582 if (GET_CODE (c->head) == CODE_LABEL
2583 && tail_recursion_label_p (c->head))
2584 return 0;
2585
2586 /* If B has a fallthru edge to C, no need to move anything. */
2587 if (e->flags & EDGE_FALLTHRU)
2588 {
2589 merge_blocks_nomove (b, c);
2590
2591 if (rtl_dump_file)
2592 {
2593 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2594 b->index, c->index);
2595 }
2596
2597 return 1;
2598 }
2599 else
2600 {
2601 edge tmp_edge;
2602 basic_block d;
2603 int c_has_outgoing_fallthru;
2604 int b_has_incoming_fallthru;
2605
2606 /* We must make sure to not munge nesting of exception regions,
2607 lexical blocks, and loop notes.
2608
2609 The first is taken care of by requiring that the active eh
2610 region at the end of one block always matches the active eh
2611 region at the beginning of the next block.
2612
2613 The latter two are taken care of by squeezing out all the notes. */
2614
2615 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2616 executed and we may want to treat blocks which have two out
2617 edges, one normal, one abnormal as only having one edge for
2618 block merging purposes. */
2619
2620 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2621 if (tmp_edge->flags & EDGE_FALLTHRU)
2622 break;
2623 c_has_outgoing_fallthru = (tmp_edge != NULL);
2624
2625 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2626 if (tmp_edge->flags & EDGE_FALLTHRU)
2627 break;
2628 b_has_incoming_fallthru = (tmp_edge != NULL);
2629
2630 /* If B does not have an incoming fallthru, and the exception regions
2631 match, then it can be moved immediately before C without introducing
2632 or modifying jumps.
2633
2634 C can not be the first block, so we do not have to worry about
2635 accessing a non-existent block. */
2636 d = BASIC_BLOCK (c->index - 1);
2637 if (! b_has_incoming_fallthru
2638 && d->eh_end == b->eh_beg
2639 && b->eh_end == c->eh_beg)
2640 return merge_blocks_move_predecessor_nojumps (b, c);
2641
2642 /* Otherwise, we're going to try to move C after B. Make sure the
2643 exception regions match.
2644
2645 If B is the last basic block, then we must not try to access the
2646 block structure for block B + 1. Luckily in that case we do not
2647 need to worry about matching exception regions. */
2648 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2649 if (b->eh_end == c->eh_beg
2650 && (d == NULL || c->eh_end == d->eh_beg))
2651 {
2652 /* If C does not have an outgoing fallthru, then it can be moved
2653 immediately after B without introducing or modifying jumps. */
2654 if (! c_has_outgoing_fallthru)
2655 return merge_blocks_move_successor_nojumps (b, c);
2656
2657 /* Otherwise, we'll need to insert an extra jump, and possibly
2658 a new block to contain it. */
2659 /* ??? Not implemented yet. */
2660 }
2661
2662 return 0;
2663 }
2664 }
2665
2666 /* Top level driver for merge_blocks. */
2667
2668 static void
2669 try_merge_blocks ()
2670 {
2671 int i;
2672
2673 /* Attempt to merge blocks as made possible by edge removal. If a block
2674 has only one successor, and the successor has only one predecessor,
2675 they may be combined. */
2676
2677 for (i = 0; i < n_basic_blocks;)
2678 {
2679 basic_block c, b = BASIC_BLOCK (i);
2680 edge s;
2681
2682 /* A loop because chains of blocks might be combinable. */
2683 while ((s = b->succ) != NULL
2684 && s->succ_next == NULL
2685 && (s->flags & EDGE_EH) == 0
2686 && (c = s->dest) != EXIT_BLOCK_PTR
2687 && c->pred->pred_next == NULL
2688 /* If the jump insn has side effects, we can't kill the edge. */
2689 && (GET_CODE (b->end) != JUMP_INSN
2690 || onlyjump_p (b->end))
2691 && merge_blocks (s, b, c))
2692 continue;
2693
2694 /* Don't get confused by the index shift caused by deleting blocks. */
2695 i = b->index + 1;
2696 }
2697 }
2698
2699 /* The given edge should potentially be a fallthru edge. If that is in
2700 fact true, delete the jump and barriers that are in the way. */
2701
2702 void
2703 tidy_fallthru_edge (e, b, c)
2704 edge e;
2705 basic_block b, c;
2706 {
2707 rtx q;
2708
2709 /* ??? In a late-running flow pass, other folks may have deleted basic
2710 blocks by nopping out blocks, leaving multiple BARRIERs between here
2711 and the target label. They ought to be chastised and fixed.
2712
2713 We can also wind up with a sequence of undeletable labels between
2714 one block and the next.
2715
2716 So search through a sequence of barriers, labels, and notes for
2717 the head of block C and assert that we really do fall through. */
2718
2719 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2720 return;
2721
2722 /* Remove what will soon cease being the jump insn from the source block.
2723 If block B consisted only of this single jump, turn it into a deleted
2724 note. */
2725 q = b->end;
2726 if (GET_CODE (q) == JUMP_INSN
2727 && onlyjump_p (q)
2728 && (any_uncondjump_p (q)
2729 || (b->succ == e && e->succ_next == NULL)))
2730 {
2731 #ifdef HAVE_cc0
2732 /* If this was a conditional jump, we need to also delete
2733 the insn that set cc0. */
2734 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2735 q = PREV_INSN (q);
2736 #endif
2737
2738 if (b->head == q)
2739 {
2740 PUT_CODE (q, NOTE);
2741 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2742 NOTE_SOURCE_FILE (q) = 0;
2743 }
2744 else
2745 q = PREV_INSN (q);
2746
2747 b->end = q;
2748 }
2749
2750 /* Selectively unlink the sequence. */
2751 if (q != PREV_INSN (c->head))
2752 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2753
2754 e->flags |= EDGE_FALLTHRU;
2755 }
2756
2757 /* Fix up edges that now fall through, or rather should now fall through
2758 but previously required a jump around now deleted blocks. Simplify
2759 the search by only examining blocks numerically adjacent, since this
2760 is how find_basic_blocks created them. */
2761
2762 static void
2763 tidy_fallthru_edges ()
2764 {
2765 int i;
2766
2767 for (i = 1; i < n_basic_blocks; ++i)
2768 {
2769 basic_block b = BASIC_BLOCK (i - 1);
2770 basic_block c = BASIC_BLOCK (i);
2771 edge s;
2772
2773 /* We care about simple conditional or unconditional jumps with
2774 a single successor.
2775
2776 If we had a conditional branch to the next instruction when
2777 find_basic_blocks was called, then there will only be one
2778 out edge for the block which ended with the conditional
2779 branch (since we do not create duplicate edges).
2780
2781 Furthermore, the edge will be marked as a fallthru because we
2782 merge the flags for the duplicate edges. So we do not want to
2783 check that the edge is not a FALLTHRU edge. */
2784 if ((s = b->succ) != NULL
2785 && s->succ_next == NULL
2786 && s->dest == c
2787 /* If the jump insn has side effects, we can't tidy the edge. */
2788 && (GET_CODE (b->end) != JUMP_INSN
2789 || onlyjump_p (b->end)))
2790 tidy_fallthru_edge (s, b, c);
2791 }
2792 }
2793 \f
2794 /* Perform data flow analysis.
2795 F is the first insn of the function; FLAGS is a set of PROP_* flags
2796 to be used in accumulating flow info. */
2797
2798 void
2799 life_analysis (f, file, flags)
2800 rtx f;
2801 FILE *file;
2802 int flags;
2803 {
2804 #ifdef ELIMINABLE_REGS
2805 register int i;
2806 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2807 #endif
2808
2809 /* Record which registers will be eliminated. We use this in
2810 mark_used_regs. */
2811
2812 CLEAR_HARD_REG_SET (elim_reg_set);
2813
2814 #ifdef ELIMINABLE_REGS
2815 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2816 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2817 #else
2818 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2819 #endif
2820
2821 if (! optimize)
2822 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2823
2824 /* The post-reload life analysis has (on a global basis) the same
2825 registers live as were computed by reload itself. Otherwise register
2826 elimination offsets and such may be incorrect.
2827
2828 Reload will mark some registers as live even though they do not
2829 appear in the rtl.
2830
2831 We don't want to create new auto-incs after reload, since they
2832 are unlikely to be useful and can cause problems with shared
2833 stack slots. */
2834 if (reload_completed)
2835 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2836
2837 /* We want alias analysis information for local dead store elimination. */
2838 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2839 init_alias_analysis ();
2840
2841 /* Always remove no-op moves. Do this before other processing so
2842 that we don't have to keep re-scanning them. */
2843 delete_noop_moves (f);
2844
2845 /* Some targets can emit simpler epilogues if they know that sp was
2846 not ever modified during the function. After reload, of course,
2847 we've already emitted the epilogue so there's no sense searching. */
2848 if (! reload_completed)
2849 notice_stack_pointer_modification (f);
2850
2851 /* Allocate and zero out data structures that will record the
2852 data from lifetime analysis. */
2853 allocate_reg_life_data ();
2854 allocate_bb_life_data ();
2855
2856 /* Find the set of registers live on function exit. */
2857 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2858
2859 /* "Update" life info from zero. It'd be nice to begin the
2860 relaxation with just the exit and noreturn blocks, but that set
2861 is not immediately handy. */
2862
2863 if (flags & PROP_REG_INFO)
2864 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2865 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2866
2867 /* Clean up. */
2868 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2869 end_alias_analysis ();
2870
2871 if (file)
2872 dump_flow_info (file);
2873
2874 free_basic_block_vars (1);
2875 }
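
#if 0
/* Illustrative sketch, not in the original source: a possible invocation
   of life_analysis from a pass driver.  The helper name and the flag
   combination are assumptions made only for this example; the PROP_*
   values shown are simply the properties this file itself tests for.  */
static void
example_run_life_analysis (f, file)
     rtx f;
     FILE *file;
{
  life_analysis (f, file,
		 PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE
		 | PROP_LOG_LINKS | PROP_REG_INFO);
}
#endif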
2876
2877 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2878 Search for REGNO. If found, abort if it is not wider than word_mode. */
2879
2880 static int
2881 verify_wide_reg_1 (px, pregno)
2882 rtx *px;
2883 void *pregno;
2884 {
2885 rtx x = *px;
2886 unsigned int regno = *(int *) pregno;
2887
2888 if (GET_CODE (x) == REG && REGNO (x) == regno)
2889 {
2890 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2891 abort ();
2892 return 1;
2893 }
2894 return 0;
2895 }
2896
2897 /* A subroutine of verify_local_live_at_start. Search through insns
2898 between HEAD and END looking for register REGNO. */
2899
2900 static void
2901 verify_wide_reg (regno, head, end)
2902 int regno;
2903 rtx head, end;
2904 {
2905 while (1)
2906 {
2907 if (INSN_P (head)
2908 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2909 return;
2910 if (head == end)
2911 break;
2912 head = NEXT_INSN (head);
2913 }
2914
2915 /* We didn't find the register at all. Something's way screwy. */
2916 if (rtl_dump_file)
2917 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
2918 print_rtl_and_abort ();
2919 }
2920
2921 /* A subroutine of update_life_info. Verify that there are no untoward
2922 changes in live_at_start during a local update. */
2923
2924 static void
2925 verify_local_live_at_start (new_live_at_start, bb)
2926 regset new_live_at_start;
2927 basic_block bb;
2928 {
2929 if (reload_completed)
2930 {
2931 /* After reload, there are no pseudos, nor subregs of multi-word
2932 registers. The regsets should exactly match. */
2933 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2934 {
2935 if (rtl_dump_file)
2936 {
2937 fprintf (rtl_dump_file,
2938 "live_at_start mismatch in bb %d, aborting\n",
2939 bb->index);
2940 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
2941 debug_bitmap_file (rtl_dump_file, new_live_at_start);
2942 }
2943 print_rtl_and_abort ();
2944 }
2945 }
2946 else
2947 {
2948 int i;
2949
2950 /* Find the set of changed registers. */
2951 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2952
2953 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2954 {
2955 /* No registers should die. */
2956 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2957 {
2958 if (rtl_dump_file)
2959 fprintf (rtl_dump_file,
2960 "Register %d died unexpectedly in block %d\n", i,
2961 bb->index);
2962 print_rtl_and_abort ();
2963 }
2964
2965 /* Verify that the now-live register is wider than word_mode. */
2966 verify_wide_reg (i, bb->head, bb->end);
2967 });
2968 }
2969 }
2970
2971 /* Updates life information starting with the basic blocks set in BLOCKS.
2972 If BLOCKS is null, consider it to be the universal set.
2973
2974 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2975 we are only expecting local modifications to basic blocks. If we find
2976 extra registers live at the beginning of a block, then we either killed
2977 useful data, or we have a broken split that wants data not provided.
2978 If we find registers removed from live_at_start, that means we have
2979 a broken peephole that is killing a register it shouldn't.
2980
2981 ??? This is not true in one situation -- when a pre-reload splitter
2982 generates subregs of a multi-word pseudo, current life analysis will
2983 lose the kill. So we _can_ have a pseudo go live. How irritating.
2984
2985 Including PROP_REG_INFO does not properly refresh regs_ever_live
2986 unless the caller resets it to zero. */
2987
2988 void
2989 update_life_info (blocks, extent, prop_flags)
2990 sbitmap blocks;
2991 enum update_life_extent extent;
2992 int prop_flags;
2993 {
2994 regset tmp;
2995 regset_head tmp_head;
2996 int i;
2997
2998 tmp = INITIALIZE_REG_SET (tmp_head);
2999
3000 if (reload_completed)
3001 prop_flags |= PROP_POSTRELOAD;
3002
3003 /* For a global update, we go through the relaxation process again. */
3004 if (extent != UPDATE_LIFE_LOCAL)
3005 {
3006 calculate_global_regs_live (blocks, blocks,
3007 prop_flags & PROP_SCAN_DEAD_CODE);
3008
3009 /* If asked, remove notes from the blocks we'll update. */
3010 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3011 count_or_remove_death_notes (blocks, 1);
3012 }
3013
3014 if (blocks)
3015 {
3016 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3017 {
3018 basic_block bb = BASIC_BLOCK (i);
3019
3020 COPY_REG_SET (tmp, bb->global_live_at_end);
3021 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3022
3023 if (extent == UPDATE_LIFE_LOCAL)
3024 verify_local_live_at_start (tmp, bb);
3025 });
3026 }
3027 else
3028 {
3029 for (i = n_basic_blocks - 1; i >= 0; --i)
3030 {
3031 basic_block bb = BASIC_BLOCK (i);
3032
3033 COPY_REG_SET (tmp, bb->global_live_at_end);
3034 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3035
3036 if (extent == UPDATE_LIFE_LOCAL)
3037 verify_local_live_at_start (tmp, bb);
3038 }
3039 }
3040
3041 FREE_REG_SET (tmp);
3042
3043 if (prop_flags & PROP_REG_INFO)
3044 {
3045 /* The only pseudos that are live at the beginning of the function
3046 are those that were not set anywhere in the function. local-alloc
3047 doesn't know how to handle these correctly, so mark them as not
3048 local to any one basic block. */
3049 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3050 FIRST_PSEUDO_REGISTER, i,
3051 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3052
3053 /* We have a problem with any pseudoreg that lives across the setjmp.
3054 ANSI says that if a user variable does not change in value between
3055 the setjmp and the longjmp, then the longjmp preserves it. This
3056 includes longjmp from a place where the pseudo appears dead.
3057 (In principle, the value still exists if it is in scope.)
3058 If the pseudo goes in a hard reg, some other value may occupy
3059 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3060 Conclusion: such a pseudo must not go in a hard reg. */
3061 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3062 FIRST_PSEUDO_REGISTER, i,
3063 {
3064 if (regno_reg_rtx[i] != 0)
3065 {
3066 REG_LIVE_LENGTH (i) = -1;
3067 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3068 }
3069 });
3070 }
3071 }
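
#if 0
/* Illustrative sketch, not in the original source: refreshing life info
   for a single block BB after a purely local change such as a peephole.
   The helper name and the way the flags are passed through are assumptions
   made only for this example; sbitmap_alloc/sbitmap_free are the usual
   sbitmap allocators.  */
static void
example_update_one_block (bb, prop_flags)
     basic_block bb;
     int prop_flags;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, prop_flags);
  sbitmap_free (blocks);
}
#endif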
3072
3073 /* Free the variables allocated by find_basic_blocks.
3074
3075 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3076
3077 void
3078 free_basic_block_vars (keep_head_end_p)
3079 int keep_head_end_p;
3080 {
3081 if (basic_block_for_insn)
3082 {
3083 VARRAY_FREE (basic_block_for_insn);
3084 basic_block_for_insn = NULL;
3085 }
3086
3087 if (! keep_head_end_p)
3088 {
3089 clear_edges ();
3090 VARRAY_FREE (basic_block_info);
3091 n_basic_blocks = 0;
3092
3093 ENTRY_BLOCK_PTR->aux = NULL;
3094 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3095 EXIT_BLOCK_PTR->aux = NULL;
3096 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3097 }
3098 }
3099
3100 /* Return nonzero if the destination of SET equals the source. */
3101
3102 static int
3103 set_noop_p (set)
3104 rtx set;
3105 {
3106 rtx src = SET_SRC (set);
3107 rtx dst = SET_DEST (set);
3108
3109 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
3110 {
3111 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
3112 return 0;
3113 src = SUBREG_REG (src);
3114 dst = SUBREG_REG (dst);
3115 }
3116
3117 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
3118 && REGNO (src) == REGNO (dst));
3119 }
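
#if 0
/* Illustrative sketch, not in the original source: set_noop_p recognizes
   a self-copy such as the SET built here.  Register number 5 and SImode
   are arbitrary choices, and the helper name is an assumption made only
   for this example.  */
static void
example_noop_set ()
{
  rtx reg = gen_rtx_REG (SImode, 5);
  rtx set = gen_rtx_SET (VOIDmode, reg, reg);

  /* A copy of a register to itself has no effect.  */
  if (! set_noop_p (set))
    abort ();
}
#endif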
3120
3121 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3122 value to itself. */
3123
3124 static int
3125 noop_move_p (insn)
3126 rtx insn;
3127 {
3128 rtx pat = PATTERN (insn);
3129
3130 /* Insns carrying these notes are useful later on. */
3131 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3132 return 0;
3133
3134 if (GET_CODE (pat) == SET && set_noop_p (pat))
3135 return 1;
3136
3137 if (GET_CODE (pat) == PARALLEL)
3138 {
3139 int i;
3140 /* If nothing but SETs of registers to themselves,
3141 this insn can also be deleted. */
3142 for (i = 0; i < XVECLEN (pat, 0); i++)
3143 {
3144 rtx tem = XVECEXP (pat, 0, i);
3145
3146 if (GET_CODE (tem) == USE
3147 || GET_CODE (tem) == CLOBBER)
3148 continue;
3149
3150 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3151 return 0;
3152 }
3153
3154 return 1;
3155 }
3156 return 0;
3157 }
3158
3159 /* Delete any insns that copy a register to itself. */
3160
3161 static void
3162 delete_noop_moves (f)
3163 rtx f;
3164 {
3165 rtx insn;
3166 for (insn = f; insn; insn = NEXT_INSN (insn))
3167 {
3168 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3169 {
3170 PUT_CODE (insn, NOTE);
3171 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3172 NOTE_SOURCE_FILE (insn) = 0;
3173 }
3174 }
3175 }
3176
3177 /* Determine if the stack pointer is constant over the life of the function.
3178 Only useful before prologues have been emitted. */
3179
3180 static void
3181 notice_stack_pointer_modification_1 (x, pat, data)
3182 rtx x;
3183 rtx pat ATTRIBUTE_UNUSED;
3184 void *data ATTRIBUTE_UNUSED;
3185 {
3186 if (x == stack_pointer_rtx
3187 /* The stack pointer is only modified indirectly as the result
3188 of a push until later in flow. See the comments in rtl.texi
3189 regarding Embedded Side-Effects on Addresses. */
3190 || (GET_CODE (x) == MEM
3191 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3192 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3193 current_function_sp_is_unchanging = 0;
3194 }
3195
3196 static void
3197 notice_stack_pointer_modification (f)
3198 rtx f;
3199 {
3200 rtx insn;
3201
3202 /* Assume that the stack pointer is unchanging if alloca hasn't
3203 been used. */
3204 current_function_sp_is_unchanging = !current_function_calls_alloca;
3205 if (! current_function_sp_is_unchanging)
3206 return;
3207
3208 for (insn = f; insn; insn = NEXT_INSN (insn))
3209 {
3210 if (INSN_P (insn))
3211 {
3212 /* Check if insn modifies the stack pointer. */
3213 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3214 NULL);
3215 if (! current_function_sp_is_unchanging)
3216 return;
3217 }
3218 }
3219 }
3220
3221 /* Mark a register in SET. Hard registers in large modes get all
3222 of their component registers set as well. */
3223
3224 static void
3225 mark_reg (reg, xset)
3226 rtx reg;
3227 void *xset;
3228 {
3229 regset set = (regset) xset;
3230 int regno = REGNO (reg);
3231
3232 if (GET_MODE (reg) == BLKmode)
3233 abort ();
3234
3235 SET_REGNO_REG_SET (set, regno);
3236 if (regno < FIRST_PSEUDO_REGISTER)
3237 {
3238 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3239 while (--n > 0)
3240 SET_REGNO_REG_SET (set, regno + n);
3241 }
3242 }
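
#if 0
/* Illustrative sketch, not in the original source: for a hard register in
   a multi-word mode, mark_reg sets a bit for each component register.
   The helper name, register number 0 and the use of DImode are assumptions
   made only for this example.  */
static void
example_mark_multiword_reg ()
{
  regset_head head;
  regset set = INITIALIZE_REG_SET (head);
  rtx reg = gen_rtx_REG (DImode, 0);

  mark_reg (reg, (void *) set);

  /* The first component register is always marked; on a target where
     DImode occupies two word-sized hard registers, bit 1 is set too.  */
  if (! REGNO_REG_SET_P (set, 0))
    abort ();

  FREE_REG_SET (set);
}
#endif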
3243
3244 /* Mark those regs which are needed at the end of the function as live
3245 at the end of the last basic block. */
3246
3247 static void
3248 mark_regs_live_at_end (set)
3249 regset set;
3250 {
3251 int i;
3252
3253 /* If exiting needs the right stack value, consider the stack pointer
3254 live at the end of the function. */
3255 if ((HAVE_epilogue && reload_completed)
3256 || ! EXIT_IGNORE_STACK
3257 || (! FRAME_POINTER_REQUIRED
3258 && ! current_function_calls_alloca
3259 && flag_omit_frame_pointer)
3260 || current_function_sp_is_unchanging)
3261 {
3262 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3263 }
3264
3265 /* Mark the frame pointer if needed at the end of the function. If
3266 we end up eliminating it, it will be removed from the live list
3267 of each basic block by reload. */
3268
3269 if (! reload_completed || frame_pointer_needed)
3270 {
3271 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3272 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3273 /* If they are different, also mark the hard frame pointer as live. */
3274 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3275 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3276 #endif
3277 }
3278
3279 #ifdef PIC_OFFSET_TABLE_REGNUM
3280 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3281 /* Many architectures have a GP register even without flag_pic.
3282 Assume the pic register is not in use, or will be handled by
3283 other means, if it is not fixed. */
3284 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3285 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3286 #endif
3287 #endif
3288
3289 /* Mark all global registers, and all registers used by the epilogue
3290 as being live at the end of the function since they may be
3291 referenced by our caller. */
3292 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3293 if (global_regs[i] || EPILOGUE_USES (i))
3294 SET_REGNO_REG_SET (set, i);
3295
3296 /* Mark all call-saved registers that we actually used. */
3297 if (HAVE_epilogue && reload_completed)
3298 {
3299 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3300 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3301 SET_REGNO_REG_SET (set, i);
3302 }
3303
3304 /* Mark function return value. */
3305 diddle_return_value (mark_reg, set);
3306 }
3307
3308 /* Callback function for for_each_successor_phi. DATA is a regset.
3309 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3310 INSN, in the regset. */
3311
3312 static int
3313 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3314 rtx insn ATTRIBUTE_UNUSED;
3315 int dest_regno ATTRIBUTE_UNUSED;
3316 int src_regno;
3317 void *data;
3318 {
3319 regset live = (regset) data;
3320 SET_REGNO_REG_SET (live, src_regno);
3321 return 0;
3322 }
3323
3324 /* Propagate global life info around the graph of basic blocks. Begin
3325 considering blocks with their corresponding bit set in BLOCKS_IN.
3326 If BLOCKS_IN is null, consider it the universal set.
3327
3328 BLOCKS_OUT is set for every block that was changed. */
3329
3330 static void
3331 calculate_global_regs_live (blocks_in, blocks_out, flags)
3332 sbitmap blocks_in, blocks_out;
3333 int flags;
3334 {
3335 basic_block *queue, *qhead, *qtail, *qend;
3336 regset tmp, new_live_at_end;
3337 regset_head tmp_head;
3338 regset_head new_live_at_end_head;
3339 int i;
3340
3341 tmp = INITIALIZE_REG_SET (tmp_head);
3342 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3343
3344 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3345 more because the `head == tail' style test for an empty queue doesn't
3346 work with a full queue. */
3347 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3348 qtail = queue;
3349 qhead = qend = queue + n_basic_blocks + 2;
3350
3351 /* Queue the blocks set in the initial mask. Do this in reverse block
3352 number order so that the first round is more likely to do
3353 useful work. We use AUX non-null to flag that the block is queued. */
3354 if (blocks_in)
3355 {
3356 /* Clear out the garbage that might be hanging out in bb->aux. */
3357 for (i = n_basic_blocks - 1; i >= 0; --i)
3358 BASIC_BLOCK (i)->aux = NULL;
3359
3360 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3361 {
3362 basic_block bb = BASIC_BLOCK (i);
3363 *--qhead = bb;
3364 bb->aux = bb;
3365 });
3366 }
3367 else
3368 {
3369 for (i = 0; i < n_basic_blocks; ++i)
3370 {
3371 basic_block bb = BASIC_BLOCK (i);
3372 *--qhead = bb;
3373 bb->aux = bb;
3374 }
3375 }
3376
3377 if (blocks_out)
3378 sbitmap_zero (blocks_out);
3379
3380 while (qhead != qtail)
3381 {
3382 int rescan, changed;
3383 basic_block bb;
3384 edge e;
3385
3386 bb = *qhead++;
3387 if (qhead == qend)
3388 qhead = queue;
3389 bb->aux = NULL;
3390
3391 /* Begin by propagating live_at_start from the successor blocks. */
3392 CLEAR_REG_SET (new_live_at_end);
3393 for (e = bb->succ; e; e = e->succ_next)
3394 {
3395 basic_block sb = e->dest;
3396 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3397 }
3398
3399 /* The all-important stack pointer must always be live. */
3400 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3401
3402 /* Before reload, there are a few registers that must be forced
3403 live everywhere -- which might not already be the case for
3404 blocks within infinite loops. */
3405 if (! reload_completed)
3406 {
3407 /* Any reference to any pseudo before reload is a potential
3408 reference of the frame pointer. */
3409 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3410
3411 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3412 /* Pseudos with argument area equivalences may require
3413 reloading via the argument pointer. */
3414 if (fixed_regs[ARG_POINTER_REGNUM])
3415 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3416 #endif
3417
3418 #ifdef PIC_OFFSET_TABLE_REGNUM
3419 /* Any constant, or pseudo with constant equivalences, may
3420 require reloading from memory using the pic register. */
3421 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3422 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3423 #endif
3424 }
3425
3426 /* Regs used in phi nodes are not included in
3427 global_live_at_start, since they are live only along a
3428 particular edge. Set those regs that are live because of a
3429 phi node alternative corresponding to this particular block. */
3430 if (in_ssa_form)
3431 for_each_successor_phi (bb, &set_phi_alternative_reg,
3432 new_live_at_end);
3433
3434 if (bb == ENTRY_BLOCK_PTR)
3435 {
3436 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3437 continue;
3438 }
3439
3440 /* On our first pass through this block, we'll go ahead and continue.
3441 Recognize the first pass by local_set being NULL. On subsequent passes, we
3442 get to skip out early if live_at_end wouldn't have changed. */
3443
3444 if (bb->local_set == NULL)
3445 {
3446 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3447 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3448 rescan = 1;
3449 }
3450 else
3451 {
3452 /* If any bits were removed from live_at_end, we'll have to
3453 rescan the block. This wouldn't be necessary if we had
3454 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3455 local_live is really dependent on live_at_end. */
3456 CLEAR_REG_SET (tmp);
3457 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3458 new_live_at_end, BITMAP_AND_COMPL);
3459
3460 if (! rescan)
3461 {
3462 /* If any of the registers in the new live_at_end set are
3463 conditionally set in this basic block, we must rescan.
3464 This is because conditional lifetimes at the end of the
3465 block do not just take the live_at_end set into account,
3466 but also the liveness at the start of each successor
3467 block. We can miss changes in those sets if we only
3468 compare the new live_at_end against the previous one. */
3469 CLEAR_REG_SET (tmp);
3470 rescan = bitmap_operation (tmp, new_live_at_end,
3471 bb->cond_local_set, BITMAP_AND);
3472 }
3473
3474 if (! rescan)
3475 {
3476 /* Find the set of changed bits. Take this opportunity
3477 to notice that this set is empty and early out. */
3478 CLEAR_REG_SET (tmp);
3479 changed = bitmap_operation (tmp, bb->global_live_at_end,
3480 new_live_at_end, BITMAP_XOR);
3481 if (! changed)
3482 continue;
3483
3484 /* If any of the changed bits overlap with local_set,
3485 we'll have to rescan the block. Detect overlap by
3486 the AND with ~local_set turning off bits. */
3487 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3488 BITMAP_AND_COMPL);
3489 }
3490 }
3491
3492 /* Let our caller know that BB changed enough to require its
3493 death notes updated. */
3494 if (blocks_out)
3495 SET_BIT (blocks_out, bb->index);
3496
3497 if (! rescan)
3498 {
3499 /* Add to live_at_start the set of all registers in
3500 new_live_at_end that aren't in the old live_at_end. */
3501
3502 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3503 BITMAP_AND_COMPL);
3504 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3505
3506 changed = bitmap_operation (bb->global_live_at_start,
3507 bb->global_live_at_start,
3508 tmp, BITMAP_IOR);
3509 if (! changed)
3510 continue;
3511 }
3512 else
3513 {
3514 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3515
3516 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3517 into live_at_start. */
3518 propagate_block (bb, new_live_at_end, bb->local_set,
3519 bb->cond_local_set, flags);
3520
3521 /* If live_at_start didn't change, no need to go farther. */
3522 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3523 continue;
3524
3525 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3526 }
3527
3528 /* Queue all predecessors of BB so that we may re-examine
3529 their live_at_end. */
3530 for (e = bb->pred; e; e = e->pred_next)
3531 {
3532 basic_block pb = e->src;
3533 if (pb->aux == NULL)
3534 {
3535 *qtail++ = pb;
3536 if (qtail == qend)
3537 qtail = queue;
3538 pb->aux = pb;
3539 }
3540 }
3541 }
3542
3543 FREE_REG_SET (tmp);
3544 FREE_REG_SET (new_live_at_end);
3545
3546 if (blocks_out)
3547 {
3548 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3549 {
3550 basic_block bb = BASIC_BLOCK (i);
3551 FREE_REG_SET (bb->local_set);
3552 FREE_REG_SET (bb->cond_local_set);
3553 });
3554 }
3555 else
3556 {
3557 for (i = n_basic_blocks - 1; i >= 0; --i)
3558 {
3559 basic_block bb = BASIC_BLOCK (i);
3560 FREE_REG_SET (bb->local_set);
3561 FREE_REG_SET (bb->cond_local_set);
3562 }
3563 }
3564
3565 free (queue);
3566 }
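
/* In dataflow terms, the worklist above solves the usual backward
   liveness equations (stated here only as a summary of the code):

	live_at_start (B) = USE (B)  U  (live_at_end (B) - DEF (B))
	live_at_end (B)   = union over each successor S of B
			    of live_at_start (S)

   propagate_block evaluates the first equation for a single block, and
   the predecessors of any block whose sets changed are re-queued until
   a fixed point is reached.  */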
3567 \f
3568 /* Subroutines of life analysis. */
3569
3570 /* Allocate the permanent data structures that represent the results
3571 of life analysis. Not static since used also for stupid life analysis. */
3572
3573 static void
3574 allocate_bb_life_data ()
3575 {
3576 register int i;
3577
3578 for (i = 0; i < n_basic_blocks; i++)
3579 {
3580 basic_block bb = BASIC_BLOCK (i);
3581
3582 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3583 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3584 }
3585
3586 ENTRY_BLOCK_PTR->global_live_at_end
3587 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3588 EXIT_BLOCK_PTR->global_live_at_start
3589 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3590
3591 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3592 }
3593
3594 void
3595 allocate_reg_life_data ()
3596 {
3597 int i;
3598
3599 max_regno = max_reg_num ();
3600
3601 /* Recalculate the register space, in case it has grown. Old style
3602 vector oriented regsets would set regset_{size,bytes} here also. */
3603 allocate_reg_info (max_regno, FALSE, FALSE);
3604
3605 /* Reset all the data we'll collect in propagate_block and its
3606 subroutines. */
3607 for (i = 0; i < max_regno; i++)
3608 {
3609 REG_N_SETS (i) = 0;
3610 REG_N_REFS (i) = 0;
3611 REG_N_DEATHS (i) = 0;
3612 REG_N_CALLS_CROSSED (i) = 0;
3613 REG_LIVE_LENGTH (i) = 0;
3614 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3615 }
3616 }
3617
3618 /* Delete dead instructions for propagate_block. */
3619
3620 static void
3621 propagate_block_delete_insn (bb, insn)
3622 basic_block bb;
3623 rtx insn;
3624 {
3625 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3626
3627 /* If the insn referred to a label, and that label was attached to
3628 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3629 pretty much mandatory to delete it, because the ADDR_VEC may be
3630 referencing labels that no longer exist. */
3631
3632 if (inote)
3633 {
3634 rtx label = XEXP (inote, 0);
3635 rtx next;
3636
3637 if (LABEL_NUSES (label) == 1
3638 && (next = next_nonnote_insn (label)) != NULL
3639 && GET_CODE (next) == JUMP_INSN
3640 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3641 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3642 {
3643 rtx pat = PATTERN (next);
3644 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3645 int len = XVECLEN (pat, diff_vec_p);
3646 int i;
3647
3648 for (i = 0; i < len; i++)
3649 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3650
3651 flow_delete_insn (next);
3652 }
3653 }
3654
3655 if (bb->end == insn)
3656 bb->end = PREV_INSN (insn);
3657 flow_delete_insn (insn);
3658 }
3659
3660 /* Delete dead libcalls for propagate_block. Return the insn
3661 before the libcall. */
3662
3663 static rtx
3664 propagate_block_delete_libcall (bb, insn, note)
3665 basic_block bb;
3666 rtx insn, note;
3667 {
3668 rtx first = XEXP (note, 0);
3669 rtx before = PREV_INSN (first);
3670
3671 if (insn == bb->end)
3672 bb->end = before;
3673
3674 flow_delete_insn_chain (first, insn);
3675 return before;
3676 }
3677
3678 /* Update the life-status of regs for one insn. Return the previous insn. */
3679
3680 rtx
3681 propagate_one_insn (pbi, insn)
3682 struct propagate_block_info *pbi;
3683 rtx insn;
3684 {
3685 rtx prev = PREV_INSN (insn);
3686 int flags = pbi->flags;
3687 int insn_is_dead = 0;
3688 int libcall_is_dead = 0;
3689 rtx note;
3690 int i;
3691
3692 if (! INSN_P (insn))
3693 return prev;
3694
3695 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3696 if (flags & PROP_SCAN_DEAD_CODE)
3697 {
3698 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3699 REG_NOTES (insn));
3700 libcall_is_dead = (insn_is_dead && note != 0
3701 && libcall_dead_p (pbi, note, insn));
3702 }
3703
3704 /* We almost certainly don't want to delete prologue or epilogue
3705 instructions. Warn about probable compiler lossage. */
3706 if (insn_is_dead
3707 && reload_completed
3708 && (((HAVE_epilogue || HAVE_prologue)
3709 && prologue_epilogue_contains (insn))
3710 || (HAVE_sibcall_epilogue
3711 && sibcall_epilogue_contains (insn)))
3712 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3713 {
3714 if (flags & PROP_KILL_DEAD_CODE)
3715 {
3716 warning ("ICE: would have deleted prologue/epilogue insn");
3717 if (!inhibit_warnings)
3718 debug_rtx (insn);
3719 }
3720 libcall_is_dead = insn_is_dead = 0;
3721 }
3722
3723 /* If an instruction consists of just dead store(s) on the final pass,
3724 delete it. */
3725 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3726 {
3727 /* Record sets. Do this even for dead instructions, since they
3728 would have killed the values if they hadn't been deleted. */
3729 mark_set_regs (pbi, PATTERN (insn), insn);
3730
3731 /* CC0 is now known to be dead. Either this insn used it,
3732 in which case it doesn't anymore, or clobbered it,
3733 so the next insn can't use it. */
3734 pbi->cc0_live = 0;
3735
3736 if (libcall_is_dead)
3737 {
3738 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3739 insn = NEXT_INSN (prev);
3740 }
3741 else
3742 propagate_block_delete_insn (pbi->bb, insn);
3743
3744 return prev;
3745 }
3746
3747 /* See if this is an increment or decrement that can be merged into
3748 a following memory address. */
3749 #ifdef AUTO_INC_DEC
3750 {
3751 register rtx x = single_set (insn);
3752
3753 /* Does this instruction increment or decrement a register? */
3754 if ((flags & PROP_AUTOINC)
3755 && x != 0
3756 && GET_CODE (SET_DEST (x)) == REG
3757 && (GET_CODE (SET_SRC (x)) == PLUS
3758 || GET_CODE (SET_SRC (x)) == MINUS)
3759 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3760 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3761 /* Ok, look for a following memory ref we can combine with.
3762 If one is found, change the memory ref to a PRE_INC
3763 or PRE_DEC, cancel this insn, and return 1.
3764 Return 0 if nothing has been done. */
3765 && try_pre_increment_1 (pbi, insn))
3766 return prev;
3767 }
3768 #endif /* AUTO_INC_DEC */
3769
3770 CLEAR_REG_SET (pbi->new_set);
3771
3772 /* If this is not the final pass, and this insn is copying the value of
3773 a library call and it's dead, don't scan the insns that perform the
3774 library call, so that the call's arguments are not marked live. */
3775 if (libcall_is_dead)
3776 {
3777 /* Record the death of the dest reg. */
3778 mark_set_regs (pbi, PATTERN (insn), insn);
3779
3780 insn = XEXP (note, 0);
3781 return PREV_INSN (insn);
3782 }
3783 else if (GET_CODE (PATTERN (insn)) == SET
3784 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3785 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3786 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3787 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3788 /* We have an insn to pop a constant amount off the stack.
3789 (Such insns use PLUS regardless of the direction of the stack,
3790 and any insn to adjust the stack by a constant is always a pop.)
3791 These insns, if not dead stores, have no effect on life. */
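/* E.g. (illustrative): (set (reg sp) (plus (reg sp) (const_int 16))).  */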
3792 ;
3793 else
3794 {
3795 /* Any regs live at the time of a call instruction must not go
3796 in a register clobbered by calls. Find all regs now live and
3797 record this for them. */
3798
3799 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3800 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3801 { REG_N_CALLS_CROSSED (i)++; });
3802
3803 /* Record sets. Do this even for dead instructions, since they
3804 would have killed the values if they hadn't been deleted. */
3805 mark_set_regs (pbi, PATTERN (insn), insn);
3806
3807 if (GET_CODE (insn) == CALL_INSN)
3808 {
3809 register int i;
3810 rtx note, cond;
3811
3812 cond = NULL_RTX;
3813 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3814 cond = COND_EXEC_TEST (PATTERN (insn));
3815
3816 /* Non-constant calls clobber memory. */
3817 if (! CONST_CALL_P (insn))
3818 free_EXPR_LIST_list (&pbi->mem_set_list);
3819
3820 /* There may be extra registers to be clobbered. */
3821 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3822 note;
3823 note = XEXP (note, 1))
3824 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3825 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3826 cond, insn, pbi->flags);
3827
3828 /* Calls change all call-used and global registers. */
3829 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3830 if (call_used_regs[i] && ! global_regs[i]
3831 && ! fixed_regs[i])
3832 {
3833 /* We do not want REG_UNUSED notes for these registers. */
3834 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3835 cond, insn,
3836 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3837 }
3838 }
3839
3840 /* If an insn doesn't use CC0, CC0 becomes dead, since we assume
3841 that every insn clobbers it. So show it dead here;
3842 mark_used_regs will set it live if it is referenced. */
3843 pbi->cc0_live = 0;
3844
3845 /* Record uses. */
3846 if (! insn_is_dead)
3847 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3848
3849 /* Sometimes we may have inserted something before INSN (such as a move)
3850 when we make an auto-inc. So ensure we will scan those insns. */
3851 #ifdef AUTO_INC_DEC
3852 prev = PREV_INSN (insn);
3853 #endif
3854
3855 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3856 {
3857 register int i;
3858 rtx note, cond;
3859
3860 cond = NULL_RTX;
3861 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3862 cond = COND_EXEC_TEST (PATTERN (insn));
3863
3864 /* Calls use their arguments. */
3865 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3866 note;
3867 note = XEXP (note, 1))
3868 if (GET_CODE (XEXP (note, 0)) == USE)
3869 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3870 cond, insn);
3871
3872 /* The stack ptr is used (honorarily) by a CALL insn. */
3873 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3874
3875 /* Calls may also reference any of the global registers,
3876 so they are made live. */
3877 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3878 if (global_regs[i])
3879 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3880 cond, insn);
3881 }
3882 }
3883
3884 /* On the final pass, update the counts of how many insns each
3885 reg is live in. */
3886 if (flags & PROP_REG_INFO)
3887 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3888 { REG_LIVE_LENGTH (i)++; });
3889
3890 return prev;
3891 }
3892
3893 /* Initialize a propagate_block_info struct for public consumption.
3894 Note that the structure itself is opaque outside this file, but that
3895 the user can use the regsets provided here. */
3896
3897 struct propagate_block_info *
3898 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
3899 basic_block bb;
3900 regset live, local_set, cond_local_set;
3901 int flags;
3902 {
3903 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3904
3905 pbi->bb = bb;
3906 pbi->reg_live = live;
3907 pbi->mem_set_list = NULL_RTX;
3908 pbi->local_set = local_set;
3909 pbi->cond_local_set = cond_local_set;
3910 pbi->cc0_live = 0;
3911 pbi->flags = flags;
3912
3913 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3914 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3915 else
3916 pbi->reg_next_use = NULL;
3917
3918 pbi->new_set = BITMAP_XMALLOC ();
3919
3920 #ifdef HAVE_conditional_execution
3921 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3922 free_reg_cond_life_info);
3923 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3924
3925 /* If this block ends in a conditional branch, for each register live
3926 from one side of the branch and not the other, record the register
3927 as conditionally dead. */
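/* For example (illustrative): if pseudo 100 is live at the start of the
   true successor but not of the false one, it dies whenever the branch
   condition is false, so it is entered in reg_cond_dead under the
   reversed condition.  */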
3928 if (GET_CODE (bb->end) == JUMP_INSN
3929 && any_condjump_p (bb->end))
3930 {
3931 regset_head diff_head;
3932 regset diff = INITIALIZE_REG_SET (diff_head);
3933 basic_block bb_true, bb_false;
3934 rtx cond_true, cond_false, set_src;
3935 int i;
3936
3937 /* Identify the successor blocks. */
3938 bb_true = bb->succ->dest;
3939 if (bb->succ->succ_next != NULL)
3940 {
3941 bb_false = bb->succ->succ_next->dest;
3942
3943 if (bb->succ->flags & EDGE_FALLTHRU)
3944 {
3945 basic_block t = bb_false;
3946 bb_false = bb_true;
3947 bb_true = t;
3948 }
3949 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3950 abort ();
3951 }
3952 else
3953 {
3954 /* This can happen with a conditional jump to the next insn. */
3955 if (JUMP_LABEL (bb->end) != bb_true->head)
3956 abort ();
3957
3958 /* Simplest way to do nothing. */
3959 bb_false = bb_true;
3960 }
3961
3962 /* Extract the condition from the branch. */
3963 set_src = SET_SRC (pc_set (bb->end));
3964 cond_true = XEXP (set_src, 0);
3965 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3966 GET_MODE (cond_true), XEXP (cond_true, 0),
3967 XEXP (cond_true, 1));
3968 if (GET_CODE (XEXP (set_src, 1)) == PC)
3969 {
3970 rtx t = cond_false;
3971 cond_false = cond_true;
3972 cond_true = t;
3973 }
3974
3975 /* Compute which registers lead different lives in the successors. */
3976 if (bitmap_operation (diff, bb_true->global_live_at_start,
3977 bb_false->global_live_at_start, BITMAP_XOR))
3978 {
3979 rtx reg = XEXP (cond_true, 0);
3980
3981 if (GET_CODE (reg) == SUBREG)
3982 reg = SUBREG_REG (reg);
3983
3984 if (GET_CODE (reg) != REG)
3985 abort ();
3986
3987 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
3988
3989 /* For each such register, mark it conditionally dead. */
3990 EXECUTE_IF_SET_IN_REG_SET
3991 (diff, 0, i,
3992 {
3993 struct reg_cond_life_info *rcli;
3994 rtx cond;
3995
3996 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3997
3998 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3999 cond = cond_false;
4000 else
4001 cond = cond_true;
4002 rcli->condition = cond;
4003
4004 splay_tree_insert (pbi->reg_cond_dead, i,
4005 (splay_tree_value) rcli);
4006 });
4007 }
4008
4009 FREE_REG_SET (diff);
4010 }
4011 #endif
4012
4013 /* If this block has no successors, any stores to the frame that aren't
4014 used later in the block are dead. So make a pass over the block
4015 recording any such stores that are made and show them dead at the end. We do
4016 a very conservative and simple job here. */
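/* E.g. (illustrative): in a block that falls straight into the exit, a
   store such as (set (mem (plus (reg fp) (const_int -8))) (reg 65)) is
   recorded here; if nothing later in the block reads that slot, the
   backward scan will find the store dead and can delete it.  */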
4017 if (optimize
4018 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4019 && (TYPE_RETURNS_STACK_DEPRESSED
4020 (TREE_TYPE (current_function_decl))))
4021 && (flags & PROP_SCAN_DEAD_CODE)
4022 && (bb->succ == NULL
4023 || (bb->succ->succ_next == NULL
4024 && bb->succ->dest == EXIT_BLOCK_PTR)))
4025 {
4026 rtx insn;
4027 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4028 if (GET_CODE (insn) == INSN
4029 && GET_CODE (PATTERN (insn)) == SET
4030 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
4031 {
4032 rtx mem = SET_DEST (PATTERN (insn));
4033
4034 if (XEXP (mem, 0) == frame_pointer_rtx
4035 || (GET_CODE (XEXP (mem, 0)) == PLUS
4036 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
4037 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
4038 {
4039 #ifdef AUTO_INC_DEC
4040 /* Store a copy of mem, otherwise the address may be scrogged
4041 by find_auto_inc. This matters because insn_dead_p uses
4042 an rtx_equal_p check to determine if two addresses are
4043 the same. This works before find_auto_inc, but fails
4044 after find_auto_inc, causing discrepancies between the
4045 set of live registers calculated during the
4046 calculate_global_regs_live phase and what actually exists
4047 after flow completes, leading to aborts. */
4048 if (flags & PROP_AUTOINC)
4049 mem = shallow_copy_rtx (mem);
4050 #endif
4051 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4052 }
4053 }
4054 }
4055
4056 return pbi;
4057 }
4058
4059 /* Release a propagate_block_info struct. */
4060
4061 void
4062 free_propagate_block_info (pbi)
4063 struct propagate_block_info *pbi;
4064 {
4065 free_EXPR_LIST_list (&pbi->mem_set_list);
4066
4067 BITMAP_XFREE (pbi->new_set);
4068
4069 #ifdef HAVE_conditional_execution
4070 splay_tree_delete (pbi->reg_cond_dead);
4071 BITMAP_XFREE (pbi->reg_cond_reg);
4072 #endif
4073
4074 if (pbi->reg_next_use)
4075 free (pbi->reg_next_use);
4076
4077 free (pbi);
4078 }
4079
4080 /* Compute the registers live at the beginning of a basic block BB from
4081 those live at the end.
4082
4083 When called, REG_LIVE contains those live at the end. On return, it
4084 contains those live at the beginning.
4085
4086 LOCAL_SET, if non-null, will be set with all registers killed
4087 unconditionally by this basic block.
4088 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4089 killed conditionally by this basic block. If there is any unconditional
4090 set of a register, then the corresponding bit will be set in LOCAL_SET
4091 and cleared in COND_LOCAL_SET.
4092 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4093 case, the resulting set will be equal to the union of the two sets that
4094 would otherwise be computed. */
4095
4096 void
4097 propagate_block (bb, live, local_set, cond_local_set, flags)
4098 basic_block bb;
4099 regset live;
4100 regset local_set;
4101 regset cond_local_set;
4102 int flags;
4103 {
4104 struct propagate_block_info *pbi;
4105 rtx insn, prev;
4106
4107 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4108
4109 if (flags & PROP_REG_INFO)
4110 {
4111 register int i;
4112
4113 /* Process the regs live at the end of the block.
4114 Mark them as not local to any one basic block. */
4115 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4116 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4117 }
4118
4119 /* Scan the block an insn at a time from end to beginning. */
4120
4121 for (insn = bb->end;; insn = prev)
4122 {
4123 /* If this is a call to `setjmp' et al, warn if any
4124 non-volatile datum is live. */
4125 if ((flags & PROP_REG_INFO)
4126 && GET_CODE (insn) == NOTE
4127 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4128 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4129
4130 prev = propagate_one_insn (pbi, insn);
4131
4132 if (insn == bb->head)
4133 break;
4134 }
4135
4136 free_propagate_block_info (pbi);
4137 }
4138 \f
4139 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4140 (SET expressions whose destinations are registers dead after the insn).
4141 PBI->reg_live is the regset that says which regs are alive after the insn.
4142
4143 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4144
4145 If X is the entire body of an insn, NOTES contains the reg notes
4146 pertaining to the insn. */
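/* For example (illustrative): (set (reg 70) (reg 71)) is dead when pseudo
   70 is not live after the insn; a PARALLEL is dead only if every SET in
   it is dead, since CLOBBERs and USEs alone do not make it worth keeping.  */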
4147
4148 static int
4149 insn_dead_p (pbi, x, call_ok, notes)
4150 struct propagate_block_info *pbi;
4151 rtx x;
4152 int call_ok;
4153 rtx notes ATTRIBUTE_UNUSED;
4154 {
4155 enum rtx_code code = GET_CODE (x);
4156
4157 #ifdef AUTO_INC_DEC
4158 /* If flow is invoked after reload, we must take existing AUTO_INC
4159 expressions into account. */
4160 if (reload_completed)
4161 {
4162 for (; notes; notes = XEXP (notes, 1))
4163 {
4164 if (REG_NOTE_KIND (notes) == REG_INC)
4165 {
4166 int regno = REGNO (XEXP (notes, 0));
4167
4168 /* Don't delete insns to set global regs. */
4169 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4170 || REGNO_REG_SET_P (pbi->reg_live, regno))
4171 return 0;
4172 }
4173 }
4174 }
4175 #endif
4176
4177 /* If setting something that's a reg or part of one,
4178 see if that register's altered value will be live. */
4179
4180 if (code == SET)
4181 {
4182 rtx r = SET_DEST (x);
4183
4184 #ifdef HAVE_cc0
4185 if (GET_CODE (r) == CC0)
4186 return ! pbi->cc0_live;
4187 #endif
4188
4189 /* A SET that is a subroutine call cannot be dead. */
4190 if (GET_CODE (SET_SRC (x)) == CALL)
4191 {
4192 if (! call_ok)
4193 return 0;
4194 }
4195
4196 /* Don't eliminate loads from volatile memory or volatile asms. */
4197 else if (volatile_refs_p (SET_SRC (x)))
4198 return 0;
4199
4200 if (GET_CODE (r) == MEM)
4201 {
4202 rtx temp;
4203
4204 if (MEM_VOLATILE_P (r))
4205 return 0;
4206
4207 /* Walk the set of memory locations we are currently tracking
4208 and see if one is an identical match to this memory location.
4209 If so, this memory write is dead (remember, we're walking
4210 backwards from the end of the block to the start). */
4211 temp = pbi->mem_set_list;
4212 while (temp)
4213 {
4214 rtx mem = XEXP (temp, 0);
4215
4216 if (rtx_equal_p (mem, r))
4217 return 1;
4218 #ifdef AUTO_INC_DEC
4219 /* Check if memory reference matches an auto increment. Only
4220 post increment/decrement or modify are valid. */
4221 if (GET_MODE (mem) == GET_MODE (r)
4222 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4223 || GET_CODE (XEXP (mem, 0)) == POST_INC
4224 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4225 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4226 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4227 return 1;
4228 #endif
4229 temp = XEXP (temp, 1);
4230 }
4231 }
4232 else
4233 {
4234 while (GET_CODE (r) == SUBREG
4235 || GET_CODE (r) == STRICT_LOW_PART
4236 || GET_CODE (r) == ZERO_EXTRACT)
4237 r = XEXP (r, 0);
4238
4239 if (GET_CODE (r) == REG)
4240 {
4241 int regno = REGNO (r);
4242
4243 /* Obvious. */
4244 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4245 return 0;
4246
4247 /* If this is a hard register, verify that subsequent
4248 words are not needed. */
4249 if (regno < FIRST_PSEUDO_REGISTER)
4250 {
4251 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4252
4253 while (--n > 0)
4254 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4255 return 0;
4256 }
4257
4258 /* Don't delete insns to set global regs. */
4259 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4260 return 0;
4261
4262 /* Make sure insns to set the stack pointer aren't deleted. */
4263 if (regno == STACK_POINTER_REGNUM)
4264 return 0;
4265
4266 /* ??? These bits might be redundant with the force live bits
4267 in calculate_global_regs_live. We would delete from
4268 sequential sets; whether this actually affects real code
4269 for anything but the stack pointer I don't know. */
4270 /* Make sure insns to set the frame pointer aren't deleted. */
4271 if (regno == FRAME_POINTER_REGNUM
4272 && (! reload_completed || frame_pointer_needed))
4273 return 0;
4274 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4275 if (regno == HARD_FRAME_POINTER_REGNUM
4276 && (! reload_completed || frame_pointer_needed))
4277 return 0;
4278 #endif
4279
4280 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4281 /* Make sure insns to set arg pointer are never deleted
4282 (if the arg pointer isn't fixed, there will be a USE
4283 for it, so we can treat it normally). */
4284 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4285 return 0;
4286 #endif
4287
4288 /* Otherwise, the set is dead. */
4289 return 1;
4290 }
4291 }
4292 }
4293
4294 /* If performing several activities, insn is dead if each activity
4295 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4296 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4297 worth keeping. */
4298 else if (code == PARALLEL)
4299 {
4300 int i = XVECLEN (x, 0);
4301
4302 for (i--; i >= 0; i--)
4303 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4304 && GET_CODE (XVECEXP (x, 0, i)) != USE
4305 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4306 return 0;
4307
4308 return 1;
4309 }
4310
4311 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4312 is not necessarily true for hard registers. */
4313 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4314 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4315 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4316 return 1;
4317
4318 /* We do not check other CLOBBER or USE here. An insn consisting of just
4319 a CLOBBER or just a USE should not be deleted. */
4320 return 0;
4321 }
4322
4323 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4324 return 1 if the entire library call is dead.
4325 This is true if INSN copies a register (hard or pseudo)
4326 and if the hard return reg of the call insn is dead.
4327 (The caller should have tested the destination of the SET inside
4328 INSN already for death.)
4329
4330 If this insn doesn't just copy a register, then we don't
4331 have an ordinary libcall. In that case, cse could not have
4332 managed to substitute the source for the dest later on,
4333 so we can assume the libcall is dead.
4334
4335 PBI is the block info giving pseudoregs live before this insn.
4336 NOTE is the REG_RETVAL note of the insn. */
4337
4338 static int
4339 libcall_dead_p (pbi, note, insn)
4340 struct propagate_block_info *pbi;
4341 rtx note;
4342 rtx insn;
4343 {
4344 rtx x = single_set (insn);
4345
4346 if (x)
4347 {
4348 register rtx r = SET_SRC (x);
4349 if (GET_CODE (r) == REG)
4350 {
4351 rtx call = XEXP (note, 0);
4352 rtx call_pat;
4353 register int i;
4354
4355 /* Find the call insn. */
4356 while (call != insn && GET_CODE (call) != CALL_INSN)
4357 call = NEXT_INSN (call);
4358
4359 /* If there is none, do nothing special,
4360 since ordinary death handling can understand these insns. */
4361 if (call == insn)
4362 return 0;
4363
4364 /* See if the hard reg holding the value is dead.
4365 If this is a PARALLEL, find the call within it. */
4366 call_pat = PATTERN (call);
4367 if (GET_CODE (call_pat) == PARALLEL)
4368 {
4369 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4370 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4371 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4372 break;
4373
4374 /* This may be a library call that is returning a value
4375 via invisible pointer. Do nothing special, since
4376 ordinary death handling can understand these insns. */
4377 if (i < 0)
4378 return 0;
4379
4380 call_pat = XVECEXP (call_pat, 0, i);
4381 }
4382
4383 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4384 }
4385 }
4386 return 1;
4387 }
4388
4389 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4390 live at function entry. Don't count global register variables, variables
4391 in registers that can be used for function arg passing, or variables in
4392 fixed hard registers. */
4393
4394 int
4395 regno_uninitialized (regno)
4396 int regno;
4397 {
4398 if (n_basic_blocks == 0
4399 || (regno < FIRST_PSEUDO_REGISTER
4400 && (global_regs[regno]
4401 || fixed_regs[regno]
4402 || FUNCTION_ARG_REGNO_P (regno))))
4403 return 0;
4404
4405 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4406 }
4407
4408 /* 1 if register REGNO was alive at a place where `setjmp' was called
4409 and was set more than once or is an argument.
4410 Such regs may be clobbered by `longjmp'. */
4411
4412 int
4413 regno_clobbered_at_setjmp (regno)
4414 int regno;
4415 {
4416 if (n_basic_blocks == 0)
4417 return 0;
4418
4419 return ((REG_N_SETS (regno) > 1
4420 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4421 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4422 }
4423 \f
4424 /* INSN references memory, possibly using autoincrement addressing modes.
4425 Find any entries on the mem_set_list that need to be invalidated due
4426 to an address change. */
4427
4428 static void
4429 invalidate_mems_from_autoinc (pbi, insn)
4430 struct propagate_block_info *pbi;
4431 rtx insn;
4432 {
4433 rtx note;
4434 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4435 {
4436 if (REG_NOTE_KIND (note) == REG_INC)
4437 {
4438 rtx temp = pbi->mem_set_list;
4439 rtx prev = NULL_RTX;
4440 rtx next;
4441
4442 while (temp)
4443 {
4444 next = XEXP (temp, 1);
4445 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4446 {
4447 /* Splice temp out of list. */
4448 if (prev)
4449 XEXP (prev, 1) = next;
4450 else
4451 pbi->mem_set_list = next;
4452 free_EXPR_LIST_node (temp);
4453 }
4454 else
4455 prev = temp;
4456 temp = next;
4457 }
4458 }
4459 }
4460 }
4461
4462 /* EXP is either a MEM or a REG. Remove any dependent entries
4463 from pbi->mem_set_list. */
4464
4465 static void
4466 invalidate_mems_from_set (pbi, exp)
4467 struct propagate_block_info *pbi;
4468 rtx exp;
4469 {
4470 rtx temp = pbi->mem_set_list;
4471 rtx prev = NULL_RTX;
4472 rtx next;
4473
4474 while (temp)
4475 {
4476 next = XEXP (temp, 1);
4477 if ((GET_CODE (exp) == MEM
4478 && output_dependence (XEXP (temp, 0), exp))
4479 || (GET_CODE (exp) == REG
4480 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4481 {
4482 /* Splice this entry out of the list. */
4483 if (prev)
4484 XEXP (prev, 1) = next;
4485 else
4486 pbi->mem_set_list = next;
4487 free_EXPR_LIST_node (temp);
4488 }
4489 else
4490 prev = temp;
4491 temp = next;
4492 }
4493 }
4494
4495 /* Process the registers that are set within X. Mark each of them in
4496 PBI as dead prior to this insn, since the insn gives it a new value.
4497 
4498 If INSN is nonzero, it is the insn being processed.
4499 
4500 The operations to perform are taken from PBI->flags. */
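/* For example (illustrative): for
       (parallel [(set (reg 60) ...) (clobber (reg 61))])
   mark_set_1 is called once for reg 60 with code SET and once for reg 61
   with code CLOBBER; a COND_EXEC wrapper supplies the COND passed along.  */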
4501
4502 static void
4503 mark_set_regs (pbi, x, insn)
4504 struct propagate_block_info *pbi;
4505 rtx x, insn;
4506 {
4507 rtx cond = NULL_RTX;
4508 rtx link;
4509 enum rtx_code code;
4510
4511 if (insn)
4512 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4513 {
4514 if (REG_NOTE_KIND (link) == REG_INC)
4515 mark_set_1 (pbi, SET, XEXP (link, 0),
4516 (GET_CODE (x) == COND_EXEC
4517 ? COND_EXEC_TEST (x) : NULL_RTX),
4518 insn, pbi->flags);
4519 }
4520 retry:
4521 switch (code = GET_CODE (x))
4522 {
4523 case SET:
4524 case CLOBBER:
4525 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4526 return;
4527
4528 case COND_EXEC:
4529 cond = COND_EXEC_TEST (x);
4530 x = COND_EXEC_CODE (x);
4531 goto retry;
4532
4533 case PARALLEL:
4534 {
4535 register int i;
4536 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4537 {
4538 rtx sub = XVECEXP (x, 0, i);
4539 switch (code = GET_CODE (sub))
4540 {
4541 case COND_EXEC:
4542 if (cond != NULL_RTX)
4543 abort ();
4544
4545 cond = COND_EXEC_TEST (sub);
4546 sub = COND_EXEC_CODE (sub);
4547 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4548 break;
4549 /* Fall through. */
4550
4551 case SET:
4552 case CLOBBER:
4553 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4554 break;
4555
4556 default:
4557 break;
4558 }
4559 }
4560 break;
4561 }
4562
4563 default:
4564 break;
4565 }
4566 }
4567
4568 /* Process a single SET or CLOBBER whose destination is REG. */
4569
4570 static void
4571 mark_set_1 (pbi, code, reg, cond, insn, flags)
4572 struct propagate_block_info *pbi;
4573 enum rtx_code code;
4574 rtx reg, cond, insn;
4575 int flags;
4576 {
4577 int orig_regno = -1;
4578 int regno_first = -1, regno_last = -1;
4579 int not_dead = 0;
4580 int i;
4581
4582 /* Some targets place small structures in registers for
4583 return values of functions. We have to detect this
4584 case specially here to get correct flow information. */
4585 if (GET_CODE (reg) == PARALLEL
4586 && GET_MODE (reg) == BLKmode)
4587 {
4588 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4589 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4590 return;
4591 }
4592
4593 /* Modifying just one hardware register of a multi-reg value or just a
4594 byte field of a register does not mean the value from before this insn
4595 is now dead. Of course, if it was dead afterward, it is unused now. */
4596
4597 switch (GET_CODE (reg))
4598 {
4599 case ZERO_EXTRACT:
4600 case SIGN_EXTRACT:
4601 case STRICT_LOW_PART:
4602 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4603 do
4604 reg = XEXP (reg, 0);
4605 while (GET_CODE (reg) == SUBREG
4606 || GET_CODE (reg) == ZERO_EXTRACT
4607 || GET_CODE (reg) == SIGN_EXTRACT
4608 || GET_CODE (reg) == STRICT_LOW_PART);
4609 if (GET_CODE (reg) == MEM)
4610 break;
4611 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4612 /* Fall through. */
4613
4614 case REG:
4615 orig_regno = ORIGINAL_REGNO (reg);
4616 regno_last = regno_first = REGNO (reg);
4617 if (regno_first < FIRST_PSEUDO_REGISTER)
4618 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4619 break;
4620
4621 case SUBREG:
4622 if (GET_CODE (SUBREG_REG (reg)) == REG)
4623 {
4624 enum machine_mode outer_mode = GET_MODE (reg);
4625 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4626
4627 /* Identify the range of registers affected. This is moderately
4628 tricky for hard registers. See alter_subreg. */
4629
4630 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4631 if (regno_first < FIRST_PSEUDO_REGISTER)
4632 {
4633 #ifdef ALTER_HARD_SUBREG
4634 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4635 inner_mode, regno_first);
4636 #else
4637 regno_first += SUBREG_WORD (reg);
4638 #endif
4639 regno_last = (regno_first
4640 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4641
4642 /* Since we've just adjusted the register number ranges, make
4643 sure REG matches. Otherwise some_was_live will be clear
4644 when it shouldn't have been, and we'll create incorrect
4645 REG_UNUSED notes. */
4646 reg = gen_rtx_REG (outer_mode, regno_first);
4647 }
4648 else
4649 {
4650 /* If the number of words in the subreg is less than the number
4651 of words in the full register, we have a well-defined partial
4652 set. Otherwise the high bits are undefined.
4653
4654 This is only really applicable to pseudos, since we just took
4655 care of multi-word hard registers. */
4656 if (((GET_MODE_SIZE (outer_mode)
4657 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4658 < ((GET_MODE_SIZE (inner_mode)
4659 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4660 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4661
4662 reg = SUBREG_REG (reg);
4663 }
4664 }
4665 else
4666 reg = SUBREG_REG (reg);
4667 break;
4668
4669 default:
4670 break;
4671 }
4672
4673 /* If this set is a MEM, then it kills any aliased writes.
4674 If this set is a REG, then it kills any MEMs which use the reg. */
4675 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4676 {
4677 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4678 invalidate_mems_from_set (pbi, reg);
4679
4680 /* If the memory reference had embedded side effects (autoincrement
4681 address modes), then we may need to kill some entries on the
4682 memory set list. */
4683 if (insn && GET_CODE (reg) == MEM)
4684 invalidate_mems_from_autoinc (pbi, insn);
4685
4686 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4687 /* ??? With more effort we could track conditional memory life. */
4688 && ! cond
4689 /* We do not know the size of a BLKmode store, so we do not track
4690 them for redundant store elimination. */
4691 && GET_MODE (reg) != BLKmode
4692 /* There are no REG_INC notes for SP, so we can't assume we'll see
4693 everything that invalidates it. To be safe, don't eliminate any
4694 stores through SP; none of them should be redundant anyway. */
4695 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4696 {
4697 #ifdef AUTO_INC_DEC
4698 /* Store a copy of mem, otherwise the address may be
4699 scrogged by find_auto_inc. */
4700 if (flags & PROP_AUTOINC)
4701 reg = shallow_copy_rtx (reg);
4702 #endif
4703 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4704 }
4705 }
4706
4707 if (GET_CODE (reg) == REG
4708 && ! (regno_first == FRAME_POINTER_REGNUM
4709 && (! reload_completed || frame_pointer_needed))
4710 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4711 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4712 && (! reload_completed || frame_pointer_needed))
4713 #endif
4714 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4715 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4716 #endif
4717 )
4718 {
4719 int some_was_live = 0, some_was_dead = 0;
4720
4721 for (i = regno_first; i <= regno_last; ++i)
4722 {
4723 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4724 if (pbi->local_set)
4725 {
4726 /* Order of the set operation matters here since both
4727 sets may be the same. */
4728 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
4729 if (cond != NULL_RTX
4730 && ! REGNO_REG_SET_P (pbi->local_set, i))
4731 SET_REGNO_REG_SET (pbi->cond_local_set, i);
4732 else
4733 SET_REGNO_REG_SET (pbi->local_set, i);
4734 }
4735 if (code != CLOBBER)
4736 SET_REGNO_REG_SET (pbi->new_set, i);
4737
4738 some_was_live |= needed_regno;
4739 some_was_dead |= ! needed_regno;
4740 }
4741
4742 #ifdef HAVE_conditional_execution
4743 /* Consider conditional death in deciding that the register needs
4744 a death note. */
4745 if (some_was_live && ! not_dead
4746 /* The stack pointer is never dead. Well, not strictly true,
4747 but it's very difficult to tell from here. Hopefully
4748 combine_stack_adjustments will fix up the most egregious
4749 errors. */
4750 && regno_first != STACK_POINTER_REGNUM)
4751 {
4752 for (i = regno_first; i <= regno_last; ++i)
4753 if (! mark_regno_cond_dead (pbi, i, cond))
4754 not_dead = 1;
4755 }
4756 #endif
4757
4758 /* Additional data to record if this is the final pass. */
4759 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4760 | PROP_DEATH_NOTES | PROP_AUTOINC | PROP_POSTRELOAD))
4761 {
4762 register rtx y;
4763 register int blocknum = pbi->bb->index;
4764
4765 y = NULL_RTX;
4766 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4767 {
4768 y = pbi->reg_next_use[regno_first];
4769
4770 /* The next use is no longer next, since a store intervenes. */
4771 for (i = regno_first; i <= regno_last; ++i)
4772 pbi->reg_next_use[i] = 0;
4773 }
4774
4775 /* After reload has completed, try to keep REG_N_SETS up to date for
4776 the original pseudos. */
4777 if ((flags & PROP_POSTRELOAD) && orig_regno >= FIRST_PSEUDO_REGISTER)
4778 REG_N_SETS (orig_regno) += 1;
4779
4780 if (flags & PROP_REG_INFO)
4781 {
4782 for (i = regno_first; i <= regno_last; ++i)
4783 {
4784 /* Count (weighted) references, stores, etc. This counts a
4785 register twice if it is modified, but that is correct. */
4786 REG_N_SETS (i) += 1;
4787 REG_N_REFS (i) += (optimize_size ? 1
4788 : pbi->bb->loop_depth + 1);
4789
4790 /* The insns where a reg is live are normally counted
4791 elsewhere, but we want the count to include the insn
4792 where the reg is set, and the normal counting mechanism
4793 would not count it. */
4794 REG_LIVE_LENGTH (i) += 1;
4795 }
4796
4797 /* If this is a hard reg, record this function uses the reg. */
4798 if (regno_first < FIRST_PSEUDO_REGISTER)
4799 {
4800 for (i = regno_first; i <= regno_last; i++)
4801 regs_ever_live[i] = 1;
4802 }
4803 else
4804 {
4805 /* Keep track of which basic blocks each reg appears in. */
4806 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4807 REG_BASIC_BLOCK (regno_first) = blocknum;
4808 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4809 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4810 }
4811 }
4812
4813 if (! some_was_dead)
4814 {
4815 if (flags & PROP_LOG_LINKS)
4816 {
4817 /* Make a logical link from the next following insn
4818 that uses this register, back to this insn.
4819 The following insns have already been processed.
4820
4821 We don't build a LOG_LINK for hard registers contained
4822 in ASM_OPERANDs. If these registers get replaced,
4823 we might wind up changing the semantics of the insn,
4824 even if reload can make what appear to be valid
4825 assignments later. */
4826 if (y && (BLOCK_NUM (y) == blocknum)
4827 && (regno_first >= FIRST_PSEUDO_REGISTER
4828 || asm_noperands (PATTERN (y)) < 0))
4829 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4830 }
4831 }
4832 else if (not_dead)
4833 ;
4834 else if (! some_was_live)
4835 {
4836 if (flags & PROP_REG_INFO)
4837 REG_N_DEATHS (regno_first) += 1;
4838
4839 if (flags & PROP_DEATH_NOTES)
4840 {
4841 /* Note that dead stores have already been deleted
4842 when possible. If we get here, we have found a
4843 dead store that cannot be eliminated (because the
4844 same insn does something useful). Indicate this
4845 by marking the reg being set as dying here. */
4846 REG_NOTES (insn)
4847 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4848 }
4849 }
4850 else
4851 {
4852 if (flags & PROP_DEATH_NOTES)
4853 {
4854 /* This is a case where we have a multi-word hard register
4855 and some, but not all, of the words of the register are
4856 needed in subsequent insns. Write REG_UNUSED notes
4857 for those parts that were not needed. This case should
4858 be rare. */
4859
4860 for (i = regno_first; i <= regno_last; ++i)
4861 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4862 REG_NOTES (insn)
4863 = alloc_EXPR_LIST (REG_UNUSED,
4864 gen_rtx_REG (reg_raw_mode[i], i),
4865 REG_NOTES (insn));
4866 }
4867 }
4868 }
4869
4870 /* Mark the register as being dead. */
4871 if (some_was_live
4872 && ! not_dead
4873 /* The stack pointer is never dead. Well, not strictly true,
4874 but it's very difficult to tell from here. Hopefully
4875 combine_stack_adjustments will fix up the most egregious
4876 errors. */
4877 && regno_first != STACK_POINTER_REGNUM)
4878 {
4879 for (i = regno_first; i <= regno_last; ++i)
4880 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4881 }
4882 }
4883 else if (GET_CODE (reg) == REG)
4884 {
4885 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4886 pbi->reg_next_use[regno_first] = 0;
4887 }
4888
4889 /* If this is the last pass and this is a SCRATCH, show it will be dying
4890 here and count it. */
4891 else if (GET_CODE (reg) == SCRATCH)
4892 {
4893 if (flags & PROP_DEATH_NOTES)
4894 REG_NOTES (insn)
4895 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4896 }
4897 }
4898 \f
4899 #ifdef HAVE_conditional_execution
4900 /* Mark REGNO conditionally dead.
4901 Return true if the register is now unconditionally dead. */
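/* For example (illustrative): scanning backwards, a set of pseudo 100 under
   (ne (reg 200) (const_int 0)) first records that condition; a second set
   under (eq (reg 200) (const_int 0)) then makes the IOR of the two
   conditions const1_rtx, so the register becomes unconditionally dead.  */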
4902
4903 static int
4904 mark_regno_cond_dead (pbi, regno, cond)
4905 struct propagate_block_info *pbi;
4906 int regno;
4907 rtx cond;
4908 {
4909 /* If this is a store to a predicate register, the value of the
4910 predicate is changing, we don't know that the predicate as seen
4911 before is the same as that seen after. Flush all dependent
4912 conditions from reg_cond_dead. This will make all such
4913 conditionally live registers unconditionally live. */
4914 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4915 flush_reg_cond_reg (pbi, regno);
4916
4917 /* If this is an unconditional store, remove any conditional
4918 life that may have existed. */
4919 if (cond == NULL_RTX)
4920 splay_tree_remove (pbi->reg_cond_dead, regno);
4921 else
4922 {
4923 splay_tree_node node;
4924 struct reg_cond_life_info *rcli;
4925 rtx ncond;
4926
4927 /* Otherwise this is a conditional set. Record that fact.
4928 It may have been conditionally used, or there may be a
4929 subsequent set with a complementary condition. */
4930
4931 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4932 if (node == NULL)
4933 {
4934 /* The register was unconditionally live previously.
4935 Record the current condition as the condition under
4936 which it is dead. */
4937 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4938 rcli->condition = cond;
4939 splay_tree_insert (pbi->reg_cond_dead, regno,
4940 (splay_tree_value) rcli);
4941
4942 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4943
4944 /* Not unconditionally dead. */
4945 return 0;
4946 }
4947 else
4948 {
4949 /* The register was conditionally live previously.
4950 Add the new condition to the old. */
4951 rcli = (struct reg_cond_life_info *) node->value;
4952 ncond = rcli->condition;
4953 ncond = ior_reg_cond (ncond, cond, 1);
4954
4955 /* If the register is now unconditionally dead,
4956 remove the entry in the splay_tree. */
4957 if (ncond == const1_rtx)
4958 splay_tree_remove (pbi->reg_cond_dead, regno);
4959 else
4960 {
4961 rcli->condition = ncond;
4962
4963 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4964
4965 /* Not unconditionally dead. */
4966 return 0;
4967 }
4968 }
4969 }
4970
4971 return 1;
4972 }
4973
4974 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4975
4976 static void
4977 free_reg_cond_life_info (value)
4978 splay_tree_value value;
4979 {
4980 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4981 free (rcli);
4982 }
4983
4984 /* Helper function for flush_reg_cond_reg. */
4985
4986 static int
4987 flush_reg_cond_reg_1 (node, data)
4988 splay_tree_node node;
4989 void *data;
4990 {
4991 struct reg_cond_life_info *rcli;
4992 int *xdata = (int *) data;
4993 unsigned int regno = xdata[0];
4994
4995 /* Don't need to search if last flushed value was farther on in
4996 the in-order traversal. */
4997 if (xdata[1] >= (int) node->key)
4998 return 0;
4999
5000 /* Splice out portions of the expression that refer to regno. */
5001 rcli = (struct reg_cond_life_info *) node->value;
5002 rcli->condition = elim_reg_cond (rcli->condition, regno);
5003
5004 /* If the entire condition is now false, signal the node to be removed. */
5005 if (rcli->condition == const0_rtx)
5006 {
5007 xdata[1] = node->key;
5008 return -1;
5009 }
5010 else if (rcli->condition == const1_rtx)
5011 abort ();
5012
5013 return 0;
5014 }
5015
5016 /* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
5017
5018 static void
5019 flush_reg_cond_reg (pbi, regno)
5020 struct propagate_block_info *pbi;
5021 int regno;
5022 {
5023 int pair[2];
5024
5025 pair[0] = regno;
5026 pair[1] = -1;
5027 while (splay_tree_foreach (pbi->reg_cond_dead,
5028 flush_reg_cond_reg_1, pair) == -1)
5029 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5030
5031 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5032 }
5033
5034 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5035 For ior/and, the ADD flag determines whether we want to add the new
5036 condition X to the old one unconditionally. If it is zero, we will
5037 only return a new expression if X allows us to simplify part of
5038 OLD, otherwise we return OLD unchanged to the caller.
5039 If ADD is nonzero, we will return a new condition in all cases. The
5040 toplevel caller of one of these functions should always pass 1 for
5041 ADD. */
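/* For instance (illustrative): with OLD = (ne (reg 200) (const_int 0)) and
   X = (eq (reg 200) (const_int 0)), ior_reg_cond returns const1_rtx and
   and_reg_cond returns const0_rtx, since the two tests are complementary.  */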
5042
5043 static rtx
5044 ior_reg_cond (old, x, add)
5045 rtx old, x;
5046 int add;
5047 {
5048 rtx op0, op1;
5049
5050 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5051 {
5052 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5053 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5054 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5055 return const1_rtx;
5056 if (GET_CODE (x) == GET_CODE (old)
5057 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5058 return old;
5059 if (! add)
5060 return old;
5061 return gen_rtx_IOR (0, old, x);
5062 }
5063
5064 switch (GET_CODE (old))
5065 {
5066 case IOR:
5067 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5068 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5069 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5070 {
5071 if (op0 == const0_rtx)
5072 return op1;
5073 if (op1 == const0_rtx)
5074 return op0;
5075 if (op0 == const1_rtx || op1 == const1_rtx)
5076 return const1_rtx;
5077 if (op0 == XEXP (old, 0))
5078 op0 = gen_rtx_IOR (0, op0, x);
5079 else
5080 op1 = gen_rtx_IOR (0, op1, x);
5081 return gen_rtx_IOR (0, op0, op1);
5082 }
5083 if (! add)
5084 return old;
5085 return gen_rtx_IOR (0, old, x);
5086
5087 case AND:
5088 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5089 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5090 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5091 {
5092 if (op0 == const1_rtx)
5093 return op1;
5094 if (op1 == const1_rtx)
5095 return op0;
5096 if (op0 == const0_rtx || op1 == const0_rtx)
5097 return const0_rtx;
5098 if (op0 == XEXP (old, 0))
5099 op0 = gen_rtx_IOR (0, op0, x);
5100 else
5101 op1 = gen_rtx_IOR (0, op1, x);
5102 return gen_rtx_AND (0, op0, op1);
5103 }
5104 if (! add)
5105 return old;
5106 return gen_rtx_IOR (0, old, x);
5107
5108 case NOT:
5109 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5110 if (op0 != XEXP (old, 0))
5111 return not_reg_cond (op0);
5112 if (! add)
5113 return old;
5114 return gen_rtx_IOR (0, old, x);
5115
5116 default:
5117 abort ();
5118 }
5119 }
5120
5121 static rtx
5122 not_reg_cond (x)
5123 rtx x;
5124 {
5125 enum rtx_code x_code;
5126
5127 if (x == const0_rtx)
5128 return const1_rtx;
5129 else if (x == const1_rtx)
5130 return const0_rtx;
5131 x_code = GET_CODE (x);
5132 if (x_code == NOT)
5133 return XEXP (x, 0);
5134 if (GET_RTX_CLASS (x_code) == '<'
5135 && GET_CODE (XEXP (x, 0)) == REG)
5136 {
5137 if (XEXP (x, 1) != const0_rtx)
5138 abort ();
5139
5140 return gen_rtx_fmt_ee (reverse_condition (x_code),
5141 VOIDmode, XEXP (x, 0), const0_rtx);
5142 }
5143 return gen_rtx_NOT (0, x);
5144 }
5145
5146 static rtx
5147 and_reg_cond (old, x, add)
5148 rtx old, x;
5149 int add;
5150 {
5151 rtx op0, op1;
5152
5153 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5154 {
5155 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5156 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5157 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5158 return const0_rtx;
5159 if (GET_CODE (x) == GET_CODE (old)
5160 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5161 return old;
5162 if (! add)
5163 return old;
5164 return gen_rtx_AND (0, old, x);
5165 }
5166
5167 switch (GET_CODE (old))
5168 {
5169 case IOR:
5170 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5171 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5172 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5173 {
5174 if (op0 == const0_rtx)
5175 return op1;
5176 if (op1 == const0_rtx)
5177 return op0;
5178 if (op0 == const1_rtx || op1 == const1_rtx)
5179 return const1_rtx;
5180 if (op0 == XEXP (old, 0))
5181 op0 = gen_rtx_AND (0, op0, x);
5182 else
5183 op1 = gen_rtx_AND (0, op1, x);
5184 return gen_rtx_IOR (0, op0, op1);
5185 }
5186 if (! add)
5187 return old;
5188 return gen_rtx_AND (0, old, x);
5189
5190 case AND:
5191 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5192 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5193 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5194 {
5195 if (op0 == const1_rtx)
5196 return op1;
5197 if (op1 == const1_rtx)
5198 return op0;
5199 if (op0 == const0_rtx || op1 == const0_rtx)
5200 return const0_rtx;
5201 if (op0 == XEXP (old, 0))
5202 op0 = gen_rtx_AND (0, op0, x);
5203 else
5204 op1 = gen_rtx_AND (0, op1, x);
5205 return gen_rtx_AND (0, op0, op1);
5206 }
5207 if (! add)
5208 return old;
5209 return gen_rtx_AND (0, old, x);
5210
5211 case NOT:
5212 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5213 if (op0 != XEXP (old, 0))
5214 return not_reg_cond (op0);
5215 if (! add)
5216 return old;
5217 return gen_rtx_AND (0, old, x);
5218
5219 default:
5220 abort ();
5221 }
5222 }
5223
5224 /* Given a condition X, remove references to reg REGNO and return the
5225 new condition. The removal will be done so that all conditions
5226 involving REGNO are considered to evaluate to false. This function
5227 is used when the value of REGNO changes. */
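/* E.g. (illustrative): eliminating reg 200 from
       (ior (ne (reg 200) (const_int 0)) (eq (reg 201) (const_int 0)))
   treats the reg-200 test as false and yields (eq (reg 201) (const_int 0)).  */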
5228
5229 static rtx
5230 elim_reg_cond (x, regno)
5231 rtx x;
5232 unsigned int regno;
5233 {
5234 rtx op0, op1;
5235
5236 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5237 {
5238 if (REGNO (XEXP (x, 0)) == regno)
5239 return const0_rtx;
5240 return x;
5241 }
5242
5243 switch (GET_CODE (x))
5244 {
5245 case AND:
5246 op0 = elim_reg_cond (XEXP (x, 0), regno);
5247 op1 = elim_reg_cond (XEXP (x, 1), regno);
5248 if (op0 == const0_rtx || op1 == const0_rtx)
5249 return const0_rtx;
5250 if (op0 == const1_rtx)
5251 return op1;
5252 if (op1 == const1_rtx)
5253 return op0;
5254 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5255 return x;
5256 return gen_rtx_AND (0, op0, op1);
5257
5258 case IOR:
5259 op0 = elim_reg_cond (XEXP (x, 0), regno);
5260 op1 = elim_reg_cond (XEXP (x, 1), regno);
5261 if (op0 == const1_rtx || op1 == const1_rtx)
5262 return const1_rtx;
5263 if (op0 == const0_rtx)
5264 return op1;
5265 if (op1 == const0_rtx)
5266 return op0;
5267 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5268 return x;
5269 return gen_rtx_IOR (0, op0, op1);
5270
5271 case NOT:
5272 op0 = elim_reg_cond (XEXP (x, 0), regno);
5273 if (op0 == const0_rtx)
5274 return const1_rtx;
5275 if (op0 == const1_rtx)
5276 return const0_rtx;
5277 if (op0 != XEXP (x, 0))
5278 return not_reg_cond (op0);
5279 return x;
5280
5281 default:
5282 abort ();
5283 }
5284 }
5285 #endif /* HAVE_conditional_execution */
5286 \f
5287 #ifdef AUTO_INC_DEC
5288
5289 /* Try to substitute the auto-inc expression INC as the address inside
5290 MEM which occurs in INSN. Currently, the address of MEM is an expression
5291 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5292 that has a single set whose source is a PLUS of INCR_REG and something
5293 else. */
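/* For example (illustrative): with INC = (post_inc (reg 100)),
       MEM  = (mem:SI (reg 100))   appearing in INSN, and
       INCR = (set (reg 100) (plus (reg 100) (const_int 4))),
   a successful substitution rewrites the MEM address to the POST_INC and
   turns INCR into a copy of reg 100 into itself, which is then deleted.  */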
5294
5295 static void
5296 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5297 struct propagate_block_info *pbi;
5298 rtx inc, insn, mem, incr, incr_reg;
5299 {
5300 int regno = REGNO (incr_reg);
5301 rtx set = single_set (incr);
5302 rtx q = SET_DEST (set);
5303 rtx y = SET_SRC (set);
5304 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5305
5306 /* Make sure this reg appears only once in this insn. */
5307 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5308 return;
5309
5310 if (dead_or_set_p (incr, incr_reg)
5311 /* Mustn't autoinc an eliminable register. */
5312 && (regno >= FIRST_PSEUDO_REGISTER
5313 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5314 {
5315 /* This is the simple case. Try to make the auto-inc. If
5316 we can't, we are done. Otherwise, we will do any
5317 needed updates below. */
5318 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5319 return;
5320 }
5321 else if (GET_CODE (q) == REG
5322 /* PREV_INSN used here to check the semi-open interval
5323 [insn,incr). */
5324 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5325 /* We must also check for sets of q as q may be
5326 a call clobbered hard register and there may
5327 be a call between PREV_INSN (insn) and incr. */
5328 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5329 {
5330 /* We have *p followed sometime later by q = p+size.
5331 Both p and q must be live afterward,
5332 and q is not used between INSN and its assignment.
5333 Change it to q = p, ...*q..., q = q+size.
5334 Then fall into the usual case. */
5335 rtx insns, temp;
5336
5337 start_sequence ();
5338 emit_move_insn (q, incr_reg);
5339 insns = get_insns ();
5340 end_sequence ();
5341
5342 if (basic_block_for_insn)
5343 for (temp = insns; temp; temp = NEXT_INSN (temp))
5344 set_block_for_insn (temp, pbi->bb);
5345
5346 /* If we can't make the auto-inc, or can't make the
5347 replacement into Y, exit. There's no point in making
5348 the change below if we can't do the auto-inc and doing
5349 so is not correct in the pre-inc case. */
5350
5351 XEXP (inc, 0) = q;
5352 validate_change (insn, &XEXP (mem, 0), inc, 1);
5353 validate_change (incr, &XEXP (y, opnum), q, 1);
5354 if (! apply_change_group ())
5355 return;
5356
5357 /* We now know we'll be doing this change, so emit the
5358 new insn(s) and do the updates. */
5359 emit_insns_before (insns, insn);
5360
5361 if (pbi->bb->head == insn)
5362 pbi->bb->head = insns;
5363
5364 /* INCR will become a NOTE and INSN won't contain a
5365 use of INCR_REG. If a use of INCR_REG was just placed in
5366 the insn before INSN, make that the next use.
5367 Otherwise, invalidate it. */
5368 if (GET_CODE (PREV_INSN (insn)) == INSN
5369 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5370 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5371 pbi->reg_next_use[regno] = PREV_INSN (insn);
5372 else
5373 pbi->reg_next_use[regno] = 0;
5374
5375 incr_reg = q;
5376 regno = REGNO (q);
5377
5378 /* REGNO is now used in INCR which is below INSN, but
5379 it previously wasn't live here. If we don't mark
5380 it as live, we'll put a REG_DEAD note for it
5381 on this insn, which is incorrect. */
5382 SET_REGNO_REG_SET (pbi->reg_live, regno);
5383
5384 /* If there are any calls between INSN and INCR, show
5385 that REGNO now crosses them. */
5386 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5387 if (GET_CODE (temp) == CALL_INSN)
5388 REG_N_CALLS_CROSSED (regno)++;
5389 }
5390 else
5391 return;
5392
5393 /* If we haven't returned, it means we were able to make the
5394 auto-inc, so update the status. First, record that this insn
5395 has an implicit side effect. */
5396
5397 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5398
5399 /* Modify the old increment-insn to simply copy
5400 the already-incremented value of our register. */
5401 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5402 abort ();
5403
5404 /* If that makes it a no-op (copying the register into itself) delete
5405 it so it won't appear to be a "use" and a "set" of this
5406 register. */
5407 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5408 {
5409 /* If the original source was dead, it's dead now. */
5410 rtx note;
5411
5412 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5413 {
5414 remove_note (incr, note);
5415 if (XEXP (note, 0) != incr_reg)
5416 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5417 }
5418
5419 PUT_CODE (incr, NOTE);
5420 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5421 NOTE_SOURCE_FILE (incr) = 0;
5422 }
5423
5424 if (regno >= FIRST_PSEUDO_REGISTER)
5425 {
5426 /* Count an extra reference to the reg. When a reg is
5427 incremented, spilling it is worse, so we want to make
5428 that less likely. */
5429 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
5430
5431 /* Count the increment as a setting of the register,
5432 even though it isn't a SET in rtl. */
5433 REG_N_SETS (regno)++;
5434 }
5435 }
5436
5437 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5438 reference. */
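/* E.g. (illustrative): for (mem:SI (reg 100)) whose next use of reg 100 is
   (set (reg 100) (plus (reg 100) (const_int 4))), we try a POST_INC; an
   increment of -4 suggests POST_DEC, and matching nonzero offsets in the
   address suggest the PRE_INC / PRE_DEC forms.  */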
5439
5440 static void
5441 find_auto_inc (pbi, x, insn)
5442 struct propagate_block_info *pbi;
5443 rtx x;
5444 rtx insn;
5445 {
5446 rtx addr = XEXP (x, 0);
5447 HOST_WIDE_INT offset = 0;
5448 rtx set, y, incr, inc_val;
5449 int regno;
5450 int size = GET_MODE_SIZE (GET_MODE (x));
5451
5452 if (GET_CODE (insn) == JUMP_INSN)
5453 return;
5454
5455 /* Here we detect use of an index register which might be good for
5456 postincrement, postdecrement, preincrement, or predecrement. */
5457
5458 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5459 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5460
5461 if (GET_CODE (addr) != REG)
5462 return;
5463
5464 regno = REGNO (addr);
5465
5466 /* Is the next use an increment that might make auto-increment? */
5467 incr = pbi->reg_next_use[regno];
5468 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5469 return;
5470 set = single_set (incr);
5471 if (set == 0 || GET_CODE (set) != SET)
5472 return;
5473 y = SET_SRC (set);
5474
5475 if (GET_CODE (y) != PLUS)
5476 return;
5477
5478 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5479 inc_val = XEXP (y, 1);
5480 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5481 inc_val = XEXP (y, 0);
5482 else
5483 return;
5484
5485 if (GET_CODE (inc_val) == CONST_INT)
5486 {
5487 if (HAVE_POST_INCREMENT
5488 && (INTVAL (inc_val) == size && offset == 0))
5489 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5490 incr, addr);
5491 else if (HAVE_POST_DECREMENT
5492 && (INTVAL (inc_val) == -size && offset == 0))
5493 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5494 incr, addr);
5495 else if (HAVE_PRE_INCREMENT
5496 && (INTVAL (inc_val) == size && offset == size))
5497 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5498 incr, addr);
5499 else if (HAVE_PRE_DECREMENT
5500 && (INTVAL (inc_val) == -size && offset == -size))
5501 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5502 incr, addr);
5503 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5504 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5505 gen_rtx_PLUS (Pmode,
5506 addr,
5507 inc_val)),
5508 insn, x, incr, addr);
5509 }
5510 else if (GET_CODE (inc_val) == REG
5511 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5512 NEXT_INSN (incr)))
5513
5514 {
5515 if (HAVE_POST_MODIFY_REG && offset == 0)
5516 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5517 gen_rtx_PLUS (Pmode,
5518 addr,
5519 inc_val)),
5520 insn, x, incr, addr);
5521 }
5522 }
5523
5524 #endif /* AUTO_INC_DEC */
5525 \f
5526 static void
5527 mark_used_reg (pbi, reg, cond, insn)
5528 struct propagate_block_info *pbi;
5529 rtx reg;
5530 rtx cond ATTRIBUTE_UNUSED;
5531 rtx insn;
5532 {
5533 int regno = REGNO (reg);
5534 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5535 int some_was_dead = ! some_was_live;
5536 int some_not_set;
5537 int n;
5538
5539 /* A hard reg in a wide mode may really be multiple registers.
5540 If so, mark all of them just like the first. */
5541 if (regno < FIRST_PSEUDO_REGISTER)
5542 {
5543 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5544 while (--n > 0)
5545 {
5546 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5547 some_was_live |= needed_regno;
5548 some_was_dead |= ! needed_regno;
5549 }
5550 }
5551
5552 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5553 {
5554 /* Record where each reg is used, so when the reg is set we know
5555 the next insn that uses it. */
5556 pbi->reg_next_use[regno] = insn;
5557 }
5558
5559 if (pbi->flags & PROP_REG_INFO)
5560 {
5561 if (regno < FIRST_PSEUDO_REGISTER)
5562 {
5563 /* If this is a register we are going to try to eliminate,
5564 don't mark it live here. If we are successful in
5565 eliminating it, it need not be live unless it is used for
5566 pseudos, in which case it will have been set live when it
5567 was allocated to the pseudos. If the register will not
5568 be eliminated, reload will set it live at that point.
5569
5570 Otherwise, record that this function uses this register. */
5571 /* ??? The PPC backend tries to "eliminate" the pic
5572 register to itself. This should be fixed. In the
5573 meantime, hack around it. */
5574
5575 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5576 && (regno == FRAME_POINTER_REGNUM
5577 || regno == ARG_POINTER_REGNUM)))
5578 {
5579 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5580 do
5581 regs_ever_live[regno + --n] = 1;
5582 while (n > 0);
5583 }
5584 }
5585 else
5586 {
5587 /* Keep track of which basic block each reg appears in. */
5588
5589 register int blocknum = pbi->bb->index;
5590 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5591 REG_BASIC_BLOCK (regno) = blocknum;
5592 else if (REG_BASIC_BLOCK (regno) != blocknum)
5593 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5594
5595 /* Count (weighted) number of uses of each reg. */
5596 REG_N_REFS (regno) += (optimize_size ? 1
5597 : pbi->bb->loop_depth + 1);
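	  /* For instance (a hedged illustration): a use at loop_depth 2
	     adds 3 to REG_N_REFS, while when optimizing for size (-Os)
	     every use adds just 1.  */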
5598 }
5599 }
5600
5601 /* Find out if any part of the register was set in this insn. */
5602 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5603 if (regno < FIRST_PSEUDO_REGISTER)
5604 {
5605 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5606 while (--n > 0)
5607 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5608 }
5609
5610 /* Record and count the insns in which a reg dies. If it is used in
5611 this insn and was dead below the insn then it dies in this insn.
5612 If it was set in this insn, we do not make a REG_DEAD note;
5613 likewise if we already made such a note. */
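  /* As an illustration (hypothetical register numbers): if an insn uses a
     two-word hard register pair 4/5 whose value is dead afterwards, but the
     insn itself stores only into register 4, a REG_DEAD note is made for
     register 5 alone; that is the partial-note case handled below.  */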
5614 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5615 && some_was_dead
5616 && some_not_set)
5617 {
5618 /* Check for the case where the register dying partially
5619 overlaps the register set by this insn. */
5620 if (regno < FIRST_PSEUDO_REGISTER
5621 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5622 {
5623 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5624 while (--n >= 0)
5625 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5626 }
5627
5628 /* If none of the words in X is needed, make a REG_DEAD note.
5629 Otherwise, we must make partial REG_DEAD notes. */
5630 if (! some_was_live)
5631 {
5632 if ((pbi->flags & PROP_DEATH_NOTES)
5633 && ! find_regno_note (insn, REG_DEAD, regno))
5634 REG_NOTES (insn)
5635 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5636
5637 if (pbi->flags & PROP_REG_INFO)
5638 REG_N_DEATHS (regno)++;
5639 }
5640 else
5641 {
5642 /* Don't make a REG_DEAD note for a part of a register
5643 that is set in the insn. */
5644
5645 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5646 for (; n >= regno; n--)
5647 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5648 && ! dead_or_set_regno_p (insn, n))
5649 REG_NOTES (insn)
5650 = alloc_EXPR_LIST (REG_DEAD,
5651 gen_rtx_REG (reg_raw_mode[n], n),
5652 REG_NOTES (insn));
5653 }
5654 }
5655
5656 SET_REGNO_REG_SET (pbi->reg_live, regno);
5657 if (regno < FIRST_PSEUDO_REGISTER)
5658 {
5659 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5660 while (--n > 0)
5661 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5662 }
5663
5664 #ifdef HAVE_conditional_execution
5665 /* If this is a conditional use, record that fact. If it is later
5666 conditionally set, we'll know to kill the register. */
5667 if (cond != NULL_RTX)
5668 {
5669 splay_tree_node node;
5670 struct reg_cond_life_info *rcli;
5671 rtx ncond;
5672
5673 if (some_was_live)
5674 {
5675 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5676 if (node == NULL)
5677 {
5678 /* The register was unconditionally live previously.
5679 No need to do anything. */
5680 }
5681 else
5682 {
5683 /* The register was conditionally live previously.
5684 Subtract the new life cond from the old death cond. */
5685 rcli = (struct reg_cond_life_info *) node->value;
5686 ncond = rcli->condition;
5687 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
5688
5689 /* If the register is now unconditionally live, remove the
5690 entry in the splay_tree. */
5691 if (ncond == const0_rtx)
5692 {
5693 rcli->condition = NULL_RTX;
5694 splay_tree_remove (pbi->reg_cond_dead, regno);
5695 }
5696 else
5697 {
5698 rcli->condition = ncond;
5699 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5700 }
5701 }
5702 }
5703 else
5704 {
5705 /* The register was not previously live at all. Record
5706 the condition under which it is still dead. */
5707 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5708 rcli->condition = not_reg_cond (cond);
5709 splay_tree_insert (pbi->reg_cond_dead, regno,
5710 (splay_tree_value) rcli);
5711
5712 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5713 }
5714 }
5715 else if (some_was_live)
5716 {
5717 splay_tree_node node;
5718 struct reg_cond_life_info *rcli;
5719
5720 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5721 if (node != NULL)
5722 {
5723 /* The register was conditionally live previously, but is now
5724 unconditionally so. Remove it from the conditionally dead
5725 list, so that a conditional set won't cause us to think
5726 it dead. */
5727 rcli = (struct reg_cond_life_info *) node->value;
5728 rcli->condition = NULL_RTX;
5729 splay_tree_remove (pbi->reg_cond_dead, regno);
5730 }
5731 }
5732
5733 #endif
5734 }
5735
5736 /* Scan expression X and, for each register it uses, set the
5737 corresponding bit in PBI->REG_LIVE, i.e. mark that register
5738 as live.
5739
5740 INSN is the containing instruction. If INSN is dead, this function
5741 is not called. */
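/* An informal example of the walk (hypothetical register numbers): for
   (set (reg 1) (plus (reg 2) (mem (reg 3)))), registers 2 and 3 are marked
   as used (reg 3 because it forms the address of the MEM), while reg 1,
   being only the SET destination, is not marked used here.  */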
5742
5743 static void
5744 mark_used_regs (pbi, x, cond, insn)
5745 struct propagate_block_info *pbi;
5746 rtx x, cond, insn;
5747 {
5748 register RTX_CODE code;
5749 register int regno;
5750 int flags = pbi->flags;
5751
5752 retry:
5753 code = GET_CODE (x);
5754 switch (code)
5755 {
5756 case LABEL_REF:
5757 case SYMBOL_REF:
5758 case CONST_INT:
5759 case CONST:
5760 case CONST_DOUBLE:
5761 case PC:
5762 case ADDR_VEC:
5763 case ADDR_DIFF_VEC:
5764 return;
5765
5766 #ifdef HAVE_cc0
5767 case CC0:
5768 pbi->cc0_live = 1;
5769 return;
5770 #endif
5771
5772 case CLOBBER:
5773 /* If we are clobbering a MEM, mark any registers inside the address
5774 as being used. */
5775 if (GET_CODE (XEXP (x, 0)) == MEM)
5776 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5777 return;
5778
5779 case MEM:
5780 /* Don't bother watching stores to mems if this is not the
5781 final pass. We'll not be deleting dead stores this round. */
5782 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5783 {
5784 /* Invalidate the data for the last MEM stored, but only if MEM is
5785 something that can be stored into. */
5786 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5787 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5788 /* Needn't clear the memory set list. */
5789 ;
5790 else
5791 {
5792 rtx temp = pbi->mem_set_list;
5793 rtx prev = NULL_RTX;
5794 rtx next;
5795
5796 while (temp)
5797 {
5798 next = XEXP (temp, 1);
5799 if (anti_dependence (XEXP (temp, 0), x))
5800 {
5801 /* Splice temp out of the list. */
5802 if (prev)
5803 XEXP (prev, 1) = next;
5804 else
5805 pbi->mem_set_list = next;
5806 free_EXPR_LIST_node (temp);
5807 }
5808 else
5809 prev = temp;
5810 temp = next;
5811 }
5812 }
5813
5814 /* If the memory reference had embedded side effects (autoincrement
5815 address modes), then we may need to kill some entries on the
5816 memory set list. */
5817 if (insn)
5818 invalidate_mems_from_autoinc (pbi, insn);
5819 }
5820
5821 #ifdef AUTO_INC_DEC
5822 if (flags & PROP_AUTOINC)
5823 find_auto_inc (pbi, x, insn);
5824 #endif
5825 break;
5826
5827 case SUBREG:
5828 #ifdef CLASS_CANNOT_CHANGE_MODE
5829 if (GET_CODE (SUBREG_REG (x)) == REG
5830 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5831 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5832 GET_MODE (SUBREG_REG (x))))
5833 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5834 #endif
5835
5836 /* While we're here, optimize this case. */
5837 x = SUBREG_REG (x);
5838 if (GET_CODE (x) != REG)
5839 goto retry;
5840 /* Fall through. */
5841
5842 case REG:
5843 /* Seeing a register used other than as the thing being set => mark it as needed. */
5844 mark_used_reg (pbi, x, cond, insn);
5845 return;
5846
5847 case SET:
5848 {
5849 register rtx testreg = SET_DEST (x);
5850 int mark_dest = 0;
5851
5852 /* If storing into MEM, don't show it as being used. But do
5853 show the address as being used. */
5854 if (GET_CODE (testreg) == MEM)
5855 {
5856 #ifdef AUTO_INC_DEC
5857 if (flags & PROP_AUTOINC)
5858 find_auto_inc (pbi, testreg, insn);
5859 #endif
5860 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5861 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5862 return;
5863 }
5864
5865 /* Storing in STRICT_LOW_PART is like storing in a reg
5866 in that this SET might be dead, so ignore it in TESTREG,
5867 but in some other ways it is like using the reg.
5868
5869 Storing in a SUBREG or a bit field is like storing the entire
5870 register in that if the register's value is not used
5871 then this SET is not needed. */
5872 while (GET_CODE (testreg) == STRICT_LOW_PART
5873 || GET_CODE (testreg) == ZERO_EXTRACT
5874 || GET_CODE (testreg) == SIGN_EXTRACT
5875 || GET_CODE (testreg) == SUBREG)
5876 {
5877 #ifdef CLASS_CANNOT_CHANGE_MODE
5878 if (GET_CODE (testreg) == SUBREG
5879 && GET_CODE (SUBREG_REG (testreg)) == REG
5880 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5881 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5882 GET_MODE (testreg)))
5883 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5884 #endif
5885
5886 /* Modifying a single register in an alternate mode
5887 does not use any of the old value. But these other
5888 ways of storing in a register do use the old value. */
5889 if (GET_CODE (testreg) == SUBREG
5890 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5891 ;
5892 else
5893 mark_dest = 1;
5894
5895 testreg = XEXP (testreg, 0);
5896 }
5897
5898 /* If this is a store into a register, recursively scan the
5899 value being stored. */
5900
5901 if ((GET_CODE (testreg) == PARALLEL
5902 && GET_MODE (testreg) == BLKmode)
5903 || (GET_CODE (testreg) == REG
5904 && (regno = REGNO (testreg),
5905 ! (regno == FRAME_POINTER_REGNUM
5906 && (! reload_completed || frame_pointer_needed)))
5907 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5908 && ! (regno == HARD_FRAME_POINTER_REGNUM
5909 && (! reload_completed || frame_pointer_needed))
5910 #endif
5911 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5912 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5913 #endif
5914 ))
5915 {
5916 if (mark_dest)
5917 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5918 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5919 return;
5920 }
5921 }
5922 break;
5923
5924 case ASM_OPERANDS:
5925 case UNSPEC_VOLATILE:
5926 case TRAP_IF:
5927 case ASM_INPUT:
5928 {
5929 /* Traditional and volatile asm instructions must be considered to use
5930 and clobber all hard registers, all pseudo-registers and all of
5931 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5932
5933 Consider for instance a volatile asm that changes the fpu rounding
5934 mode. An insn should not be moved across this even if it only uses
5935 pseudo-regs because it might give an incorrectly rounded result.
5936
5937 ?!? Unfortunately, marking all hard registers as live causes massive
5938 problems for the register allocator and marking all pseudos as live
5939 creates mountains of uninitialized variable warnings.
5940
5941 So for now, just clear the memory set list and mark any regs
5942 we can find in ASM_OPERANDS as used. */
5943 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5944 free_EXPR_LIST_list (&pbi->mem_set_list);
5945
5946 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5947 We can not just fall through here since then we would be confused
5948 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5949 a traditional asm, unlike its normal usage. */
5950 if (code == ASM_OPERANDS)
5951 {
5952 int j;
5953
5954 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5955 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5956 }
5957 break;
5958 }
5959
5960 case COND_EXEC:
5961 if (cond != NULL_RTX)
5962 abort ();
5963
5964 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5965
5966 cond = COND_EXEC_TEST (x);
5967 x = COND_EXEC_CODE (x);
5968 goto retry;
5969
5970 case PHI:
5971 /* We _do_not_ want to scan operands of phi nodes. Operands of
5972 a phi function are evaluated only when control reaches this
5973 block along a particular edge. Therefore, regs that appear
5974 as arguments to phi should not be added to the global live at
5975 start. */
5976 return;
5977
5978 default:
5979 break;
5980 }
5981
5982 /* Recursively scan the operands of this expression. */
5983
5984 {
5985 register const char *fmt = GET_RTX_FORMAT (code);
5986 register int i;
5987
5988 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5989 {
5990 if (fmt[i] == 'e')
5991 {
5992 /* Tail recursive case: save a function call level. */
5993 if (i == 0)
5994 {
5995 x = XEXP (x, 0);
5996 goto retry;
5997 }
5998 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5999 }
6000 else if (fmt[i] == 'E')
6001 {
6002 register int j;
6003 for (j = 0; j < XVECLEN (x, i); j++)
6004 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
6005 }
6006 }
6007 }
6008 }
6009 \f
6010 #ifdef AUTO_INC_DEC
6011
6012 static int
6013 try_pre_increment_1 (pbi, insn)
6014 struct propagate_block_info *pbi;
6015 rtx insn;
6016 {
6017 /* Find the next use of this reg. If in same basic block,
6018 make it do pre-increment or pre-decrement if appropriate. */
6019 rtx x = single_set (insn);
6020 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6021 * INTVAL (XEXP (SET_SRC (x), 1)));
6022 int regno = REGNO (SET_DEST (x));
6023 rtx y = pbi->reg_next_use[regno];
6024 if (y != 0
6025 && SET_DEST (x) != stack_pointer_rtx
6026 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6027 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6028 mode would be better. */
6029 && ! dead_or_set_p (y, SET_DEST (x))
6030 && try_pre_increment (y, SET_DEST (x), amount))
6031 {
6032 /* We have found a suitable auto-increment and already changed
6033 insn Y to do it. So flush this increment instruction. */
6034 propagate_block_delete_insn (pbi->bb, insn);
6035
6036 /* Count a reference to this reg for the increment insn we are
6037 deleting. When a reg is incremented, spilling it is worse,
6038 so we want to make that less likely. */
6039 if (regno >= FIRST_PSEUDO_REGISTER)
6040 {
6041 REG_N_REFS (regno) += (optimize_size ? 1
6042 : pbi->bb->loop_depth + 1);
6043 REG_N_SETS (regno)++;
6044 }
6045
6046 /* Flush any remembered memories depending on the value of
6047 the incremented register. */
6048 invalidate_mems_from_set (pbi, SET_DEST (x));
6049
6050 return 1;
6051 }
6052 return 0;
6053 }
6054
6055 /* Try to change INSN so that it does pre-increment or pre-decrement
6056 addressing on register REG in order to add AMOUNT to REG.
6057 AMOUNT is negative for pre-decrement.
6058 Returns 1 if the change could be made.
6059 This checks everything about the validity of the result of modifying INSN. */
6060
6061 static int
6062 try_pre_increment (insn, reg, amount)
6063 rtx insn, reg;
6064 HOST_WIDE_INT amount;
6065 {
6066 register rtx use;
6067
6068 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6069 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6070 int pre_ok = 0;
6071 /* Nonzero if we can try to make a post-increment or post-decrement.
6072 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6073 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6074 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6075 int post_ok = 0;
6076
6077 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6078 int do_post = 0;
6079
6080 /* From the sign of the increment, see which possibilities are conceivable
6081 on this target machine. */
6082 if (HAVE_PRE_INCREMENT && amount > 0)
6083 pre_ok = 1;
6084 if (HAVE_POST_INCREMENT && amount > 0)
6085 post_ok = 1;
6086
6087 if (HAVE_PRE_DECREMENT && amount < 0)
6088 pre_ok = 1;
6089 if (HAVE_POST_DECREMENT && amount < 0)
6090 post_ok = 1;
6091
6092 if (! (pre_ok || post_ok))
6093 return 0;
6094
6095 /* It is not safe to add a side effect to a jump insn
6096 because if the incremented register is spilled and must be reloaded
6097 there would be no way to store the incremented value back in memory. */
6098
6099 if (GET_CODE (insn) == JUMP_INSN)
6100 return 0;
6101
6102 use = 0;
6103 if (pre_ok)
6104 use = find_use_as_address (PATTERN (insn), reg, 0);
6105 if (post_ok && (use == 0 || use == (rtx) 1))
6106 {
6107 use = find_use_as_address (PATTERN (insn), reg, -amount);
6108 do_post = 1;
6109 }
6110
6111 if (use == 0 || use == (rtx) 1)
6112 return 0;
6113
6114 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6115 return 0;
6116
6117 /* See if this combination of instruction and addressing mode exists. */
6118 if (! validate_change (insn, &XEXP (use, 0),
6119 gen_rtx_fmt_e (amount > 0
6120 ? (do_post ? POST_INC : PRE_INC)
6121 : (do_post ? POST_DEC : PRE_DEC),
6122 Pmode, reg), 0))
6123 return 0;
6124
6125 /* Record that this insn now has an implicit side effect on REG. */
6126 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6127 return 1;
6128 }
6129
6130 #endif /* AUTO_INC_DEC */
6131 \f
6132 /* Find the place in the rtx X where REG is used as a memory address.
6133 Return the MEM rtx that so uses it.
6134 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6135 (plus REG (const_int PLUSCONST)).
6136
6137 If such an address does not appear, return 0.
6138 If REG appears more than once, or is used other than in such an address,
6139 return (rtx)1. */
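/* Illustrative calls (hypothetical rtl, not from a real dump): with
   X = (set (reg 1) (mem (plus (reg 2) (const_int 4)))),
   find_use_as_address (X, (reg 2), 4) returns the MEM, while
   find_use_as_address (X, (reg 2), 0) returns (rtx) 1, since (reg 2) is
   then used other than as the bare address of a MEM.  */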
6140
6141 rtx
6142 find_use_as_address (x, reg, plusconst)
6143 register rtx x;
6144 rtx reg;
6145 HOST_WIDE_INT plusconst;
6146 {
6147 enum rtx_code code = GET_CODE (x);
6148 const char *fmt = GET_RTX_FORMAT (code);
6149 register int i;
6150 register rtx value = 0;
6151 register rtx tem;
6152
6153 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6154 return x;
6155
6156 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6157 && XEXP (XEXP (x, 0), 0) == reg
6158 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6159 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6160 return x;
6161
6162 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6163 {
6164 /* If REG occurs inside a MEM used in a bit-field reference,
6165 that is unacceptable. */
6166 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6167 return (rtx) (HOST_WIDE_INT) 1;
6168 }
6169
6170 if (x == reg)
6171 return (rtx) (HOST_WIDE_INT) 1;
6172
6173 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6174 {
6175 if (fmt[i] == 'e')
6176 {
6177 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6178 if (value == 0)
6179 value = tem;
6180 else if (tem != 0)
6181 return (rtx) (HOST_WIDE_INT) 1;
6182 }
6183 else if (fmt[i] == 'E')
6184 {
6185 register int j;
6186 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6187 {
6188 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6189 if (value == 0)
6190 value = tem;
6191 else if (tem != 0)
6192 return (rtx) (HOST_WIDE_INT) 1;
6193 }
6194 }
6195 }
6196
6197 return value;
6198 }
6199 \f
6200 /* Write information about registers and basic blocks into FILE.
6201 This is part of making a debugging dump. */
6202
6203 void
6204 dump_regset (r, outf)
6205 regset r;
6206 FILE *outf;
6207 {
6208 int i;
6209 if (r == NULL)
6210 {
6211 fputs (" (nil)", outf);
6212 return;
6213 }
6214
6215 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6216 {
6217 fprintf (outf, " %d", i);
6218 if (i < FIRST_PSEUDO_REGISTER)
6219 fprintf (outf, " [%s]",
6220 reg_names[i]);
6221 });
6222 }
6223
6224 void
6225 debug_regset (r)
6226 regset r;
6227 {
6228 dump_regset (r, stderr);
6229 putc ('\n', stderr);
6230 }
6231
6232 void
6233 dump_flow_info (file)
6234 FILE *file;
6235 {
6236 register int i;
6237 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6238
6239 fprintf (file, "%d registers.\n", max_regno);
6240 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6241 if (REG_N_REFS (i))
6242 {
6243 enum reg_class class, altclass;
6244 fprintf (file, "\nRegister %d used %d times across %d insns",
6245 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6246 if (REG_BASIC_BLOCK (i) >= 0)
6247 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6248 if (REG_N_SETS (i))
6249 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6250 (REG_N_SETS (i) == 1) ? "" : "s");
6251 if (REG_USERVAR_P (regno_reg_rtx[i]))
6252 fprintf (file, "; user var");
6253 if (REG_N_DEATHS (i) != 1)
6254 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6255 if (REG_N_CALLS_CROSSED (i) == 1)
6256 fprintf (file, "; crosses 1 call");
6257 else if (REG_N_CALLS_CROSSED (i))
6258 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6259 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6260 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6261 class = reg_preferred_class (i);
6262 altclass = reg_alternate_class (i);
6263 if (class != GENERAL_REGS || altclass != ALL_REGS)
6264 {
6265 if (altclass == ALL_REGS || class == ALL_REGS)
6266 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6267 else if (altclass == NO_REGS)
6268 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6269 else
6270 fprintf (file, "; pref %s, else %s",
6271 reg_class_names[(int) class],
6272 reg_class_names[(int) altclass]);
6273 }
6274 if (REG_POINTER (regno_reg_rtx[i]))
6275 fprintf (file, "; pointer");
6276 fprintf (file, ".\n");
6277 }
6278
6279 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6280 for (i = 0; i < n_basic_blocks; i++)
6281 {
6282 register basic_block bb = BASIC_BLOCK (i);
6283 register edge e;
6284
6285 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6286 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
6287
6288 fprintf (file, "Predecessors: ");
6289 for (e = bb->pred; e; e = e->pred_next)
6290 dump_edge_info (file, e, 0);
6291
6292 fprintf (file, "\nSuccessors: ");
6293 for (e = bb->succ; e; e = e->succ_next)
6294 dump_edge_info (file, e, 1);
6295
6296 fprintf (file, "\nRegisters live at start:");
6297 dump_regset (bb->global_live_at_start, file);
6298
6299 fprintf (file, "\nRegisters live at end:");
6300 dump_regset (bb->global_live_at_end, file);
6301
6302 putc ('\n', file);
6303 }
6304
6305 putc ('\n', file);
6306 }
6307
6308 void
6309 debug_flow_info ()
6310 {
6311 dump_flow_info (stderr);
6312 }
6313
6314 static void
6315 dump_edge_info (file, e, do_succ)
6316 FILE *file;
6317 edge e;
6318 int do_succ;
6319 {
6320 basic_block side = (do_succ ? e->dest : e->src);
6321
6322 if (side == ENTRY_BLOCK_PTR)
6323 fputs (" ENTRY", file);
6324 else if (side == EXIT_BLOCK_PTR)
6325 fputs (" EXIT", file);
6326 else
6327 fprintf (file, " %d", side->index);
6328
6329 if (e->count)
6330 fprintf (file, " count:%d", e->count);
6331
6332 if (e->flags)
6333 {
6334 static const char * const bitnames[] = {
6335 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6336 };
6337 int comma = 0;
6338 int i, flags = e->flags;
6339
6340 fputc (' ', file);
6341 fputc ('(', file);
6342 for (i = 0; flags; i++)
6343 if (flags & (1 << i))
6344 {
6345 flags &= ~(1 << i);
6346
6347 if (comma)
6348 fputc (',', file);
6349 if (i < (int) ARRAY_SIZE (bitnames))
6350 fputs (bitnames[i], file);
6351 else
6352 fprintf (file, "%d", i);
6353 comma = 1;
6354 }
6355 fputc (')', file);
6356 }
6357 }
6358 \f
6359 /* Print out one basic block with live information at start and end. */
6360
6361 void
6362 dump_bb (bb, outf)
6363 basic_block bb;
6364 FILE *outf;
6365 {
6366 rtx insn;
6367 rtx last;
6368 edge e;
6369
6370 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6371 bb->index, bb->loop_depth, bb->count);
6372 if (bb->eh_beg != -1 || bb->eh_end != -1)
6373 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
6374 putc ('\n', outf);
6375
6376 fputs (";; Predecessors: ", outf);
6377 for (e = bb->pred; e; e = e->pred_next)
6378 dump_edge_info (outf, e, 0);
6379 putc ('\n', outf);
6380
6381 fputs (";; Registers live at start:", outf);
6382 dump_regset (bb->global_live_at_start, outf);
6383 putc ('\n', outf);
6384
6385 for (insn = bb->head, last = NEXT_INSN (bb->end);
6386 insn != last;
6387 insn = NEXT_INSN (insn))
6388 print_rtl_single (outf, insn);
6389
6390 fputs (";; Registers live at end:", outf);
6391 dump_regset (bb->global_live_at_end, outf);
6392 putc ('\n', outf);
6393
6394 fputs (";; Successors: ", outf);
6395 for (e = bb->succ; e; e = e->succ_next)
6396 dump_edge_info (outf, e, 1);
6397 putc ('\n', outf);
6398 }
6399
6400 void
6401 debug_bb (bb)
6402 basic_block bb;
6403 {
6404 dump_bb (bb, stderr);
6405 }
6406
6407 void
6408 debug_bb_n (n)
6409 int n;
6410 {
6411 dump_bb (BASIC_BLOCK (n), stderr);
6412 }
6413
6414 /* Like print_rtl, but also print out live information for the start of each
6415 basic block. */
6416
6417 void
6418 print_rtl_with_bb (outf, rtx_first)
6419 FILE *outf;
6420 rtx rtx_first;
6421 {
6422 register rtx tmp_rtx;
6423
6424 if (rtx_first == 0)
6425 fprintf (outf, "(nil)\n");
6426 else
6427 {
6428 int i;
6429 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6430 int max_uid = get_max_uid ();
6431 basic_block *start = (basic_block *)
6432 xcalloc (max_uid, sizeof (basic_block));
6433 basic_block *end = (basic_block *)
6434 xcalloc (max_uid, sizeof (basic_block));
6435 enum bb_state *in_bb_p = (enum bb_state *)
6436 xcalloc (max_uid, sizeof (enum bb_state));
6437
6438 for (i = n_basic_blocks - 1; i >= 0; i--)
6439 {
6440 basic_block bb = BASIC_BLOCK (i);
6441 rtx x;
6442
6443 start[INSN_UID (bb->head)] = bb;
6444 end[INSN_UID (bb->end)] = bb;
6445 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6446 {
6447 enum bb_state state = IN_MULTIPLE_BB;
6448 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6449 state = IN_ONE_BB;
6450 in_bb_p[INSN_UID (x)] = state;
6451
6452 if (x == bb->end)
6453 break;
6454 }
6455 }
6456
6457 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6458 {
6459 int did_output;
6460 basic_block bb;
6461
6462 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6463 {
6464 fprintf (outf, ";; Start of basic block %d, registers live:",
6465 bb->index);
6466 dump_regset (bb->global_live_at_start, outf);
6467 putc ('\n', outf);
6468 }
6469
6470 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6471 && GET_CODE (tmp_rtx) != NOTE
6472 && GET_CODE (tmp_rtx) != BARRIER)
6473 fprintf (outf, ";; Insn is not within a basic block\n");
6474 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6475 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6476
6477 did_output = print_rtl_single (outf, tmp_rtx);
6478
6479 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6480 {
6481 fprintf (outf, ";; End of basic block %d, registers live:\n",
6482 bb->index);
6483 dump_regset (bb->global_live_at_end, outf);
6484 putc ('\n', outf);
6485 }
6486
6487 if (did_output)
6488 putc ('\n', outf);
6489 }
6490
6491 free (start);
6492 free (end);
6493 free (in_bb_p);
6494 }
6495
6496 if (current_function_epilogue_delay_list != 0)
6497 {
6498 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6499 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6500 tmp_rtx = XEXP (tmp_rtx, 1))
6501 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6502 }
6503 }
6504
6505 /* Dump the rtl into the current debugging dump file, then abort. */
6506 static void
6507 print_rtl_and_abort ()
6508 {
6509 if (rtl_dump_file)
6510 {
6511 print_rtl_with_bb (rtl_dump_file, get_insns ());
6512 fclose (rtl_dump_file);
6513 }
6514 abort ();
6515 }
6516
6517 /* Recompute register set/reference counts immediately prior to register
6518 allocation.
6519
6520 This avoids problems with set/reference counts changing to/from values
6521 which have special meanings to the register allocators.
6522
6523 Additionally, the reference counts are the primary component used by the
6524 register allocators to prioritize pseudos for allocation to hard regs.
6525 More accurate reference counts generally lead to better register allocation.
6526
6527 F is the first insn to be scanned.
6528
6529 LOOP_STEP denotes how much loop_depth should be incremented per
6530 loop nesting level in order to increase the ref count more for
6531 references in a loop.
6532
6533 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6534 possibly other information which is used by the register allocators. */
6535
6536 void
6537 recompute_reg_usage (f, loop_step)
6538 rtx f ATTRIBUTE_UNUSED;
6539 int loop_step ATTRIBUTE_UNUSED;
6540 {
6541 allocate_reg_life_data ();
6542 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6543 }
6544
6545 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6546 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6547 of the number of registers that died. */
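/* A usage sketch (hedged; assumes a caller with an up-to-date CFG):
   count_or_remove_death_notes (NULL, 0) merely counts the registers
   mentioned in REG_DEAD notes over all blocks, while
   count_or_remove_death_notes (NULL, 1) additionally deletes every
   REG_DEAD and REG_UNUSED note it encounters.  */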
6548
6549 int
6550 count_or_remove_death_notes (blocks, kill)
6551 sbitmap blocks;
6552 int kill;
6553 {
6554 int i, count = 0;
6555
6556 for (i = n_basic_blocks - 1; i >= 0; --i)
6557 {
6558 basic_block bb;
6559 rtx insn;
6560
6561 if (blocks && ! TEST_BIT (blocks, i))
6562 continue;
6563
6564 bb = BASIC_BLOCK (i);
6565
6566 for (insn = bb->head;; insn = NEXT_INSN (insn))
6567 {
6568 if (INSN_P (insn))
6569 {
6570 rtx *pprev = &REG_NOTES (insn);
6571 rtx link = *pprev;
6572
6573 while (link)
6574 {
6575 switch (REG_NOTE_KIND (link))
6576 {
6577 case REG_DEAD:
6578 if (GET_CODE (XEXP (link, 0)) == REG)
6579 {
6580 rtx reg = XEXP (link, 0);
6581 int n;
6582
6583 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6584 n = 1;
6585 else
6586 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6587 count += n;
6588 }
6589 /* Fall through. */
6590
6591 case REG_UNUSED:
6592 if (kill)
6593 {
6594 rtx next = XEXP (link, 1);
6595 free_EXPR_LIST_node (link);
6596 *pprev = link = next;
6597 break;
6598 }
6599 /* Fall through. */
6600
6601 default:
6602 pprev = &XEXP (link, 1);
6603 link = *pprev;
6604 break;
6605 }
6606 }
6607 }
6608
6609 if (insn == bb->end)
6610 break;
6611 }
6612 }
6613
6614 return count;
6615 }
6616
6617
6618 /* Update the block-for-insn mapping for the insns within BB. */
6619
6620 void
6621 update_bb_for_insn (bb)
6622 basic_block bb;
6623 {
6624 rtx insn;
6625
6626 if (! basic_block_for_insn)
6627 return;
6628
6629 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6630 {
6631 set_block_for_insn (insn, bb);
6632
6633 if (insn == bb->end)
6634 break;
6635 }
6636 }
6637
6638
6639 /* Record INSN's block as BB. */
6640
6641 void
6642 set_block_for_insn (insn, bb)
6643 rtx insn;
6644 basic_block bb;
6645 {
6646 size_t uid = INSN_UID (insn);
6647 if (uid >= basic_block_for_insn->num_elements)
6648 {
6649 int new_size;
6650
6651 /* Add one-eighth the size so we don't keep calling xrealloc. */
6652 new_size = uid + (uid + 7) / 8;
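      /* For example (hypothetical numbers): uid 800 gives
	 new_size = 800 + (800 + 7) / 8 = 900 elements.  */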
6653
6654 VARRAY_GROW (basic_block_for_insn, new_size);
6655 }
6656 VARRAY_BB (basic_block_for_insn, uid) = bb;
6657 }
6658
6659 /* Record INSN's block number as BB. */
6660 /* ??? This has got to go. */
6661
6662 void
6663 set_block_num (insn, bb)
6664 rtx insn;
6665 int bb;
6666 {
6667 set_block_for_insn (insn, BASIC_BLOCK (bb));
6668 }
6669 \f
6670 /* Verify CFG consistency. This function checks some CFG invariants and
6671 aborts when something is wrong. Hopefully this function will help to
6672 convert many optimization passes to preserve CFG consistency.
6673
6674 Currently it does the following checks:
6675
6676 - test head/end pointers
6677 - overlapping of basic blocks
6678 - edge list correctness
6679 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6680 - tails of basic blocks (ensure that boundary is necessary)
6681 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6682 and NOTE_INSN_BASIC_BLOCK
6683 - check that all insns are in the basic blocks
6684 (except the switch handling code, barriers and notes)
6685 - check that all returns are followed by barriers
6686
6687 In the future it can be extended to check a lot of other stuff as well
6688 (reachability of basic blocks, life information, etc.). */
6689
6690 void
6691 verify_flow_info ()
6692 {
6693 const int max_uid = get_max_uid ();
6694 const rtx rtx_first = get_insns ();
6695 rtx last_head = get_last_insn ();
6696 basic_block *bb_info;
6697 rtx x;
6698 int i, last_bb_num_seen, num_bb_notes, err = 0;
6699
6700 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6701
6702 for (i = n_basic_blocks - 1; i >= 0; i--)
6703 {
6704 basic_block bb = BASIC_BLOCK (i);
6705 rtx head = bb->head;
6706 rtx end = bb->end;
6707
6708 /* Verify the end of the basic block is in the INSN chain. */
6709 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6710 if (x == end)
6711 break;
6712 if (!x)
6713 {
6714 error ("End insn %d for block %d not found in the insn stream.",
6715 INSN_UID (end), bb->index);
6716 err = 1;
6717 }
6718
6719 /* Work backwards from the end to the head of the basic block
6720 to verify the head is in the RTL chain. */
6721 for (; x != NULL_RTX; x = PREV_INSN (x))
6722 {
6723 /* While walking over the insn chain, verify insns appear
6724 in only one basic block and initialize the BB_INFO array
6725 used by other passes. */
6726 if (bb_info[INSN_UID (x)] != NULL)
6727 {
6728 error ("Insn %d is in multiple basic blocks (%d and %d)",
6729 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6730 err = 1;
6731 }
6732 bb_info[INSN_UID (x)] = bb;
6733
6734 if (x == head)
6735 break;
6736 }
6737 if (!x)
6738 {
6739 error ("Head insn %d for block %d not found in the insn stream.",
6740 INSN_UID (head), bb->index);
6741 err = 1;
6742 }
6743
6744 last_head = x;
6745 }
6746
6747 /* Now check the basic blocks (boundaries etc.) */
6748 for (i = n_basic_blocks - 1; i >= 0; i--)
6749 {
6750 basic_block bb = BASIC_BLOCK (i);
6751 /* Check correctness of edge lists. */
6752 edge e;
6753
6754 e = bb->succ;
6755 while (e)
6756 {
6757 if (e->src != bb)
6758 {
6759 fprintf (stderr,
6760 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6761 bb->index);
6762 fprintf (stderr, "Predecessor: ");
6763 dump_edge_info (stderr, e, 0);
6764 fprintf (stderr, "\nSuccessor: ");
6765 dump_edge_info (stderr, e, 1);
6766 fflush (stderr);
6767 err = 1;
6768 }
6769 if (e->dest != EXIT_BLOCK_PTR)
6770 {
6771 edge e2 = e->dest->pred;
6772 while (e2 && e2 != e)
6773 e2 = e2->pred_next;
6774 if (!e2)
6775 {
6776 error ("Basic block %i edge lists are corrupted", bb->index);
6777 err = 1;
6778 }
6779 }
6780 e = e->succ_next;
6781 }
6782
6783 e = bb->pred;
6784 while (e)
6785 {
6786 if (e->dest != bb)
6787 {
6788 error ("Basic block %d pred edge is corrupted", bb->index);
6789 fputs ("Predecessor: ", stderr);
6790 dump_edge_info (stderr, e, 0);
6791 fputs ("\nSuccessor: ", stderr);
6792 dump_edge_info (stderr, e, 1);
6793 fputc ('\n', stderr);
6794 err = 1;
6795 }
6796 if (e->src != ENTRY_BLOCK_PTR)
6797 {
6798 edge e2 = e->src->succ;
6799 while (e2 && e2 != e)
6800 e2 = e2->succ_next;
6801 if (!e2)
6802 {
6803 error ("Basic block %i edge lists are corrupted", bb->index);
6804 err = 1;
6805 }
6806 }
6807 e = e->pred_next;
6808 }
6809
6810 /* OK, pointers are correct. Now check the header of the basic
6811 block. It ought to contain an optional CODE_LABEL followed
6812 by the NOTE_INSN_BASIC_BLOCK note. */
6813 x = bb->head;
6814 if (GET_CODE (x) == CODE_LABEL)
6815 {
6816 if (bb->end == x)
6817 {
6818 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6819 bb->index);
6820 err = 1;
6821 }
6822 x = NEXT_INSN (x);
6823 }
6824 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6825 {
6826 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6827 bb->index);
6828 err = 1;
6829 }
6830
6831 if (bb->end == x)
6832 {
6833 /* Do checks for empty blocks here */
6834 }
6835 else
6836 {
6837 x = NEXT_INSN (x);
6838 while (x)
6839 {
6840 if (NOTE_INSN_BASIC_BLOCK_P (x))
6841 {
6842 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6843 INSN_UID (x), bb->index);
6844 err = 1;
6845 }
6846
6847 if (x == bb->end)
6848 break;
6849
6850 if (GET_CODE (x) == JUMP_INSN
6851 || GET_CODE (x) == CODE_LABEL
6852 || GET_CODE (x) == BARRIER)
6853 {
6854 error ("In basic block %d:", bb->index);
6855 fatal_insn ("Flow control insn inside a basic block", x);
6856 }
6857
6858 x = NEXT_INSN (x);
6859 }
6860 }
6861 }
6862
6863 last_bb_num_seen = -1;
6864 num_bb_notes = 0;
6865 x = rtx_first;
6866 while (x)
6867 {
6868 if (NOTE_INSN_BASIC_BLOCK_P (x))
6869 {
6870 basic_block bb = NOTE_BASIC_BLOCK (x);
6871 num_bb_notes++;
6872 if (bb->index != last_bb_num_seen + 1)
6873 fatal ("Basic blocks not numbered consecutively");
6874 last_bb_num_seen = bb->index;
6875 }
6876
6877 if (!bb_info[INSN_UID (x)])
6878 {
6879 switch (GET_CODE (x))
6880 {
6881 case BARRIER:
6882 case NOTE:
6883 break;
6884
6885 case CODE_LABEL:
6886 /* An addr_vec is placed outside any basic block. */
6887 if (NEXT_INSN (x)
6888 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6889 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6890 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6891 {
6892 x = NEXT_INSN (x);
6893 }
6894
6895 /* But in any case, non-deletable labels can appear anywhere. */
6896 break;
6897
6898 default:
6899 fatal_insn ("Insn outside basic block", x);
6900 }
6901 }
6902
6903 if (INSN_P (x)
6904 && GET_CODE (x) == JUMP_INSN
6905 && returnjump_p (x) && ! condjump_p (x)
6906 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6907 fatal_insn ("Return not followed by barrier", x);
6908
6909 x = NEXT_INSN (x);
6910 }
6911
6912 if (num_bb_notes != n_basic_blocks)
6913 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6914 num_bb_notes, n_basic_blocks);
6915
6916 if (err)
6917 abort ();
6918
6919 /* Clean up. */
6920 free (bb_info);
6921 }
6922 \f
6923 /* Functions to access an edge list with a vector representation.
6924 Enough data is kept such that given an index number, the
6925 pred and succ that edge represents can be determined, or
6926 given a pred and a succ, its index number can be returned.
6927 This allows algorithms which consume a lot of memory to
6928 represent the normally full matrix of edge (pred,succ) with a
6929 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6930 wasted space in the client code due to sparse flow graphs. */
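/* A minimal usage sketch (hypothetical caller, kept under #if 0; it relies
   only on the interfaces visible in this file):  */
#if 0
static void
edge_list_usage_sketch ()
{
  struct edge_list *elist = create_edge_list ();
  int ix = EDGE_INDEX (elist, BASIC_BLOCK (0), BASIC_BLOCK (1));

  /* Look up the edge from block 0 to block 1, if any, and dump it.  */
  if (ix != EDGE_INDEX_NO_EDGE)
    dump_edge_info (stderr, elist->index_to_edge[ix], 1);
  free_edge_list (elist);
}
#endif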
6931
6932 /* This function initializes the edge list. Basically the entire
6933 flowgraph is processed, and all edges are assigned a number,
6934 and the data structure is filled in. */
6935
6936 struct edge_list *
6937 create_edge_list ()
6938 {
6939 struct edge_list *elist;
6940 edge e;
6941 int num_edges;
6942 int x;
6943 int block_count;
6944
6945 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6946
6947 num_edges = 0;
6948
6949 /* Determine the number of edges in the flow graph by counting successor
6950 edges on each basic block. */
6951 for (x = 0; x < n_basic_blocks; x++)
6952 {
6953 basic_block bb = BASIC_BLOCK (x);
6954
6955 for (e = bb->succ; e; e = e->succ_next)
6956 num_edges++;
6957 }
6958 /* Don't forget successors of the entry block. */
6959 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6960 num_edges++;
6961
6962 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6963 elist->num_blocks = block_count;
6964 elist->num_edges = num_edges;
6965 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6966
6967 num_edges = 0;
6968
6969 /* Follow successors of the entry block, and register these edges. */
6970 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6971 {
6972 elist->index_to_edge[num_edges] = e;
6973 num_edges++;
6974 }
6975
6976 for (x = 0; x < n_basic_blocks; x++)
6977 {
6978 basic_block bb = BASIC_BLOCK (x);
6979
6980 /* Follow all successors of blocks, and register these edges. */
6981 for (e = bb->succ; e; e = e->succ_next)
6982 {
6983 elist->index_to_edge[num_edges] = e;
6984 num_edges++;
6985 }
6986 }
6987 return elist;
6988 }
6989
6990 /* This function frees the memory associated with an edge list. */
6991
6992 void
6993 free_edge_list (elist)
6994 struct edge_list *elist;
6995 {
6996 if (elist)
6997 {
6998 free (elist->index_to_edge);
6999 free (elist);
7000 }
7001 }
7002
7003 /* This function provides debug output showing an edge list. */
7004
7005 void
7006 print_edge_list (f, elist)
7007 FILE *f;
7008 struct edge_list *elist;
7009 {
7010 int x;
7011 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7012 elist->num_blocks - 2, elist->num_edges);
7013
7014 for (x = 0; x < elist->num_edges; x++)
7015 {
7016 fprintf (f, " %-4d - edge(", x);
7017 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7018 fprintf (f, "entry,");
7019 else
7020 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7021
7022 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7023 fprintf (f, "exit)\n");
7024 else
7025 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7026 }
7027 }
7028
7029 /* This function provides an internal consistency check of an edge list,
7030 verifying that all edges are present, and that there are no
7031 extra edges. */
7032
7033 void
7034 verify_edge_list (f, elist)
7035 FILE *f;
7036 struct edge_list *elist;
7037 {
7038 int x, pred, succ, index;
7039 edge e;
7040
7041 for (x = 0; x < n_basic_blocks; x++)
7042 {
7043 basic_block bb = BASIC_BLOCK (x);
7044
7045 for (e = bb->succ; e; e = e->succ_next)
7046 {
7047 pred = e->src->index;
7048 succ = e->dest->index;
7049 index = EDGE_INDEX (elist, e->src, e->dest);
7050 if (index == EDGE_INDEX_NO_EDGE)
7051 {
7052 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7053 continue;
7054 }
7055 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7056 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7057 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7058 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7059 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7060 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7061 }
7062 }
7063 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7064 {
7065 pred = e->src->index;
7066 succ = e->dest->index;
7067 index = EDGE_INDEX (elist, e->src, e->dest);
7068 if (index == EDGE_INDEX_NO_EDGE)
7069 {
7070 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7071 continue;
7072 }
7073 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7074 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7075 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7076 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7077 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7078 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7079 }
7080 /* We've verified that all the edges are in the list; now let's make sure
7081 there are no spurious edges in the list. */
7082
7083 for (pred = 0; pred < n_basic_blocks; pred++)
7084 for (succ = 0; succ < n_basic_blocks; succ++)
7085 {
7086 basic_block p = BASIC_BLOCK (pred);
7087 basic_block s = BASIC_BLOCK (succ);
7088
7089 int found_edge = 0;
7090
7091 for (e = p->succ; e; e = e->succ_next)
7092 if (e->dest == s)
7093 {
7094 found_edge = 1;
7095 break;
7096 }
7097 for (e = s->pred; e; e = e->pred_next)
7098 if (e->src == p)
7099 {
7100 found_edge = 1;
7101 break;
7102 }
7103 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7104 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7105 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7106 pred, succ);
7107 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7108 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7109 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7110 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7111 BASIC_BLOCK (succ)));
7112 }
7113 for (succ = 0; succ < n_basic_blocks; succ++)
7114 {
7115 basic_block p = ENTRY_BLOCK_PTR;
7116 basic_block s = BASIC_BLOCK (succ);
7117
7118 int found_edge = 0;
7119
7120 for (e = p->succ; e; e = e->succ_next)
7121 if (e->dest == s)
7122 {
7123 found_edge = 1;
7124 break;
7125 }
7126 for (e = s->pred; e; e = e->pred_next)
7127 if (e->src == p)
7128 {
7129 found_edge = 1;
7130 break;
7131 }
7132 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7133 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7134 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7135 succ);
7136 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7137 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7138 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7139 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7140 BASIC_BLOCK (succ)));
7141 }
7142 for (pred = 0; pred < n_basic_blocks; pred++)
7143 {
7144 basic_block p = BASIC_BLOCK (pred);
7145 basic_block s = EXIT_BLOCK_PTR;
7146
7147 int found_edge = 0;
7148
7149 for (e = p->succ; e; e = e->succ_next)
7150 if (e->dest == s)
7151 {
7152 found_edge = 1;
7153 break;
7154 }
7155 for (e = s->pred; e; e = e->pred_next)
7156 if (e->src == p)
7157 {
7158 found_edge = 1;
7159 break;
7160 }
7161 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7162 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7163 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7164 pred);
7165 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7166 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7167 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7168 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7169 EXIT_BLOCK_PTR));
7170 }
7171 }
7172
7173 /* This routine will determine what, if any, edge there is between
7174 a specified predecessor and successor. */
7175
7176 int
7177 find_edge_index (edge_list, pred, succ)
7178 struct edge_list *edge_list;
7179 basic_block pred, succ;
7180 {
7181 int x;
7182 for (x = 0; x < NUM_EDGES (edge_list); x++)
7183 {
7184 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7185 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7186 return x;
7187 }
7188 return (EDGE_INDEX_NO_EDGE);
7189 }
7190
7191 /* This function will remove an edge from the flow graph. */
7192
7193 void
7194 remove_edge (e)
7195 edge e;
7196 {
7197 edge last_pred = NULL;
7198 edge last_succ = NULL;
7199 edge tmp;
7200 basic_block src, dest;
7201 src = e->src;
7202 dest = e->dest;
7203 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7204 last_succ = tmp;
7205
7206 if (!tmp)
7207 abort ();
7208 if (last_succ)
7209 last_succ->succ_next = e->succ_next;
7210 else
7211 src->succ = e->succ_next;
7212
7213 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7214 last_pred = tmp;
7215
7216 if (!tmp)
7217 abort ();
7218 if (last_pred)
7219 last_pred->pred_next = e->pred_next;
7220 else
7221 dest->pred = e->pred_next;
7222
7223 n_edges--;
7224 free (e);
7225 }
7226
7227 /* This routine will remove any fake successor edges for a basic block.
7228 When the edge is removed, it is also removed from whatever predecessor
7229 list it is in. */
7230
7231 static void
7232 remove_fake_successors (bb)
7233 basic_block bb;
7234 {
7235 edge e;
7236 for (e = bb->succ; e;)
7237 {
7238 edge tmp = e;
7239 e = e->succ_next;
7240 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7241 remove_edge (tmp);
7242 }
7243 }
7244
7245 /* This routine will remove all fake edges from the flow graph. If
7246 we remove all fake successors, it will automatically remove all
7247 fake predecessors. */
7248
7249 void
7250 remove_fake_edges ()
7251 {
7252 int x;
7253
7254 for (x = 0; x < n_basic_blocks; x++)
7255 remove_fake_successors (BASIC_BLOCK (x));
7256
7257 /* We've handled all successors except the entry block's. */
7258 remove_fake_successors (ENTRY_BLOCK_PTR);
7259 }
7260
7261 /* This function will add a fake edge between any block which has no
7262 successors, and the exit block. Some data flow equations require these
7263 edges to exist. */
7264
7265 void
7266 add_noreturn_fake_exit_edges ()
7267 {
7268 int x;
7269
7270 for (x = 0; x < n_basic_blocks; x++)
7271 if (BASIC_BLOCK (x)->succ == NULL)
7272 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7273 }
7274
7275 /* This function adds a fake edge from each infinite loop to the
7276 exit block. Some optimizations require a path from each node to
7277 the exit node.
7278
7279 See also Morgan, Figure 3.10, pp. 82-83.
7280
7281 The current implementation is ugly, not attempting to minimize the
7282 number of inserted fake edges. To reduce the number of fake edges
7283 to insert, add fake edges from _innermost_ loops containing only
7284 nodes not reachable from the exit block. */
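/* For example (an illustrative CFG, not from a real dump): a block that
   only jumps to itself has no path to EXIT, so the reverse search from the
   exit block never reaches it; the loop below then gives that block a fake
   edge to EXIT_BLOCK_PTR and resumes the search from it.  */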
7285
7286 void
7287 connect_infinite_loops_to_exit ()
7288 {
7289 basic_block unvisited_block;
7290
7291 /* Perform depth-first search in the reverse graph to find nodes
7292 reachable from the exit block. */
7293 struct depth_first_search_dsS dfs_ds;
7294
7295 flow_dfs_compute_reverse_init (&dfs_ds);
7296 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7297
7298 /* Repeatedly add fake edges, updating the unreachable nodes. */
7299 while (1)
7300 {
7301 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7302 if (!unvisited_block)
7303 break;
7304 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7305 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7306 }
7307
7308 flow_dfs_compute_reverse_finish (&dfs_ds);
7309
7310 return;
7311 }
7312
7313 /* Redirect an edge's successor from one block to another. */
7314
7315 void
7316 redirect_edge_succ (e, new_succ)
7317 edge e;
7318 basic_block new_succ;
7319 {
7320 edge *pe;
7321
7322 /* Disconnect the edge from the old successor block. */
7323 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7324 continue;
7325 *pe = (*pe)->pred_next;
7326
7327 /* Reconnect the edge to the new successor block. */
7328 e->pred_next = new_succ->pred;
7329 new_succ->pred = e;
7330 e->dest = new_succ;
7331 }
7332
7333 /* Redirect an edge's predecessor from one block to another. */
7334
7335 void
7336 redirect_edge_pred (e, new_pred)
7337 edge e;
7338 basic_block new_pred;
7339 {
7340 edge *pe;
7341
7342 /* Disconnect the edge from the old predecessor block. */
7343 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7344 continue;
7345 *pe = (*pe)->succ_next;
7346
7347 /* Reconnect the edge to the new predecessor block. */
7348 e->succ_next = new_pred->succ;
7349 new_pred->succ = e;
7350 e->src = new_pred;
7351 }
7352 \f
7353 /* Dump the list of basic blocks in the bitmap NODES. */
7354
7355 static void
7356 flow_nodes_print (str, nodes, file)
7357 const char *str;
7358 const sbitmap nodes;
7359 FILE *file;
7360 {
7361 int node;
7362
7363 if (! nodes)
7364 return;
7365
7366 fprintf (file, "%s { ", str);
7367 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7368 fputs ("}\n", file);
7369 }
7370
7371
7372 /* Dump the list of edges in the array EDGE_LIST. */
7373
7374 static void
7375 flow_edge_list_print (str, edge_list, num_edges, file)
7376 const char *str;
7377 const edge *edge_list;
7378 int num_edges;
7379 FILE *file;
7380 {
7381 int i;
7382
7383 if (! edge_list)
7384 return;
7385
7386 fprintf (file, "%s { ", str);
7387 for (i = 0; i < num_edges; i++)
7388 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7389 edge_list[i]->dest->index);
7390 fputs ("}\n", file);
7391 }
7392
7393
7394 /* Dump loop related CFG information. */
7395
7396 static void
7397 flow_loops_cfg_dump (loops, file)
7398 const struct loops *loops;
7399 FILE *file;
7400 {
7401 int i;
7402
7403 if (! loops->num || ! file || ! loops->cfg.dom)
7404 return;
7405
7406 for (i = 0; i < n_basic_blocks; i++)
7407 {
7408 edge succ;
7409
7410 fprintf (file, ";; %d succs { ", i);
7411 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7412 fprintf (file, "%d ", succ->dest->index);
7413 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7414 }
7415
7416 /* Dump the DFS node order. */
7417 if (loops->cfg.dfs_order)
7418 {
7419 fputs (";; DFS order: ", file);
7420 for (i = 0; i < n_basic_blocks; i++)
7421 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7422 fputs ("\n", file);
7423 }
7424 /* Dump the reverse completion node order. */
7425 if (loops->cfg.rc_order)
7426 {
7427 fputs (";; RC order: ", file);
7428 for (i = 0; i < n_basic_blocks; i++)
7429 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7430 fputs ("\n", file);
7431 }
7432 }
7433
7434 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7435
7436 static int
7437 flow_loop_nested_p (outer, loop)
7438 struct loop *outer;
7439 struct loop *loop;
7440 {
7441 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7442 }
7443
7444
7445 /* Dump the loop information specified by LOOP to the stream FILE
7446 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7447 void
7448 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7449 const struct loop *loop;
7450 FILE *file;
7451 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7452 int verbose;
7453 {
7454 if (! loop || ! loop->header)
7455 return;
7456
7457 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7458 loop->num, INSN_UID (loop->first->head),
7459 INSN_UID (loop->last->end),
7460 loop->shared ? " shared" : "",
7461 loop->invalid ? " invalid" : "");
7462 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7463 loop->header->index, loop->latch->index,
7464 loop->pre_header ? loop->pre_header->index : -1,
7465 loop->first->index, loop->last->index);
7466 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7467 loop->depth, loop->level,
7468 (long) (loop->outer ? loop->outer->num : -1));
7469
7470 if (loop->pre_header_edges)
7471 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7472 loop->num_pre_header_edges, file);
7473 flow_edge_list_print (";; entry edges", loop->entry_edges,
7474 loop->num_entries, file);
7475 fprintf (file, ";; %d", loop->num_nodes);
7476 flow_nodes_print (" nodes", loop->nodes, file);
7477 flow_edge_list_print (";; exit edges", loop->exit_edges,
7478 loop->num_exits, file);
7479 if (loop->exits_doms)
7480 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7481 if (loop_dump_aux)
7482 loop_dump_aux (loop, file, verbose);
7483 }
7484
7485
7486 /* Dump the loop information specified by LOOPS to the stream FILE,
7487 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7488 void
7489 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7490 const struct loops *loops;
7491 FILE *file;
7492 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7493 int verbose;
7494 {
7495 int i;
7496 int num_loops;
7497
7498 num_loops = loops->num;
7499 if (! num_loops || ! file)
7500 return;
7501
7502 fprintf (file, ";; %d loops found, %d levels\n",
7503 num_loops, loops->levels);
7504
7505 for (i = 0; i < num_loops; i++)
7506 {
7507 struct loop *loop = &loops->array[i];
7508
7509 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7510
7511 if (loop->shared)
7512 {
7513 int j;
7514
7515 for (j = 0; j < i; j++)
7516 {
7517 struct loop *oloop = &loops->array[j];
7518
7519 if (loop->header == oloop->header)
7520 {
7521 int disjoint;
7522 int smaller;
7523
7524 smaller = loop->num_nodes < oloop->num_nodes;
7525
7526 /* If the union of LOOP and OLOOP is different from
7527 the larger of LOOP and OLOOP then LOOP and OLOOP
7528 must be disjoint. */
7529 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7530 smaller ? oloop : loop);
7531 fprintf (file,
7532 ";; loop header %d shared by loops %d, %d %s\n",
7533 loop->header->index, i, j,
7534 disjoint ? "disjoint" : "nested");
7535 }
7536 }
7537 }
7538 }
7539
7540 if (verbose)
7541 flow_loops_cfg_dump (loops, file);
7542 }
7543
7544
7545 /* Free all the memory allocated for LOOPS. */
7546
7547 void
7548 flow_loops_free (loops)
7549 struct loops *loops;
7550 {
7551 if (loops->array)
7552 {
7553 int i;
7554
7555 if (! loops->num)
7556 abort ();
7557
7558 /* Free the loop descriptors. */
7559 for (i = 0; i < loops->num; i++)
7560 {
7561 struct loop *loop = &loops->array[i];
7562
7563 if (loop->pre_header_edges)
7564 free (loop->pre_header_edges);
7565 if (loop->nodes)
7566 sbitmap_free (loop->nodes);
7567 if (loop->entry_edges)
7568 free (loop->entry_edges);
7569 if (loop->exit_edges)
7570 free (loop->exit_edges);
7571 if (loop->exits_doms)
7572 sbitmap_free (loop->exits_doms);
7573 }
7574 free (loops->array);
7575 loops->array = NULL;
7576
7577 if (loops->cfg.dom)
7578 sbitmap_vector_free (loops->cfg.dom);
7579 if (loops->cfg.dfs_order)
7580 free (loops->cfg.dfs_order);
7581
7582 if (loops->shared_headers)
7583 sbitmap_free (loops->shared_headers);
7584 }
7585 }
7586
7587
7588 /* Find the entry edges into the loop with header HEADER and nodes
7589 NODES and store in ENTRY_EDGES array. Return the number of entry
7590 edges into the loop. */
7591
7592 static int
7593 flow_loop_entry_edges_find (header, nodes, entry_edges)
7594 basic_block header;
7595 const sbitmap nodes;
7596 edge **entry_edges;
7597 {
7598 edge e;
7599 int num_entries;
7600
7601 *entry_edges = NULL;
7602
7603 num_entries = 0;
7604 for (e = header->pred; e; e = e->pred_next)
7605 {
7606 basic_block src = e->src;
7607
7608 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7609 num_entries++;
7610 }
7611
7612 if (! num_entries)
7613 abort ();
7614
7615 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge));
7616
7617 num_entries = 0;
7618 for (e = header->pred; e; e = e->pred_next)
7619 {
7620 basic_block src = e->src;
7621
7622 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7623 (*entry_edges)[num_entries++] = e;
7624 }
7625
7626 return num_entries;
7627 }
7628
7629
7630 /* Find the exit edges from the loop using the bitmap of loop nodes
7631 NODES and store in EXIT_EDGES array. Return the number of
7632 exit edges from the loop. */
7633
7634 static int
7635 flow_loop_exit_edges_find (nodes, exit_edges)
7636 const sbitmap nodes;
7637 edge **exit_edges;
7638 {
7639 edge e;
7640 int node;
7641 int num_exits;
7642
7643 *exit_edges = NULL;
7644
7645 /* Check all nodes within the loop to see if there are any
7646 successors not in the loop. Note that a node may have several
7647 exiting edges; for example, a block ending in a conditional jump
7648 has a branch edge and a fallthru edge, either of which may leave the loop. */
7649 num_exits = 0;
7650 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7651 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7652 {
7653 basic_block dest = e->dest;
7654
7655 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7656 num_exits++;
7657 }
7658 });
7659
7660 if (! num_exits)
7661 return 0;
7662
7663 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));
7664
7665 /* Store all exiting edges into an array. */
7666 num_exits = 0;
7667 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7668 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7669 {
7670 basic_block dest = e->dest;
7671
7672 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7673 (*exit_edges)[num_exits++] = e;
7674 }
7675 });
7676
7677 return num_exits;
7678 }
7679
7680
7681 /* Find the nodes contained within the loop with header HEADER and
7682 latch LATCH and store in NODES. Return the number of nodes within
7683 the loop. */
7684
7685 static int
7686 flow_loop_nodes_find (header, latch, nodes)
7687 basic_block header;
7688 basic_block latch;
7689 sbitmap nodes;
7690 {
7691 basic_block *stack;
7692 int sp;
7693 int num_nodes = 0;
7694
7695 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7696 sp = 0;
7697
7698 /* Start with only the loop header in the set of loop nodes. */
7699 sbitmap_zero (nodes);
7700 SET_BIT (nodes, header->index);
7701 num_nodes++;
7702 header->loop_depth++;
7703
7704 /* Push the loop latch on to the stack. */
7705 if (! TEST_BIT (nodes, latch->index))
7706 {
7707 SET_BIT (nodes, latch->index);
7708 latch->loop_depth++;
7709 num_nodes++;
7710 stack[sp++] = latch;
7711 }
7712
7713 while (sp)
7714 {
7715 basic_block node;
7716 edge e;
7717
7718 node = stack[--sp];
7719 for (e = node->pred; e; e = e->pred_next)
7720 {
7721 basic_block ancestor = e->src;
7722
7723 /* If this ancestor has not yet been marked as part of the loop,
7724 add it to the set of loop nodes and push it on to the stack. */
7725 if (ancestor != ENTRY_BLOCK_PTR
7726 && ! TEST_BIT (nodes, ancestor->index))
7727 {
7728 SET_BIT (nodes, ancestor->index);
7729 ancestor->loop_depth++;
7730 num_nodes++;
7731 stack[sp++] = ancestor;
7732 }
7733 }
7734 }
7735 free (stack);
7736 return num_nodes;
7737 }
7738
7739 /* Compute the depth first search order and store it in the array
7740 DFS_ORDER if non-zero. If RC_ORDER is non-zero, store the
7741 reverse completion number of each node in it. Returns the
7742 number of nodes visited. A depth first search tries to get
7743 as far away from the starting point as quickly as
7744 possible. */
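/* Illustrative example (not part of the original interface): for a
   diamond-shaped CFG with blocks 0, 1, 2, 3 and edges 0->1, 0->2,
   1->3 and 2->3, and assuming the successor edge to block 1 is
   walked before the edge to block 2, the result is

       dfs_order = { 0, 1, 3, 2 }   (order of first visit)
       rc_order  = { 0, 2, 1, 3 }   (reverse completion order,
                                     i.e. reverse postorder)

   The exact orderings depend on the order of the edges in the
   successor lists.  */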
7745
7746 static int
7747 flow_depth_first_order_compute (dfs_order, rc_order)
7748 int *dfs_order;
7749 int *rc_order;
7750 {
7751 edge *stack;
7752 int sp;
7753 int dfsnum = 0;
7754 int rcnum = n_basic_blocks - 1;
7755 sbitmap visited;
7756
7757 /* Allocate stack for back-tracking up CFG. */
7758 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7759 sp = 0;
7760
7761 /* Allocate bitmap to track nodes that have been visited. */
7762 visited = sbitmap_alloc (n_basic_blocks);
7763
7764 /* None of the nodes in the CFG have been visited yet. */
7765 sbitmap_zero (visited);
7766
7767 /* Push the first edge on to the stack. */
7768 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7769
7770 while (sp)
7771 {
7772 edge e;
7773 basic_block src;
7774 basic_block dest;
7775
7776 /* Look at the edge on the top of the stack. */
7777 e = stack[sp - 1];
7778 src = e->src;
7779 dest = e->dest;
7780
7781 /* Check if the edge destination has been visited yet. */
7782 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7783 {
7784 /* Mark that we have visited the destination. */
7785 SET_BIT (visited, dest->index);
7786
7787 if (dfs_order)
7788 dfs_order[dfsnum++] = dest->index;
7789
7790 if (dest->succ)
7791 {
7792 /* Since the DEST node has been visited for the first
7793 time, check its successors. */
7794 stack[sp++] = dest->succ;
7795 }
7796 else
7797 {
7798 /* There are no successors for the DEST node so assign
7799 its reverse completion number. */
7800 if (rc_order)
7801 rc_order[rcnum--] = dest->index;
7802 }
7803 }
7804 else
7805 {
7806 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7807 {
7808 /* There are no more successors for the SRC node
7809 so assign its reverse completion number. */
7810 if (rc_order)
7811 rc_order[rcnum--] = src->index;
7812 }
7813
7814 if (e->succ_next)
7815 stack[sp - 1] = e->succ_next;
7816 else
7817 sp--;
7818 }
7819 }
7820
7821 free (stack);
7822 sbitmap_free (visited);
7823
7824 /* The number of nodes visited should not be greater than
7825 n_basic_blocks. */
7826 if (dfsnum > n_basic_blocks)
7827 abort ();
7828
7829 /* Abort if there are nodes left in the CFG that are unreachable. */
7830 if (dfsnum < n_basic_blocks)
7831 abort ();
7832 return dfsnum;
7833 }
7834
7835 /* Perform a depth first search on the _reverse_ graph, that is,
7836 following predecessor rather than successor edges, marking the
7837 blocks visited along the way.
7838
7839 The computation is split into four pieces:
7840
7841 flow_dfs_compute_reverse_init () creates the necessary data
7842 structures.
7843
7844 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7845 structures. The block will start the search.
7846
7847 flow_dfs_compute_reverse_execute () continues (or starts) the
7848 search using the block on the top of the stack, stopping when the
7849 stack is empty.
7850
7851 flow_dfs_compute_reverse_finish () destroys the necessary data
7852 structures.
7853
7854 Thus, the user will probably call ..._init(), call ..._add_bb() to
7855 add a beginning basic block to the stack, call ..._execute(),
7856 possibly add another bb to the stack and again call ..._execute(),
7857 ..., and finally call _finish(). */
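/* A minimal usage sketch (illustrative only; DFS_DS stands for an
   object of the structure type that DEPTH_FIRST_SEARCH_DS points to,
   and BB for a basic_block variable).  Each block returned by
   ..._execute() cannot reach any of the blocks added so far, so it
   can seed a further search:

       flow_dfs_compute_reverse_init (&dfs_ds);
       flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
       while ((bb = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
	 flow_dfs_compute_reverse_add_bb (&dfs_ds, bb);
       flow_dfs_compute_reverse_finish (&dfs_ds);  */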
7858
7859 /* Initialize the data structures used for depth-first search on the
7860 reverse graph. DATA is the depth-first search context to set up:
7861 its back-tracking stack is allocated (initially empty) and its
7862 visited-blocks bitmap is allocated and cleared. */
7864
7865 static void
7866 flow_dfs_compute_reverse_init (data)
7867 depth_first_search_ds data;
7868 {
7869 /* Allocate stack for back-tracking up CFG. */
7870 data->stack =
7871 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7872 * sizeof (basic_block));
7873 data->sp = 0;
7874
7875 /* Allocate bitmap to track nodes that have been visited. */
7876 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7877
7878 /* None of the nodes in the CFG have been visited yet. */
7879 sbitmap_zero (data->visited_blocks);
7880
7881 return;
7882 }
7883
7884 /* Add the specified basic block to the top of the dfs data
7885 structures. When the search continues, it will start at the
7886 block. */
7887
7888 static void
7889 flow_dfs_compute_reverse_add_bb (data, bb)
7890 depth_first_search_ds data;
7891 basic_block bb;
7892 {
7893 data->stack[data->sp++] = bb;
7894 return;
7895 }
7896
7897 /* Continue the depth-first search through the reverse graph starting
7898 with the block at the stack's top and ending when the stack is
7899 empty. Visited nodes are marked. Returns an unvisited basic
7900 block, or NULL if there is none available. */
7901
7902 static basic_block
7903 flow_dfs_compute_reverse_execute (data)
7904 depth_first_search_ds data;
7905 {
7906 basic_block bb;
7907 edge e;
7908 int i;
7909
7910 while (data->sp > 0)
7911 {
7912 bb = data->stack[--data->sp];
7913
7914 /* Mark that we have visited this node. */
7915 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7916 {
7917 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7918
7919 /* Perform depth-first search on adjacent vertices. */
7920 for (e = bb->pred; e; e = e->pred_next)
7921 flow_dfs_compute_reverse_add_bb (data, e->src);
7922 }
7923 }
7924
7925 /* Determine if there are unvisited basic blocks. */
7926 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7927 if (!TEST_BIT (data->visited_blocks, i))
7928 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7929 return NULL;
7930 }
7931
7932 /* Destroy the data structures needed for depth-first search on the
7933 reverse graph. */
7934
7935 static void
7936 flow_dfs_compute_reverse_finish (data)
7937 depth_first_search_ds data;
7938 {
7939 free (data->stack);
7940 sbitmap_free (data->visited_blocks);
7941 return;
7942 }
7943
7944
7945 /* Find the root node of the loop pre-header extended basic block and
7946 the edges along the trace from the root node to the loop header. */
7947
7948 static void
7949 flow_loop_pre_header_scan (loop)
7950 struct loop *loop;
7951 {
7952 int num = 0;
7953 basic_block ebb;
7954
7955 loop->num_pre_header_edges = 0;
7956
7957 if (loop->num_entries != 1)
7958 return;
7959
7960 ebb = loop->entry_edges[0]->src;
7961
7962 if (ebb != ENTRY_BLOCK_PTR)
7963 {
7964 edge e;
7965
7966 /* Count the number of edges along the trace from the root of the
7967 pre-header extended basic block to the loop header. Usually
7968 this is only one or two edges. */
7969 num++;
7970 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
7971 {
7972 ebb = ebb->pred->src;
7973 num++;
7974 }
7975
7976 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
7977 loop->num_pre_header_edges = num;
7978
7979 /* Store the edges in the order that they are followed. The source
7980 of the first edge is the root node of the pre-header extended
7981 basic block and the destination of the last edge is
7982 the loop header. */
7983 for (e = loop->entry_edges[0]; num; e = e->src->pred)
7984 {
7985 loop->pre_header_edges[--num] = e;
7986 }
7987 }
7988 }
7989
7990
7991 /* Return the block for the pre-header of the loop with header
7992 HEADER where DOM specifies the dominator information. Return NULL if
7993 there is no pre-header. */
7994
7995 static basic_block
7996 flow_loop_pre_header_find (header, dom)
7997 basic_block header;
7998 const sbitmap *dom;
7999 {
8000 basic_block pre_header;
8001 edge e;
8002
8003 /* If the header has exactly one predecessor that it does not
8004 dominate, that predecessor is the pre-header. */
8005 pre_header = NULL;
8006 for (e = header->pred; e; e = e->pred_next)
8007 {
8008 basic_block node = e->src;
8009
8010 if (node != ENTRY_BLOCK_PTR
8011 && ! TEST_BIT (dom[node->index], header->index))
8012 {
8013 if (pre_header == NULL)
8014 pre_header = node;
8015 else
8016 {
8017 /* There are multiple edges into the header from outside
8018 the loop so there is no pre-header block. */
8019 pre_header = NULL;
8020 break;
8021 }
8022 }
8023 }
8024 return pre_header;
8025 }
8026
8027 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8028 previously added. The insertion algorithm assumes that the loops
8029 are added in the order found by a depth first search of the CFG. */
8030
8031 static void
8032 flow_loop_tree_node_add (prevloop, loop)
8033 struct loop *prevloop;
8034 struct loop *loop;
8035 {
8036
8037 if (flow_loop_nested_p (prevloop, loop))
8038 {
8039 prevloop->inner = loop;
8040 loop->outer = prevloop;
8041 return;
8042 }
8043
8044 while (prevloop->outer)
8045 {
8046 if (flow_loop_nested_p (prevloop->outer, loop))
8047 {
8048 prevloop->next = loop;
8049 loop->outer = prevloop->outer;
8050 return;
8051 }
8052 prevloop = prevloop->outer;
8053 }
8054
8055 prevloop->next = loop;
8056 loop->outer = NULL;
8057 }
8058
8059 /* Build the loop hierarchy tree for LOOPS. */
8060
8061 static void
8062 flow_loops_tree_build (loops)
8063 struct loops *loops;
8064 {
8065 int i;
8066 int num_loops;
8067
8068 num_loops = loops->num;
8069 if (! num_loops)
8070 return;
8071
8072 /* Root the loop hierarchy tree with the first loop found.
8073 Since we used a depth first search this should be the
8074 outermost loop. */
8075 loops->tree = &loops->array[0];
8076 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8077
8078 /* Add the remaining loops to the tree. */
8079 for (i = 1; i < num_loops; i++)
8080 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8081 }
8082
8083 /* Helper function to compute loop nesting depth and enclosed loop level
8084 for the natural loop specified by LOOP at the loop depth DEPTH.
8085 Returns the loop level. */
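/* Illustrative example: for a loop nest where loop A contains loop B,
   which in turn contains loop C, the computation below yields

       A: depth 1, level 3
       B: depth 2, level 2
       C: depth 3, level 1

   so the depth counts enclosing loops from the outermost loop inwards,
   while the level is the height of the loop in the loop tree.  */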
8086
8087 static int
8088 flow_loop_level_compute (loop, depth)
8089 struct loop *loop;
8090 int depth;
8091 {
8092 struct loop *inner;
8093 int level = 1;
8094
8095 if (! loop)
8096 return 0;
8097
8098 /* Traverse loop tree assigning depth and computing level as the
8099 maximum level of all the inner loops of this loop. The loop
8100 level is equivalent to the height of the loop in the loop tree
8101 and corresponds to the number of enclosed loop levels (including
8102 itself). */
8103 for (inner = loop->inner; inner; inner = inner->next)
8104 {
8105 int ilevel;
8106
8107 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8108
8109 if (ilevel > level)
8110 level = ilevel;
8111 }
8112 loop->level = level;
8113 loop->depth = depth;
8114 return level;
8115 }
8116
8117 /* Compute the loop nesting depth and enclosed loop level for the loop
8118 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
8119 level. */
8120
8121 static int
8122 flow_loops_level_compute (loops)
8123 struct loops *loops;
8124 {
8125 struct loop *loop;
8126 int level;
8127 int levels = 0;
8128
8129 /* Traverse all the outer level loops. */
8130 for (loop = loops->tree; loop; loop = loop->next)
8131 {
8132 level = flow_loop_level_compute (loop, 1);
8133 if (level > levels)
8134 levels = level;
8135 }
8136 return levels;
8137 }
8138
8139
8140 /* Find all the natural loops in the function, save them in the LOOPS
8141 structure, and recalculate the loop_depth information in the basic
8142 block structures. FLAGS controls which loop information is collected.
8143 Return the number of natural loops found. */
8144
8145 int
8146 flow_loops_find (loops, flags)
8147 struct loops *loops;
8148 int flags;
8149 {
8150 int i;
8151 int b;
8152 int num_loops;
8153 edge e;
8154 sbitmap headers;
8155 sbitmap *dom;
8156 int *dfs_order;
8157 int *rc_order;
8158
8159 /* This function cannot be repeatedly called with different
8160 flags to build up the loop information. The loop tree
8161 must always be built if this function is called. */
8162 if (! (flags & LOOP_TREE))
8163 abort ();
8164
8165 memset (loops, 0, sizeof (*loops));
8166
8167 /* Taking care of this degenerate case makes the rest of
8168 this code simpler. */
8169 if (n_basic_blocks == 0)
8170 return 0;
8171
8172 dfs_order = NULL;
8173 rc_order = NULL;
8174
8175 /* Compute the dominators. */
8176 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8177 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8178
8179 /* Count the number of loop edges (back edges). This should be the
8180 same as the number of natural loops. */
8181
8182 num_loops = 0;
8183 for (b = 0; b < n_basic_blocks; b++)
8184 {
8185 basic_block header;
8186
8187 header = BASIC_BLOCK (b);
8188 header->loop_depth = 0;
8189
8190 for (e = header->pred; e; e = e->pred_next)
8191 {
8192 basic_block latch = e->src;
8193
8194 /* Look for back edges where a predecessor is dominated
8195 by this block. A natural loop has a single entry
8196 node (header) that dominates all the nodes in the
8197 loop. It also has a single back edge to the header
8198 from a latch node. Note that multiple natural loops
8199 may share the same header. */
8200 if (b != header->index)
8201 abort ();
8202
8203 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8204 num_loops++;
8205 }
8206 }
8207
8208 if (num_loops)
8209 {
8210 /* Compute depth first search order of the CFG so that outer
8211 natural loops will be found before inner natural loops. */
8212 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8213 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8214 flow_depth_first_order_compute (dfs_order, rc_order);
8215
8216 /* Allocate loop structures. */
8217 loops->array
8218 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8219
8220 headers = sbitmap_alloc (n_basic_blocks);
8221 sbitmap_zero (headers);
8222
8223 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8224 sbitmap_zero (loops->shared_headers);
8225
8226 /* Find and record information about all the natural loops
8227 in the CFG. */
8228 num_loops = 0;
8229 for (b = 0; b < n_basic_blocks; b++)
8230 {
8231 basic_block header;
8232
8233 /* Search the nodes of the CFG in reverse completion order
8234 so that we can find outer loops first. */
8235 header = BASIC_BLOCK (rc_order[b]);
8236
8237 /* Look for all the possible latch blocks for this header. */
8238 for (e = header->pred; e; e = e->pred_next)
8239 {
8240 basic_block latch = e->src;
8241
8242 /* Look for back edges where a predecessor is dominated
8243 by this block. A natural loop has a single entry
8244 node (header) that dominates all the nodes in the
8245 loop. It also has a single back edge to the header
8246 from a latch node. Note that multiple natural loops
8247 may share the same header. */
8248 if (latch != ENTRY_BLOCK_PTR
8249 && TEST_BIT (dom[latch->index], header->index))
8250 {
8251 struct loop *loop;
8252
8253 loop = loops->array + num_loops;
8254
8255 loop->header = header;
8256 loop->latch = latch;
8257 loop->num = num_loops;
8258
8259 num_loops++;
8260 }
8261 }
8262 }
8263
8264 for (i = 0; i < num_loops; i++)
8265 {
8266 struct loop *loop = &loops->array[i];
8267 int j;
8268
8269 /* Keep track of blocks that are loop headers so
8270 that we can tell which loops should be merged. */
8271 if (TEST_BIT (headers, loop->header->index))
8272 SET_BIT (loops->shared_headers, loop->header->index);
8273 SET_BIT (headers, loop->header->index);
8274
8275 /* Find nodes contained within the loop. */
8276 loop->nodes = sbitmap_alloc (n_basic_blocks);
8277 loop->num_nodes
8278 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8279
8280 /* Compute first and last blocks within the loop.
8281 These are often the same as the loop header and
8282 loop latch respectively, but this is not always
8283 the case. */
8284 loop->first
8285 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8286 loop->last
8287 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8288
8289 if (flags & LOOP_EDGES)
8290 {
8291 /* Find edges which enter the loop header.
8292 Note that the entry edges should only
8293 enter the header of a natural loop. */
8294 loop->num_entries
8295 = flow_loop_entry_edges_find (loop->header,
8296 loop->nodes,
8297 &loop->entry_edges);
8298
8299 /* Find edges which exit the loop. */
8300 loop->num_exits
8301 = flow_loop_exit_edges_find (loop->nodes,
8302 &loop->exit_edges);
8303
8304 /* Determine which loop nodes dominate all the exits
8305 of the loop. */
8306 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8307 sbitmap_copy (loop->exits_doms, loop->nodes);
8308 for (j = 0; j < loop->num_exits; j++)
8309 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8310 dom[loop->exit_edges[j]->src->index]);
8311
8312 /* The header of a natural loop must dominate
8313 all exits. */
8314 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8315 abort ();
8316 }
8317
8318 if (flags & LOOP_PRE_HEADER)
8319 {
8320 /* Look to see if the loop has a pre-header node. */
8321 loop->pre_header
8322 = flow_loop_pre_header_find (loop->header, dom);
8323
8324 flow_loop_pre_header_scan (loop);
8325 }
8326 }
8327
8328 /* Natural loops with shared headers may either be disjoint or
8329 nested. Disjoint loops with shared headers cannot be inner
8330 loops and should be merged. For now just mark loops that share
8331 headers. */
8332 for (i = 0; i < num_loops; i++)
8333 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8334 loops->array[i].shared = 1;
8335
8336 sbitmap_free (headers);
8337 }
8338
8339 loops->num = num_loops;
8340
8341 /* Save CFG derived information to avoid recomputing it. */
8342 loops->cfg.dom = dom;
8343 loops->cfg.dfs_order = dfs_order;
8344 loops->cfg.rc_order = rc_order;
8345
8346 /* Build the loop hierarchy tree. */
8347 flow_loops_tree_build (loops);
8348
8349 /* Assign the loop nesting depth and enclosed loop level for each
8350 loop. */
8351 loops->levels = flow_loops_level_compute (loops);
8352
8353 return num_loops;
8354 }
8355
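/* A typical calling sequence (illustrative sketch; the enclosing pass
   and the use of rtl_dump_file are hypothetical):

       struct loops loops;

       if (flow_loops_find (&loops, LOOP_TREE | LOOP_EDGES | LOOP_PRE_HEADER))
	 {
	   if (rtl_dump_file)
	     flow_loops_dump (&loops, rtl_dump_file, NULL, 1);

	   ... use loops.array[0] through loops.array[loops.num - 1] ...
	 }
       flow_loops_free (&loops);  */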
8356
8357 /* Update the information regarding the loops in the CFG
8358 specified by LOOPS. */
8359 int
8360 flow_loops_update (loops, flags)
8361 struct loops *loops;
8362 int flags;
8363 {
8364 /* One day we may want to update the current loop data. For now
8365 throw away the old stuff and rebuild what we need. */
8366 if (loops->array)
8367 flow_loops_free (loops);
8368
8369 return flow_loops_find (loops, flags);
8370 }
8371
8372
8373 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
8374
8375 int
8376 flow_loop_outside_edge_p (loop, e)
8377 const struct loop *loop;
8378 edge e;
8379 {
8380 if (e->dest != loop->header)
8381 abort ();
8382 return (e->src == ENTRY_BLOCK_PTR)
8383 || ! TEST_BIT (loop->nodes, e->src->index);
8384 }
8385
8386 /* Clear LOG_LINKS fields of insns in a chain.
8387 Also clear the global_live_at_{start,end} fields of the basic block
8388 structures. */
8389
8390 void
8391 clear_log_links (insns)
8392 rtx insns;
8393 {
8394 rtx i;
8395 int b;
8396
8397 for (i = insns; i; i = NEXT_INSN (i))
8398 if (INSN_P (i))
8399 LOG_LINKS (i) = 0;
8400
8401 for (b = 0; b < n_basic_blocks; b++)
8402 {
8403 basic_block bb = BASIC_BLOCK (b);
8404
8405 bb->global_live_at_start = NULL;
8406 bb->global_live_at_end = NULL;
8407 }
8408
8409 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8410 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8411 }
8412
8413 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8414 correspond to the hard registers, if any, set in that map. This
8415 could be done far more efficiently by having all sorts of special-cases
8416 with moving single words, but probably isn't worth the trouble. */
8417
8418 void
8419 reg_set_to_hard_reg_set (to, from)
8420 HARD_REG_SET *to;
8421 bitmap from;
8422 {
8423 int i;
8424
8425 EXECUTE_IF_SET_IN_BITMAP
8426 (from, 0, i,
8427 {
8428 if (i >= FIRST_PSEUDO_REGISTER)
8429 return;
8430 SET_HARD_REG_BIT (*to, i);
8431 });
8432 }
8433
8434 /* Called at initialization time. On the first call, set up the flow
8435 obstack; on later calls, simply reset it. */
8435
8436 void
8437 init_flow ()
8438 {
8439 static int initialized;
8440
8441 if (!initialized)
8442 {
8443 gcc_obstack_init (&flow_obstack);
8444 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8445 initialized = 1;
8446 }
8447 else
8448 {
8449 obstack_free (&flow_obstack, flow_firstobj);
8450 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8451 }
8452 }