1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
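/* The following is an illustrative sketch only, guarded out with #if 0;
   nothing in it is used by the compiler.  It shows how the live-register
   information described above can be consumed: start from
   global_live_at_start for a block and scan forward, applying REG_DEAD
   and REG_UNUSED notes.  The function and callback names are
   hypothetical; note_stores, the REG_NOTES accessors and the regset
   macros are the real interfaces used elsewhere in this file.  */
#if 0
static void
mark_store_live (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET ((regset) data, REGNO (dest));
}

/* Compute in LIVE the registers live immediately after TARGET,
   an insn within basic block BB.  */
static void
live_after_insn (bb, target, live)
     basic_block bb;
     rtx target;
     regset live;
{
  rtx insn, note;

  COPY_REG_SET (live, bb->global_live_at_start);
  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Registers that die in this insn are not live afterward.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_DEAD)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

	  /* Registers stored by this insn become live ...  */
	  note_stores (PATTERN (insn), mark_store_live, live);

	  /* ... except those that are set but never used afterward.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_UNUSED)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
	}
      if (insn == target || insn == bb->end)
	break;
    }
}
#endif
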
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137 #include "expr.h"
138 #include "ssa.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 /* The contents of the current function definition are allocated
172 in this obstack, and all are freed at the end of the function.
173 For top-level functions, this is temporary_obstack.
174 Separate obstacks are made for nested functions. */
175
176 extern struct obstack *function_obstack;
177
178 /* Number of basic blocks in the current function. */
179
180 int n_basic_blocks;
181
182 /* Number of edges in the current function. */
183
184 int n_edges;
185
186 /* The basic block array. */
187
188 varray_type basic_block_info;
189
190 /* The special entry and exit blocks. */
191
192 struct basic_block_def entry_exit_blocks[2]
193 = {{NULL, /* head */
194 NULL, /* end */
195 NULL, /* pred */
196 NULL, /* succ */
197 NULL, /* local_set */
198 NULL, /* global_live_at_start */
199 NULL, /* global_live_at_end */
200 NULL, /* aux */
201 ENTRY_BLOCK, /* index */
202 0, /* loop_depth */
203 -1, -1, /* eh_beg, eh_end */
204 0 /* count */
205 },
206 {
207 NULL, /* head */
208 NULL, /* end */
209 NULL, /* pred */
210 NULL, /* succ */
211 NULL, /* local_set */
212 NULL, /* global_live_at_start */
213 NULL, /* global_live_at_end */
214 NULL, /* aux */
215 EXIT_BLOCK, /* index */
216 0, /* loop_depth */
217 -1, -1, /* eh_beg, eh_end */
218 0 /* count */
219 }
220 };
221
222 /* Nonzero if the second flow pass has completed. */
223 int flow2_completed;
224
225 /* Maximum register number used in this function, plus one. */
226
227 int max_regno;
228
229 /* Indexed by n, giving various register information */
230
231 varray_type reg_n_info;
232
233 /* Size of a regset for the current function,
234 in (1) bytes and (2) elements. */
235
236 int regset_bytes;
237 int regset_size;
238
239 /* Regset of regs live when calls to `setjmp'-like functions happen. */
240 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241
242 regset regs_live_at_setjmp;
243
244 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
245 that have to go in the same hard reg.
246 The first two regs in the list are a pair, and the next two
247 are another pair, etc. */
248 rtx regs_may_share;
249
250 /* Set of registers that may be eliminable. These are handled specially
251 in updating regs_ever_live. */
252
253 static HARD_REG_SET elim_reg_set;
254
255 /* The basic block structure for every insn, indexed by uid. */
256
257 varray_type basic_block_for_insn;
258
259 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
260 /* ??? Should probably be using LABEL_NUSES instead. It would take a
261 bit of surgery to be able to use or co-opt the routines in jump. */
262
263 static rtx label_value_list;
264 static rtx tail_recursion_label_list;
265
266 /* Holds information for tracking conditional register life information. */
267 struct reg_cond_life_info
268 {
269 /* An EXPR_LIST of conditions under which a register is dead. */
270 rtx condition;
271
272 /* ??? Could store mask of bytes that are dead, so that we could finally
273 track lifetimes of multi-word registers accessed via subregs. */
274 };
275
276 /* For use in communicating between propagate_block and its subroutines.
277 Holds all information needed to compute life and def-use information. */
278
279 struct propagate_block_info
280 {
281 /* The basic block we're considering. */
282 basic_block bb;
283
284 /* Bit N is set if register N is conditionally or unconditionally live. */
285 regset reg_live;
286
287 /* Bit N is set if register N is set this insn. */
288 regset new_set;
289
290 /* Element N is the next insn that uses (hard or pseudo) register N
291 within the current basic block; or zero, if there is no such insn. */
292 rtx *reg_next_use;
293
294 /* Contains a list of all the MEMs we are tracking for dead store
295 elimination. */
296 rtx mem_set_list;
297
298 /* If non-null, record the set of registers set in the basic block. */
299 regset local_set;
300
301 #ifdef HAVE_conditional_execution
302 /* Indexed by register number, holds a reg_cond_life_info for each
303 register that is not unconditionally live or dead. */
304 splay_tree reg_cond_dead;
305
306 /* Bit N is set if register N is in an expression in reg_cond_dead. */
307 regset reg_cond_reg;
308 #endif
309
310 /* Non-zero if the value of CC0 is live. */
311 int cc0_live;
312
313 /* Flags controlling the set of information propagate_block collects. */
314 int flags;
315 };
316
317 /* Store the data structures necessary for depth-first search. */
318 struct depth_first_search_dsS {
319 /* stack for backtracking during the algorithm */
320 basic_block *stack;
321
322 /* Number of basic blocks on the stack. That is, positions 0, ..., sp-1
323 are occupied. */
324 unsigned int sp;
325
326 /* record of basic blocks already seen by depth-first search */
327 sbitmap visited_blocks;
328 };
329 typedef struct depth_first_search_dsS *depth_first_search_ds;
330
331 /* Forward declarations */
332 static int count_basic_blocks PARAMS ((rtx));
333 static void find_basic_blocks_1 PARAMS ((rtx));
334 static rtx find_label_refs PARAMS ((rtx, rtx));
335 static void clear_edges PARAMS ((void));
336 static void make_edges PARAMS ((rtx));
337 static void make_label_edge PARAMS ((sbitmap *, basic_block,
338 rtx, int));
339 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
340 basic_block, rtx, int));
341 static void mark_critical_edges PARAMS ((void));
342 static void move_stray_eh_region_notes PARAMS ((void));
343 static void record_active_eh_regions PARAMS ((rtx));
344
345 static void commit_one_edge_insertion PARAMS ((edge));
346
347 static void delete_unreachable_blocks PARAMS ((void));
348 static void delete_eh_regions PARAMS ((void));
349 static int can_delete_note_p PARAMS ((rtx));
350 static void expunge_block PARAMS ((basic_block));
351 static int can_delete_label_p PARAMS ((rtx));
352 static int tail_recursion_label_p PARAMS ((rtx));
353 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
354 basic_block));
355 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
356 basic_block));
357 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
358 static void try_merge_blocks PARAMS ((void));
359 static void tidy_fallthru_edges PARAMS ((void));
360 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
361 static void verify_wide_reg PARAMS ((int, rtx, rtx));
362 static void verify_local_live_at_start PARAMS ((regset, basic_block));
363 static int set_noop_p PARAMS ((rtx));
364 static int noop_move_p PARAMS ((rtx));
365 static void delete_noop_moves PARAMS ((rtx));
366 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
367 static void notice_stack_pointer_modification PARAMS ((rtx));
368 static void mark_reg PARAMS ((rtx, void *));
369 static void mark_regs_live_at_end PARAMS ((regset));
370 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
371 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
372 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
373 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
374 static int insn_dead_p PARAMS ((struct propagate_block_info *,
375 rtx, int, rtx));
376 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
377 rtx, rtx));
378 static void mark_set_regs PARAMS ((struct propagate_block_info *,
379 rtx, rtx));
380 static void mark_set_1 PARAMS ((struct propagate_block_info *,
381 enum rtx_code, rtx, rtx,
382 rtx, int));
383 #ifdef HAVE_conditional_execution
384 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
385 int, rtx));
386 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
387 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
388 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
389 int));
390 static rtx ior_reg_cond PARAMS ((rtx, rtx));
391 static rtx not_reg_cond PARAMS ((rtx));
392 static rtx nand_reg_cond PARAMS ((rtx, rtx));
393 #endif
394 #ifdef AUTO_INC_DEC
395 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
396 rtx, rtx, rtx, rtx, rtx));
397 static void find_auto_inc PARAMS ((struct propagate_block_info *,
398 rtx, rtx));
399 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
400 rtx));
401 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
402 #endif
403 static void mark_used_reg PARAMS ((struct propagate_block_info *,
404 rtx, rtx, rtx));
405 static void mark_used_regs PARAMS ((struct propagate_block_info *,
406 rtx, rtx, rtx));
407 void dump_flow_info PARAMS ((FILE *));
408 void debug_flow_info PARAMS ((void));
409 static void dump_edge_info PARAMS ((FILE *, edge, int));
410
411 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
412 rtx));
413 static void remove_fake_successors PARAMS ((basic_block));
414 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
415 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
416 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
417 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
418 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
419 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
420 static int flow_depth_first_order_compute PARAMS ((int *, int *));
421 static void flow_dfs_compute_reverse_init
422 PARAMS ((depth_first_search_ds));
423 static void flow_dfs_compute_reverse_add_bb
424 PARAMS ((depth_first_search_ds, basic_block));
425 static basic_block flow_dfs_compute_reverse_execute
426 PARAMS ((depth_first_search_ds));
427 static void flow_dfs_compute_reverse_finish
428 PARAMS ((depth_first_search_ds));
429 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
430 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
431 static void flow_loops_tree_build PARAMS ((struct loops *));
432 static int flow_loop_level_compute PARAMS ((struct loop *, int));
433 static int flow_loops_level_compute PARAMS ((struct loops *));
434 \f
435 /* Find basic blocks of the current function.
436 F is the first insn of the function and NREGS the number of register
437 numbers in use. */
438
439 void
440 find_basic_blocks (f, nregs, file)
441 rtx f;
442 int nregs ATTRIBUTE_UNUSED;
443 FILE *file ATTRIBUTE_UNUSED;
444 {
445 int max_uid;
446
447 /* Flush out existing data. */
448 if (basic_block_info != NULL)
449 {
450 int i;
451
452 clear_edges ();
453
454 /* Clear bb->aux on all extant basic blocks. We'll use this as a
455 tag for reuse during create_basic_block, just in case some pass
456 copies around basic block notes improperly. */
457 for (i = 0; i < n_basic_blocks; ++i)
458 BASIC_BLOCK (i)->aux = NULL;
459
460 VARRAY_FREE (basic_block_info);
461 }
462
463 n_basic_blocks = count_basic_blocks (f);
464
465 /* Size the basic block table. The actual structures will be allocated
466 by find_basic_blocks_1, since we want to keep the structure pointers
467 stable across calls to find_basic_blocks. */
468 /* ??? This whole issue would be much simpler if we called find_basic_blocks
469 exactly once, and thereafter did not keep a single long chain of
470 instructions at all until close to the end of compilation, when we
471 actually lay them out. */
472
473 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
474
475 find_basic_blocks_1 (f);
476
477 /* Record the block to which an insn belongs. */
478 /* ??? This should be done another way, by which (perhaps) a label is
479 tagged directly with the basic block that it starts. It is used for
480 more than that currently, but IMO that is the only valid use. */
481
482 max_uid = get_max_uid ();
483 #ifdef AUTO_INC_DEC
484 /* Leave space for insns life_analysis makes in some cases for auto-inc.
485 These cases are rare, so we don't need too much space. */
486 max_uid += max_uid / 10;
487 #endif
488
489 compute_bb_for_insn (max_uid);
490
491 /* Discover the edges of our cfg. */
492 record_active_eh_regions (f);
493 make_edges (label_value_list);
494
495 /* Do very simple cleanup now, for the benefit of code that runs between
496 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
497 tidy_fallthru_edges ();
498
499 mark_critical_edges ();
500
501 #ifdef ENABLE_CHECKING
502 verify_flow_info ();
503 #endif
504 }
505
506 /* Count the basic blocks of the function. */
507
508 static int
509 count_basic_blocks (f)
510 rtx f;
511 {
512 register rtx insn;
513 register RTX_CODE prev_code;
514 register int count = 0;
515 int eh_region = 0;
516 int call_had_abnormal_edge = 0;
517
518 prev_code = JUMP_INSN;
519 for (insn = f; insn; insn = NEXT_INSN (insn))
520 {
521 register RTX_CODE code = GET_CODE (insn);
522
523 if (code == CODE_LABEL
524 || (GET_RTX_CLASS (code) == 'i'
525 && (prev_code == JUMP_INSN
526 || prev_code == BARRIER
527 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
528 count++;
529
530 /* Record whether this call created an edge. */
531 if (code == CALL_INSN)
532 {
533 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
534 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
535
536 call_had_abnormal_edge = 0;
537
538 /* If there is an EH region or rethrow, we have an edge. */
539 if ((eh_region && region > 0)
540 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
541 call_had_abnormal_edge = 1;
542 else if (nonlocal_goto_handler_labels && region >= 0)
543 /* If there is a nonlocal goto label and the specified
544 region number isn't -1, we have an edge. (0 means
545 no throw, but might have a nonlocal goto). */
546 call_had_abnormal_edge = 1;
547 }
548
549 if (code != NOTE)
550 prev_code = code;
551 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
552 ++eh_region;
553 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
554 --eh_region;
555 }
556
557 /* The rest of the compiler works a bit more smoothly when we don't have to
558 check for the edge case of do-nothing functions with no basic blocks. */
559 if (count == 0)
560 {
561 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
562 count = 1;
563 }
564
565 return count;
566 }
567
568 /* Scan a list of insns for labels referred to other than by jumps.
569 This is used to scan the alternatives of a call placeholder. */
570 static rtx
571 find_label_refs (f, lvl)
572 rtx f;
573 rtx lvl;
574 {
575 rtx insn;
576
577 for (insn = f; insn; insn = NEXT_INSN (insn))
578 if (INSN_P (insn))
579 {
580 rtx note;
581
582 /* Make a list of all labels referred to other than by jumps
583 (which just don't have the REG_LABEL notes).
584
585 Make a special exception for labels followed by an ADDR*VEC,
586 as this would be a part of the tablejump setup code.
587
588 Make a special exception for the eh_return_stub_label, which
589 we know isn't part of any otherwise visible control flow. */
590
591 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
592 if (REG_NOTE_KIND (note) == REG_LABEL)
593 {
594 rtx lab = XEXP (note, 0), next;
595
596 if (lab == eh_return_stub_label)
597 ;
598 else if ((next = next_nonnote_insn (lab)) != NULL
599 && GET_CODE (next) == JUMP_INSN
600 && (GET_CODE (PATTERN (next)) == ADDR_VEC
601 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
602 ;
603 else if (GET_CODE (lab) == NOTE)
604 ;
605 else
606 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
607 }
608 }
609
610 return lvl;
611 }
612
613 /* Find all basic blocks of the function whose first insn is F.
614
615 Collect a list of labels whose addresses are taken, storing it in
616 label_value_list for make_edges to use with computed gotos. */
617
618 static void
619 find_basic_blocks_1 (f)
620 rtx f;
621 {
622 register rtx insn, next;
623 int i = 0;
624 rtx bb_note = NULL_RTX;
625 rtx eh_list = NULL_RTX;
626 rtx lvl = NULL_RTX;
627 rtx trll = NULL_RTX;
628 rtx head = NULL_RTX;
629 rtx end = NULL_RTX;
630
631 /* We process the instructions in a slightly different way than we did
632 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
633 closed out the previous block, so that it gets attached at the proper
634 place. Since this form should be equivalent to the previous,
635 count_basic_blocks continues to use the old form as a check. */
636
637 for (insn = f; insn; insn = next)
638 {
639 enum rtx_code code = GET_CODE (insn);
640
641 next = NEXT_INSN (insn);
642
643 switch (code)
644 {
645 case NOTE:
646 {
647 int kind = NOTE_LINE_NUMBER (insn);
648
649 /* Keep a LIFO list of the currently active exception notes. */
650 if (kind == NOTE_INSN_EH_REGION_BEG)
651 eh_list = alloc_INSN_LIST (insn, eh_list);
652 else if (kind == NOTE_INSN_EH_REGION_END)
653 {
654 rtx t = eh_list;
655
656 eh_list = XEXP (eh_list, 1);
657 free_INSN_LIST_node (t);
658 }
659
660 /* Look for basic block notes with which to keep the
661 basic_block_info pointers stable. Unthread the note now;
662 we'll put it back at the right place in create_basic_block.
663 Or not at all if we've already found a note in this block. */
664 else if (kind == NOTE_INSN_BASIC_BLOCK)
665 {
666 if (bb_note == NULL_RTX)
667 bb_note = insn;
668 else
669 next = flow_delete_insn (insn);
670 }
671 break;
672 }
673
674 case CODE_LABEL:
675 /* A basic block starts at a label. If we've closed one off due
676 to a barrier or some such, no need to do it again. */
677 if (head != NULL_RTX)
678 {
679 /* While we now have edge lists with which other portions of
680 the compiler might determine a call ending a basic block
681 does not imply an abnormal edge, it will be a bit before
682 everything can be updated. So continue to emit a noop at
683 the end of such a block. */
684 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
685 {
686 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
687 end = emit_insn_after (nop, end);
688 }
689
690 create_basic_block (i++, head, end, bb_note);
691 bb_note = NULL_RTX;
692 }
693
694 head = end = insn;
695 break;
696
697 case JUMP_INSN:
698 /* A basic block ends at a jump. */
699 if (head == NULL_RTX)
700 head = insn;
701 else
702 {
703 /* ??? Make a special check for table jumps. The way this
704 happens is truly and amazingly gross. We are about to
705 create a basic block that contains just a code label and
706 an addr*vec jump insn. Worse, an addr_diff_vec creates
707 its own natural loop.
708
709 Prevent this bit of brain damage, pasting things together
710 correctly in make_edges.
711
712 The correct solution involves emitting the table directly
713 on the tablejump instruction as a note, or JUMP_LABEL. */
714
715 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
716 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
717 {
718 head = end = NULL;
719 n_basic_blocks--;
720 break;
721 }
722 }
723 end = insn;
724 goto new_bb_inclusive;
725
726 case BARRIER:
727 /* A basic block ends at a barrier. It may be that an unconditional
728 jump already closed the basic block -- no need to do it again. */
729 if (head == NULL_RTX)
730 break;
731
732 /* While we now have edge lists with which other portions of the
733 compiler might determine a call ending a basic block does not
734 imply an abnormal edge, it will be a bit before everything can
735 be updated. So continue to emit a noop at the end of such a
736 block. */
737 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
738 {
739 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
740 end = emit_insn_after (nop, end);
741 }
742 goto new_bb_exclusive;
743
744 case CALL_INSN:
745 {
746 /* Record whether this call created an edge. */
747 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
748 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
749 int call_has_abnormal_edge = 0;
750
751 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
752 {
753 /* Scan each of the alternatives for label refs. */
754 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
755 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
756 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
757 /* Record its tail recursion label, if any. */
758 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
759 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
760 }
761
762 /* If there is an EH region or rethrow, we have an edge. */
763 if ((eh_list && region > 0)
764 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
765 call_has_abnormal_edge = 1;
766 else if (nonlocal_goto_handler_labels && region >= 0)
767 /* If there is a nonlocal goto label and the specified
768 region number isn't -1, we have an edge. (0 means
769 no throw, but might have a nonlocal goto). */
770 call_has_abnormal_edge = 1;
771
772 /* A basic block ends at a call that can either throw or
773 do a non-local goto. */
774 if (call_has_abnormal_edge)
775 {
776 new_bb_inclusive:
777 if (head == NULL_RTX)
778 head = insn;
779 end = insn;
780
781 new_bb_exclusive:
782 create_basic_block (i++, head, end, bb_note);
783 head = end = NULL_RTX;
784 bb_note = NULL_RTX;
785 break;
786 }
787 }
788 /* Fall through. */
789
790 default:
791 if (GET_RTX_CLASS (code) == 'i')
792 {
793 if (head == NULL_RTX)
794 head = insn;
795 end = insn;
796 }
797 break;
798 }
799
800 if (GET_RTX_CLASS (code) == 'i')
801 {
802 rtx note;
803
804 /* Make a list of all labels referred to other than by jumps
805 (which just don't have the REG_LABEL notes).
806
807 Make a special exception for labels followed by an ADDR*VEC,
808 as this would be a part of the tablejump setup code.
809
810 Make a special exception for the eh_return_stub_label, which
811 we know isn't part of any otherwise visible control flow. */
812
813 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
814 if (REG_NOTE_KIND (note) == REG_LABEL)
815 {
816 rtx lab = XEXP (note, 0), next;
817
818 if (lab == eh_return_stub_label)
819 ;
820 else if ((next = next_nonnote_insn (lab)) != NULL
821 && GET_CODE (next) == JUMP_INSN
822 && (GET_CODE (PATTERN (next)) == ADDR_VEC
823 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
824 ;
825 else if (GET_CODE (lab) == NOTE)
826 ;
827 else
828 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
829 }
830 }
831 }
832
833 if (head != NULL_RTX)
834 create_basic_block (i++, head, end, bb_note);
835 else if (bb_note)
836 flow_delete_insn (bb_note);
837
838 if (i != n_basic_blocks)
839 abort ();
840
841 label_value_list = lvl;
842 tail_recursion_label_list = trll;
843 }
844
845 /* Tidy the CFG by deleting unreachable code and whatnot. */
846
847 void
848 cleanup_cfg (f)
849 rtx f;
850 {
851 delete_unreachable_blocks ();
852 move_stray_eh_region_notes ();
853 record_active_eh_regions (f);
854 try_merge_blocks ();
855 mark_critical_edges ();
856
857 /* Kill the data we won't maintain. */
858 free_EXPR_LIST_list (&label_value_list);
859 free_EXPR_LIST_list (&tail_recursion_label_list);
860 }
861
862 /* Create a new basic block consisting of the instructions between
863 HEAD and END inclusive. Reuses the note and basic block struct
864 in BB_NOTE, if any. */
865
866 void
867 create_basic_block (index, head, end, bb_note)
868 int index;
869 rtx head, end, bb_note;
870 {
871 basic_block bb;
872
873 if (bb_note
874 && ! RTX_INTEGRATED_P (bb_note)
875 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
876 && bb->aux == NULL)
877 {
878 /* If we found an existing note, thread it back onto the chain. */
879
880 rtx after;
881
882 if (GET_CODE (head) == CODE_LABEL)
883 after = head;
884 else
885 {
886 after = PREV_INSN (head);
887 head = bb_note;
888 }
889
890 if (after != bb_note && NEXT_INSN (after) != bb_note)
891 reorder_insns (bb_note, bb_note, after);
892 }
893 else
894 {
895 /* Otherwise we must create a note and a basic block structure.
896 Since we allow basic block structs in rtl, give the struct
897 the same lifetime by allocating it off the function obstack
898 rather than using malloc. */
899
900 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
901 memset (bb, 0, sizeof (*bb));
902
903 if (GET_CODE (head) == CODE_LABEL)
904 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
905 else
906 {
907 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
908 head = bb_note;
909 }
910 NOTE_BASIC_BLOCK (bb_note) = bb;
911 }
912
913 /* Always include the bb note in the block. */
914 if (NEXT_INSN (end) == bb_note)
915 end = bb_note;
916
917 bb->head = head;
918 bb->end = end;
919 bb->index = index;
920 BASIC_BLOCK (index) = bb;
921
922 /* Tag the block so that we know it has been used when considering
923 other basic block notes. */
924 bb->aux = bb;
925 }
926 \f
927 /* Record in basic_block_for_insn, indexed by INSN_UID, the basic block
928 to which each instruction belongs. MAX is the size of the array. */
929
930 void
931 compute_bb_for_insn (max)
932 int max;
933 {
934 int i;
935
936 if (basic_block_for_insn)
937 VARRAY_FREE (basic_block_for_insn);
938 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
939
940 for (i = 0; i < n_basic_blocks; ++i)
941 {
942 basic_block bb = BASIC_BLOCK (i);
943 rtx insn, end;
944
945 end = bb->end;
946 insn = bb->head;
947 while (1)
948 {
949 int uid = INSN_UID (insn);
950 if (uid < max)
951 VARRAY_BB (basic_block_for_insn, uid) = bb;
952 if (insn == end)
953 break;
954 insn = NEXT_INSN (insn);
955 }
956 }
957 }
958
959 /* Free the memory associated with the edge structures. */
960
961 static void
962 clear_edges ()
963 {
964 int i;
965 edge n, e;
966
967 for (i = 0; i < n_basic_blocks; ++i)
968 {
969 basic_block bb = BASIC_BLOCK (i);
970
971 for (e = bb->succ; e; e = n)
972 {
973 n = e->succ_next;
974 free (e);
975 }
976
977 bb->succ = 0;
978 bb->pred = 0;
979 }
980
981 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
982 {
983 n = e->succ_next;
984 free (e);
985 }
986
987 ENTRY_BLOCK_PTR->succ = 0;
988 EXIT_BLOCK_PTR->pred = 0;
989
990 n_edges = 0;
991 }
992
993 /* Identify the edges between basic blocks.
994
995 LABEL_VALUE_LIST is a list of labels whose addresses are taken other
996 than by jumps; blocks that are otherwise unreachable may still be
997 reached through them by a computed goto. Exception edges are derived
998 from each block's eh_beg/eh_end fields, as recorded beforehand by
999 record_active_eh_regions. */
1000
1001 static void
1002 make_edges (label_value_list)
1003 rtx label_value_list;
1004 {
1005 int i;
1006 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1007 sbitmap *edge_cache = NULL;
1008
1009 /* Assume no computed jump; revise as we create edges. */
1010 current_function_has_computed_jump = 0;
1011
1012 /* Heavy use of computed goto in machine-generated code can lead to
1013 nearly fully-connected CFGs. In that case we spend a significant
1014 amount of time searching the edge lists for duplicates. */
1015 if (forced_labels || label_value_list)
1016 {
1017 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1018 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1019 }
1020
1021 /* By nature of the way these get numbered, block 0 is always the entry. */
1022 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1023
1024 for (i = 0; i < n_basic_blocks; ++i)
1025 {
1026 basic_block bb = BASIC_BLOCK (i);
1027 rtx insn, x;
1028 enum rtx_code code;
1029 int force_fallthru = 0;
1030
1031 /* Examine the last instruction of the block, and discover the
1032 ways we can leave the block. */
1033
1034 insn = bb->end;
1035 code = GET_CODE (insn);
1036
1037 /* A branch. */
1038 if (code == JUMP_INSN)
1039 {
1040 rtx tmp;
1041
1042 /* ??? Recognize a tablejump and do the right thing. */
1043 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1044 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1045 && GET_CODE (tmp) == JUMP_INSN
1046 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1047 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1048 {
1049 rtvec vec;
1050 int j;
1051
1052 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1053 vec = XVEC (PATTERN (tmp), 0);
1054 else
1055 vec = XVEC (PATTERN (tmp), 1);
1056
1057 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1058 make_label_edge (edge_cache, bb,
1059 XEXP (RTVEC_ELT (vec, j), 0), 0);
1060
1061 /* Some targets (e.g., ARM) emit a conditional jump that also
1062 contains the out-of-range target. Scan for these and
1063 add an edge if necessary. */
1064 if ((tmp = single_set (insn)) != NULL
1065 && SET_DEST (tmp) == pc_rtx
1066 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1067 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1068 make_label_edge (edge_cache, bb,
1069 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1070
1071 #ifdef CASE_DROPS_THROUGH
1072 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1073 us naturally detecting fallthru into the next block. */
1074 force_fallthru = 1;
1075 #endif
1076 }
1077
1078 /* If this is a computed jump, then mark it as reaching
1079 everything on the label_value_list and forced_labels list. */
1080 else if (computed_jump_p (insn))
1081 {
1082 current_function_has_computed_jump = 1;
1083
1084 for (x = label_value_list; x; x = XEXP (x, 1))
1085 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1086
1087 for (x = forced_labels; x; x = XEXP (x, 1))
1088 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1089 }
1090
1091 /* A return instruction creates an edge to the exit block. */
1092 else if (returnjump_p (insn))
1093 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1094
1095 /* Otherwise, we have a plain conditional or unconditional jump. */
1096 else
1097 {
1098 if (! JUMP_LABEL (insn))
1099 abort ();
1100 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1101 }
1102 }
1103
1104 /* If this is a sibling call insn, then this is in effect a
1105 combined call and return, and so we need an edge to the
1106 exit block. No need to worry about EH edges, since we
1107 wouldn't have created the sibling call in the first place. */
1108
1109 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1110 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1111 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1112 else
1113
1114 /* If this is a CALL_INSN, then mark it as reaching the active EH
1115 handler for this CALL_INSN. If we're handling asynchronous
1116 exceptions then any insn can reach any of the active handlers.
1117
1118 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1119
1120 if (code == CALL_INSN || asynchronous_exceptions)
1121 {
1122 /* Add any appropriate EH edges. We do this unconditionally
1123 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1124 on the call, and this needn't be within an EH region. */
1125 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1126
1127 /* If we have asynchronous exceptions, do the same for *all*
1128 exception regions active in the block. */
1129 if (asynchronous_exceptions
1130 && bb->eh_beg != bb->eh_end)
1131 {
1132 if (bb->eh_beg >= 0)
1133 make_eh_edge (edge_cache, eh_nest_info, bb,
1134 NULL_RTX, bb->eh_beg);
1135
1136 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1137 if (GET_CODE (x) == NOTE
1138 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1139 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1140 {
1141 int region = NOTE_EH_HANDLER (x);
1142 make_eh_edge (edge_cache, eh_nest_info, bb,
1143 NULL_RTX, region);
1144 }
1145 }
1146
1147 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1148 {
1149 /* ??? This could be made smarter: in some cases it's possible
1150 to tell that certain calls will not do a nonlocal goto.
1151
1152 For example, if the nested functions that do the nonlocal
1153 gotos do not have their addresses taken, then only calls to
1154 those functions or to other nested functions that use them
1155 could possibly do nonlocal gotos. */
1156 /* We do know that a REG_EH_REGION note with a value less
1157 than 0 is guaranteed not to perform a non-local goto. */
1158 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1159 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1160 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1161 make_label_edge (edge_cache, bb, XEXP (x, 0),
1162 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1163 }
1164 }
1165
1166 /* We know something about the structure of the function __throw in
1167 libgcc2.c. It is the only function that ever contains eh_stub
1168 labels. It modifies its return address so that the last block
1169 returns to one of the eh_stub labels within it. So we have to
1170 make additional edges in the flow graph. */
1171 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1172 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1173
1174 /* Find out if we can drop through to the next block. */
1175 insn = next_nonnote_insn (insn);
1176 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1177 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1178 else if (i + 1 < n_basic_blocks)
1179 {
1180 rtx tmp = BLOCK_HEAD (i + 1);
1181 if (GET_CODE (tmp) == NOTE)
1182 tmp = next_nonnote_insn (tmp);
1183 if (force_fallthru || insn == tmp)
1184 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1185 }
1186 }
1187
1188 free_eh_nesting_info (eh_nest_info);
1189 if (edge_cache)
1190 sbitmap_vector_free (edge_cache);
1191 }
1192
1193 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1194 about the edge that is accumulated between calls. */
1195
1196 void
1197 make_edge (edge_cache, src, dst, flags)
1198 sbitmap *edge_cache;
1199 basic_block src, dst;
1200 int flags;
1201 {
1202 int use_edge_cache;
1203 edge e;
1204
1205 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1206 many edges to them, and we didn't allocate memory for it. */
1207 use_edge_cache = (edge_cache
1208 && src != ENTRY_BLOCK_PTR
1209 && dst != EXIT_BLOCK_PTR);
1210
1211 /* Make sure we don't add duplicate edges. */
1212 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1213 for (e = src->succ; e; e = e->succ_next)
1214 if (e->dest == dst)
1215 {
1216 e->flags |= flags;
1217 return;
1218 }
1219
1220 e = (edge) xcalloc (1, sizeof (*e));
1221 n_edges++;
1222
1223 e->succ_next = src->succ;
1224 e->pred_next = dst->pred;
1225 e->src = src;
1226 e->dest = dst;
1227 e->flags = flags;
1228
1229 src->succ = e;
1230 dst->pred = e;
1231
1232 if (use_edge_cache)
1233 SET_BIT (edge_cache[src->index], dst->index);
1234 }
1235
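/* An illustrative, hedged example (not used anywhere, hence #if 0): the
   FLAGS accumulation mentioned above means that calling make_edge twice
   for the same pair of blocks does not create a duplicate edge; the
   second call just ORs the new flags into the existing edge.  A null
   edge cache is accepted and simply disables the sbitmap shortcut.
   The wrapper name is hypothetical.  */
#if 0
static void
example_make_edge (src, dst)
     basic_block src, dst;
{
  make_edge (NULL, src, dst, EDGE_FALLTHRU);

  /* Finds the edge created above and leaves its flags set to
     EDGE_FALLTHRU | EDGE_ABNORMAL; n_edges is unchanged.  */
  make_edge (NULL, src, dst, EDGE_ABNORMAL);
}
#endif
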
1236 /* Create an edge from a basic block to a label. */
1237
1238 static void
1239 make_label_edge (edge_cache, src, label, flags)
1240 sbitmap *edge_cache;
1241 basic_block src;
1242 rtx label;
1243 int flags;
1244 {
1245 if (GET_CODE (label) != CODE_LABEL)
1246 abort ();
1247
1248 /* If the label was never emitted, this insn is junk, but avoid a
1249 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1250 as a result of a syntax error, in which case a diagnostic has already
1251 been printed. */
1252
1253 if (INSN_UID (label) == 0)
1254 return;
1255
1256 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1257 }
1258
1259 /* Create the edges generated by INSN in REGION. */
1260
1261 static void
1262 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1263 sbitmap *edge_cache;
1264 eh_nesting_info *eh_nest_info;
1265 basic_block src;
1266 rtx insn;
1267 int region;
1268 {
1269 handler_info **handler_list;
1270 int num, is_call;
1271
1272 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1273 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1274 while (--num >= 0)
1275 {
1276 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1277 EDGE_ABNORMAL | EDGE_EH | is_call);
1278 }
1279 }
1280
1281 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1282 dangerous if we intend to move basic blocks around. Move such notes
1283 into the following block. */
1284
1285 static void
1286 move_stray_eh_region_notes ()
1287 {
1288 int i;
1289 basic_block b1, b2;
1290
1291 if (n_basic_blocks < 2)
1292 return;
1293
1294 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1295 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1296 {
1297 rtx insn, next, list = NULL_RTX;
1298
1299 b1 = BASIC_BLOCK (i);
1300 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1301 {
1302 next = NEXT_INSN (insn);
1303 if (GET_CODE (insn) == NOTE
1304 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1305 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1306 {
1307 /* Unlink from the insn chain. */
1308 NEXT_INSN (PREV_INSN (insn)) = next;
1309 PREV_INSN (next) = PREV_INSN (insn);
1310
1311 /* Queue it. */
1312 NEXT_INSN (insn) = list;
1313 list = insn;
1314 }
1315 }
1316
1317 if (list == NULL_RTX)
1318 continue;
1319
1320 /* Find where to insert these things. */
1321 insn = b2->head;
1322 if (GET_CODE (insn) == CODE_LABEL)
1323 insn = NEXT_INSN (insn);
1324
1325 while (list)
1326 {
1327 next = NEXT_INSN (list);
1328 add_insn_after (list, insn);
1329 list = next;
1330 }
1331 }
1332 }
1333
1334 /* Recompute eh_beg/eh_end for each basic block. */
1335
1336 static void
1337 record_active_eh_regions (f)
1338 rtx f;
1339 {
1340 rtx insn, eh_list = NULL_RTX;
1341 int i = 0;
1342 basic_block bb = BASIC_BLOCK (0);
1343
1344 for (insn = f; insn; insn = NEXT_INSN (insn))
1345 {
1346 if (bb->head == insn)
1347 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1348
1349 if (GET_CODE (insn) == NOTE)
1350 {
1351 int kind = NOTE_LINE_NUMBER (insn);
1352 if (kind == NOTE_INSN_EH_REGION_BEG)
1353 eh_list = alloc_INSN_LIST (insn, eh_list);
1354 else if (kind == NOTE_INSN_EH_REGION_END)
1355 {
1356 rtx t = XEXP (eh_list, 1);
1357 free_INSN_LIST_node (eh_list);
1358 eh_list = t;
1359 }
1360 }
1361
1362 if (bb->end == insn)
1363 {
1364 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1365 i += 1;
1366 if (i == n_basic_blocks)
1367 break;
1368 bb = BASIC_BLOCK (i);
1369 }
1370 }
1371 }
1372
1373 /* Identify critical edges and set the bits appropriately. */
1374
1375 static void
1376 mark_critical_edges ()
1377 {
1378 int i, n = n_basic_blocks;
1379 basic_block bb;
1380
1381 /* We begin with the entry block. This is not terribly important now,
1382 but could be if a front end (Fortran) implemented alternate entry
1383 points. */
1384 bb = ENTRY_BLOCK_PTR;
1385 i = -1;
1386
1387 while (1)
1388 {
1389 edge e;
1390
1391 /* (1) Critical edges must have a source with multiple successors. */
1392 if (bb->succ && bb->succ->succ_next)
1393 {
1394 for (e = bb->succ; e; e = e->succ_next)
1395 {
1396 /* (2) Critical edges must have a destination with multiple
1397 predecessors. Note that we know there is at least one
1398 predecessor -- the edge we followed to get here. */
1399 if (e->dest->pred->pred_next)
1400 e->flags |= EDGE_CRITICAL;
1401 else
1402 e->flags &= ~EDGE_CRITICAL;
1403 }
1404 }
1405 else
1406 {
1407 for (e = bb->succ; e; e = e->succ_next)
1408 e->flags &= ~EDGE_CRITICAL;
1409 }
1410
1411 if (++i >= n)
1412 break;
1413 bb = BASIC_BLOCK (i);
1414 }
1415 }
1416 \f
1417 /* Split a (typically critical) edge. Return the new block.
1418 Abort on abnormal edges.
1419
1420 ??? The code generally expects to be called on critical edges.
1421 The case of a block ending in an unconditional jump to a
1422 block with multiple predecessors is not handled optimally. */
1423
1424 basic_block
1425 split_edge (edge_in)
1426 edge edge_in;
1427 {
1428 basic_block old_pred, bb, old_succ;
1429 edge edge_out;
1430 rtx bb_note;
1431 int i, j;
1432
1433 /* Abnormal edges cannot be split. */
1434 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1435 abort ();
1436
1437 old_pred = edge_in->src;
1438 old_succ = edge_in->dest;
1439
1440 /* Remove the existing edge from the destination's pred list. */
1441 {
1442 edge *pp;
1443 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1444 continue;
1445 *pp = edge_in->pred_next;
1446 edge_in->pred_next = NULL;
1447 }
1448
1449 /* Create the new structures. */
1450 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1451 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1452 n_edges++;
1453
1454 memset (bb, 0, sizeof (*bb));
1455
1456 /* ??? This info is likely going to be out of date very soon. */
1457 if (old_succ->global_live_at_start)
1458 {
1459 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1460 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1461 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1462 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1463 }
1464
1465 /* Wire them up. */
1466 bb->pred = edge_in;
1467 bb->succ = edge_out;
1468 bb->count = edge_in->count;
1469
1470 edge_in->dest = bb;
1471 edge_in->flags &= ~EDGE_CRITICAL;
1472
1473 edge_out->pred_next = old_succ->pred;
1474 edge_out->succ_next = NULL;
1475 edge_out->src = bb;
1476 edge_out->dest = old_succ;
1477 edge_out->flags = EDGE_FALLTHRU;
1478 edge_out->probability = REG_BR_PROB_BASE;
1479 edge_out->count = edge_in->count;
1480
1481 old_succ->pred = edge_out;
1482
1483 /* Tricky case -- if there existed a fallthru into the successor
1484 (and we're not it) we must add a new unconditional jump around
1485 the new block we're actually interested in.
1486
1487 Further, if that edge is critical, this means a second new basic
1488 block must be created to hold it. In order to simplify correct
1489 insn placement, do this before we touch the existing basic block
1490 ordering for the block we were really wanting. */
1491 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1492 {
1493 edge e;
1494 for (e = edge_out->pred_next; e; e = e->pred_next)
1495 if (e->flags & EDGE_FALLTHRU)
1496 break;
1497
1498 if (e)
1499 {
1500 basic_block jump_block;
1501 rtx pos;
1502
1503 if ((e->flags & EDGE_CRITICAL) == 0
1504 && e->src != ENTRY_BLOCK_PTR)
1505 {
1506 /* Non-critical -- we can simply add a jump to the end
1507 of the existing predecessor. */
1508 jump_block = e->src;
1509 }
1510 else
1511 {
1512 /* We need a new block to hold the jump. The simplest
1513 way to do the bulk of the work here is to recursively
1514 call ourselves. */
1515 jump_block = split_edge (e);
1516 e = jump_block->succ;
1517 }
1518
1519 /* Now add the jump insn ... */
1520 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1521 jump_block->end);
1522 jump_block->end = pos;
1523 if (basic_block_for_insn)
1524 set_block_for_insn (pos, jump_block);
1525 emit_barrier_after (pos);
1526
1527 /* ... let jump know that label is in use, ... */
1528 JUMP_LABEL (pos) = old_succ->head;
1529 ++LABEL_NUSES (old_succ->head);
1530
1531 /* ... and clear fallthru on the outgoing edge. */
1532 e->flags &= ~EDGE_FALLTHRU;
1533
1534 /* Continue splitting the interesting edge. */
1535 }
1536 }
1537
1538 /* Place the new block just in front of the successor. */
1539 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1540 if (old_succ == EXIT_BLOCK_PTR)
1541 j = n_basic_blocks - 1;
1542 else
1543 j = old_succ->index;
1544 for (i = n_basic_blocks - 1; i > j; --i)
1545 {
1546 basic_block tmp = BASIC_BLOCK (i - 1);
1547 BASIC_BLOCK (i) = tmp;
1548 tmp->index = i;
1549 }
1550 BASIC_BLOCK (i) = bb;
1551 bb->index = i;
1552
1553 /* Create the basic block note.
1554
1555 Where we place the note can have a noticeable impact on the generated
1556 code. Consider this cfg:
1557
1558 E
1559 |
1560 0
1561 / \
1562 +->1-->2--->E
1563 | |
1564 +--+
1565
1566 If we need to insert an insn on the edge from block 0 to block 1,
1567 we want to ensure the instructions we insert are outside of any
1568 loop notes that physically sit between block 0 and block 1. Otherwise
1569 we confuse the loop optimizer into thinking the loop is a phony. */
1570 if (old_succ != EXIT_BLOCK_PTR
1571 && PREV_INSN (old_succ->head)
1572 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1573 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1574 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1575 PREV_INSN (old_succ->head));
1576 else if (old_succ != EXIT_BLOCK_PTR)
1577 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1578 else
1579 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1580 NOTE_BASIC_BLOCK (bb_note) = bb;
1581 bb->head = bb->end = bb_note;
1582
1583 /* Not quite simple -- for non-fallthru edges, we must adjust the
1584 predecessor's jump instruction to target our new block. */
1585 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1586 {
1587 rtx tmp, insn = old_pred->end;
1588 rtx old_label = old_succ->head;
1589 rtx new_label = gen_label_rtx ();
1590
1591 if (GET_CODE (insn) != JUMP_INSN)
1592 abort ();
1593
1594 /* ??? Recognize a tablejump and adjust all matching cases. */
1595 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1596 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1597 && GET_CODE (tmp) == JUMP_INSN
1598 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1599 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1600 {
1601 rtvec vec;
1602 int j;
1603
1604 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1605 vec = XVEC (PATTERN (tmp), 0);
1606 else
1607 vec = XVEC (PATTERN (tmp), 1);
1608
1609 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1610 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1611 {
1612 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1613 --LABEL_NUSES (old_label);
1614 ++LABEL_NUSES (new_label);
1615 }
1616
1617 /* Handle casesi dispatch insns */
1618 if ((tmp = single_set (insn)) != NULL
1619 && SET_DEST (tmp) == pc_rtx
1620 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1621 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1622 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1623 {
1624 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1625 new_label);
1626 --LABEL_NUSES (old_label);
1627 ++LABEL_NUSES (new_label);
1628 }
1629 }
1630 else
1631 {
1632 /* This would have indicated an abnormal edge. */
1633 if (computed_jump_p (insn))
1634 abort ();
1635
1636 /* A return instruction can't be redirected. */
1637 if (returnjump_p (insn))
1638 abort ();
1639
1640 /* If the insn doesn't go where we think, we're confused. */
1641 if (JUMP_LABEL (insn) != old_label)
1642 abort ();
1643
1644 redirect_jump (insn, new_label, 0);
1645 }
1646
1647 emit_label_before (new_label, bb_note);
1648 bb->head = new_label;
1649 }
1650
1651 return bb;
1652 }
1653
1654 /* Queue instructions for insertion on an edge between two basic blocks.
1655 The new instructions and basic blocks (if any) will not appear in the
1656 CFG until commit_edge_insertions is called. */
1657
1658 void
1659 insert_insn_on_edge (pattern, e)
1660 rtx pattern;
1661 edge e;
1662 {
1663 /* We cannot insert instructions on an abnormal critical edge.
1664 It will be easier to find the culprit if we die now. */
1665 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1666 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1667 abort ();
1668
1669 if (e->insns == NULL_RTX)
1670 start_sequence ();
1671 else
1672 push_to_sequence (e->insns);
1673
1674 emit_insn (pattern);
1675
1676 e->insns = get_insns ();
1677 end_sequence ();
1678 }
1679
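/* A hedged sketch of how this interface is typically used (guarded out
   with #if 0 and not referenced anywhere): queue a pattern on each
   ordinary outgoing edge of a block with insert_insn_on_edge, then let
   commit_edge_insertions materialize the queued insns, splitting edges
   into new blocks where necessary.  The wrapper name and the choice of a
   (use (const_int 0)) no-op pattern are illustrative only.  */
#if 0
static void
example_insert_on_edges (bb)
     basic_block bb;
{
  edge e;

  /* Abnormal edges are skipped; insns cannot be inserted on an
     abnormal critical edge.  */
  for (e = bb->succ; e; e = e->succ_next)
    if (! (e->flags & EDGE_ABNORMAL))
      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);

  /* Update the CFG for all pending insertions in the function.  */
  commit_edge_insertions ();
}
#endif
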
1680 /* Update the CFG for the instructions queued on edge E. */
1681
1682 static void
1683 commit_one_edge_insertion (e)
1684 edge e;
1685 {
1686 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1687 basic_block bb;
1688
1689 /* Pull the insns off the edge now since the edge might go away. */
1690 insns = e->insns;
1691 e->insns = NULL_RTX;
1692
1693 /* Figure out where to put these things. If the destination has
1694 one predecessor, insert there. Except for the exit block. */
1695 if (e->dest->pred->pred_next == NULL
1696 && e->dest != EXIT_BLOCK_PTR)
1697 {
1698 bb = e->dest;
1699
1700 /* Get the location correct wrt a code label, and "nice" wrt
1701 a basic block note, and before everything else. */
1702 tmp = bb->head;
1703 if (GET_CODE (tmp) == CODE_LABEL)
1704 tmp = NEXT_INSN (tmp);
1705 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1706 tmp = NEXT_INSN (tmp);
1707 if (tmp == bb->head)
1708 before = tmp;
1709 else
1710 after = PREV_INSN (tmp);
1711 }
1712
1713 /* If the source has one successor and the edge is not abnormal,
1714 insert there. Except for the entry block. */
1715 else if ((e->flags & EDGE_ABNORMAL) == 0
1716 && e->src->succ->succ_next == NULL
1717 && e->src != ENTRY_BLOCK_PTR)
1718 {
1719 bb = e->src;
1720 /* It is possible to have a non-simple jump here. Consider a target
1721 where some forms of unconditional jumps clobber a register. This
1722 happens on the fr30 for example.
1723
1724 We know this block has a single successor, so we can just emit
1725 the queued insns before the jump. */
1726 if (GET_CODE (bb->end) == JUMP_INSN)
1727 {
1728 before = bb->end;
1729 }
1730 else
1731 {
1732 /* We'd better be fallthru, or we've lost track of what's what. */
1733 if ((e->flags & EDGE_FALLTHRU) == 0)
1734 abort ();
1735
1736 after = bb->end;
1737 }
1738 }
1739
1740 /* Otherwise we must split the edge. */
1741 else
1742 {
1743 bb = split_edge (e);
1744 after = bb->end;
1745 }
1746
1747 /* Now that we've found the spot, do the insertion. */
1748
1749 /* Set the new block number for these insns, if structure is allocated. */
1750 if (basic_block_for_insn)
1751 {
1752 rtx i;
1753 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1754 set_block_for_insn (i, bb);
1755 }
1756
1757 if (before)
1758 {
1759 emit_insns_before (insns, before);
1760 if (before == bb->head)
1761 bb->head = insns;
1762
1763 last = prev_nonnote_insn (before);
1764 }
1765 else
1766 {
1767 last = emit_insns_after (insns, after);
1768 if (after == bb->end)
1769 bb->end = last;
1770 }
1771
1772 if (returnjump_p (last))
1773 {
1774 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1775 This is not currently a problem because this only happens
1776 for the (single) epilogue, which already has a fallthru edge
1777 to EXIT. */
1778
1779 e = bb->succ;
1780 if (e->dest != EXIT_BLOCK_PTR
1781 || e->succ_next != NULL
1782 || (e->flags & EDGE_FALLTHRU) == 0)
1783 abort ();
1784 e->flags &= ~EDGE_FALLTHRU;
1785
1786 emit_barrier_after (last);
1787 bb->end = last;
1788
1789 if (before)
1790 flow_delete_insn (before);
1791 }
1792 else if (GET_CODE (last) == JUMP_INSN)
1793 abort ();
1794 }
1795
1796 /* Update the CFG for all queued instructions. */
1797
1798 void
1799 commit_edge_insertions ()
1800 {
1801 int i;
1802 basic_block bb;
1803
1804 #ifdef ENABLE_CHECKING
1805 verify_flow_info ();
1806 #endif
1807
1808 i = -1;
1809 bb = ENTRY_BLOCK_PTR;
1810 while (1)
1811 {
1812 edge e, next;
1813
1814 for (e = bb->succ; e; e = next)
1815 {
1816 next = e->succ_next;
1817 if (e->insns)
1818 commit_one_edge_insertion (e);
1819 }
1820
1821 if (++i >= n_basic_blocks)
1822 break;
1823 bb = BASIC_BLOCK (i);
1824 }
1825 }
1826 \f
1827 /* Delete all unreachable basic blocks. */
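/* In outline (a restatement of the code below, for orientation): seed a
   worklist with the successors of the entry block, repeatedly pop a block
   and push any successor not yet marked via its ->aux field, and finally
   delete every block left unmarked.  */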
1828
1829 static void
1830 delete_unreachable_blocks ()
1831 {
1832 basic_block *worklist, *tos;
1833 int deleted_handler;
1834 edge e;
1835 int i, n;
1836
1837 n = n_basic_blocks;
1838 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1839
1840 /* Use basic_block->aux as a marker. Clear them all. */
1841
1842 for (i = 0; i < n; ++i)
1843 BASIC_BLOCK (i)->aux = NULL;
1844
1845 /* Add our starting points to the worklist. Almost always there will
1846 be only one. It isn't inconceivable that we might one day directly
1847 support Fortran alternate entry points. */
1848
1849 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1850 {
1851 *tos++ = e->dest;
1852
1853 /* Mark the block with a handy non-null value. */
1854 e->dest->aux = e;
1855 }
1856
1857 /* Iterate: find everything reachable from what we've already seen. */
1858
1859 while (tos != worklist)
1860 {
1861 basic_block b = *--tos;
1862
1863 for (e = b->succ; e; e = e->succ_next)
1864 if (!e->dest->aux)
1865 {
1866 *tos++ = e->dest;
1867 e->dest->aux = e;
1868 }
1869 }
1870
1871 /* Delete all unreachable basic blocks. Count down so that we don't
1872 interfere with the block renumbering that happens in flow_delete_block. */
1873
1874 deleted_handler = 0;
1875
1876 for (i = n - 1; i >= 0; --i)
1877 {
1878 basic_block b = BASIC_BLOCK (i);
1879
1880 if (b->aux != NULL)
1881 /* This block was found. Tidy up the mark. */
1882 b->aux = NULL;
1883 else
1884 deleted_handler |= flow_delete_block (b);
1885 }
1886
1887 tidy_fallthru_edges ();
1888
1889 /* If we deleted an exception handler, we may have EH region begin/end
1890 blocks to remove as well. */
1891 if (deleted_handler)
1892 delete_eh_regions ();
1893
1894 free (worklist);
1895 }
1896
1897 /* Find EH regions for which there is no longer a handler, and delete them. */
1898
1899 static void
1900 delete_eh_regions ()
1901 {
1902 rtx insn;
1903
1904 update_rethrow_references ();
1905
1906 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1907 if (GET_CODE (insn) == NOTE)
1908 {
1909 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1910 || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1911 {
1912 int num = NOTE_EH_HANDLER (insn);
1913 /* A NULL handler indicates a region is no longer needed,
1914 as long as its rethrow label isn't used. */
1915 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1916 {
1917 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1918 NOTE_SOURCE_FILE (insn) = 0;
1919 }
1920 }
1921 }
1922 }
1923
1924 /* Return true if NOTE is not one of the ones that must be kept paired,
1925 so that we may simply delete them. */
1926
1927 static int
1928 can_delete_note_p (note)
1929 rtx note;
1930 {
1931 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1932 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1933 }
1934
1935 /* Unlink a chain of insns between START and FINISH, leaving notes
1936 that must be paired. */
1937
1938 void
1939 flow_delete_insn_chain (start, finish)
1940 rtx start, finish;
1941 {
1942 /* Unchain the insns one by one. It would be quicker to delete all
1943 of these with a single unchaining, rather than one at a time, but
1944 we need to keep the NOTEs. */
1945
1946 rtx next;
1947
1948 while (1)
1949 {
1950 next = NEXT_INSN (start);
1951 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1952 ;
1953 else if (GET_CODE (start) == CODE_LABEL
1954 && ! can_delete_label_p (start))
1955 {
1956 const char *name = LABEL_NAME (start);
1957 PUT_CODE (start, NOTE);
1958 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1959 NOTE_SOURCE_FILE (start) = name;
1960 }
1961 else
1962 next = flow_delete_insn (start);
1963
1964 if (start == finish)
1965 break;
1966 start = next;
1967 }
1968 }
1969
1970 /* Delete the insns in a (non-live) block. We physically delete every
1971 non-deleted-note insn, and update the flow graph appropriately.
1972
1973 Return nonzero if we deleted an exception handler. */
1974
1975 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1976 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1977
1978 int
1979 flow_delete_block (b)
1980 basic_block b;
1981 {
1982 int deleted_handler = 0;
1983 rtx insn, end, tmp;
1984
1985 /* If the head of this block is a CODE_LABEL, then it might be the
1986 label for an exception handler which can't be reached.
1987
1988 We need to remove the label from the exception_handler_label list
1989 and remove the associated NOTE_INSN_EH_REGION_BEG and
1990 NOTE_INSN_EH_REGION_END notes. */
1991
1992 insn = b->head;
1993
1994 never_reached_warning (insn);
1995
1996 if (GET_CODE (insn) == CODE_LABEL)
1997 {
1998 rtx x, *prev = &exception_handler_labels;
1999
2000 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2001 {
2002 if (XEXP (x, 0) == insn)
2003 {
2004 /* Found a match, splice this label out of the EH label list. */
2005 *prev = XEXP (x, 1);
2006 XEXP (x, 1) = NULL_RTX;
2007 XEXP (x, 0) = NULL_RTX;
2008
2009 /* Remove the handler from all regions */
2010 remove_handler (insn);
2011 deleted_handler = 1;
2012 break;
2013 }
2014 prev = &XEXP (x, 1);
2015 }
2016 }
2017
2018 /* Include any jump table following the basic block. */
2019 end = b->end;
2020 if (GET_CODE (end) == JUMP_INSN
2021 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2022 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2023 && GET_CODE (tmp) == JUMP_INSN
2024 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2025 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2026 end = tmp;
2027
2028 /* Include any barrier that may follow the basic block. */
2029 tmp = next_nonnote_insn (end);
2030 if (tmp && GET_CODE (tmp) == BARRIER)
2031 end = tmp;
2032
2033 /* Selectively delete the entire chain. */
2034 flow_delete_insn_chain (insn, end);
2035
2036 /* Remove the edges into and out of this block. Note that there may
2037 indeed be edges in, if we are removing an unreachable loop. */
2038 {
2039 edge e, next, *q;
2040
2041 for (e = b->pred; e; e = next)
2042 {
2043 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2044 continue;
2045 *q = e->succ_next;
2046 next = e->pred_next;
2047 n_edges--;
2048 free (e);
2049 }
2050 for (e = b->succ; e; e = next)
2051 {
2052 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2053 continue;
2054 *q = e->pred_next;
2055 next = e->succ_next;
2056 n_edges--;
2057 free (e);
2058 }
2059
2060 b->pred = NULL;
2061 b->succ = NULL;
2062 }
2063
2064 /* Remove the basic block from the array, and compact behind it. */
2065 expunge_block (b);
2066
2067 return deleted_handler;
2068 }
2069
2070 /* Remove block B from the basic block array and compact behind it. */
2071
2072 static void
2073 expunge_block (b)
2074 basic_block b;
2075 {
2076 int i, n = n_basic_blocks;
2077
2078 for (i = b->index; i + 1 < n; ++i)
2079 {
2080 basic_block x = BASIC_BLOCK (i + 1);
2081 BASIC_BLOCK (i) = x;
2082 x->index = i;
2083 }
2084
2085 basic_block_info->num_elements--;
2086 n_basic_blocks--;
2087 }
2088
2089 /* Delete INSN by patching it out. Return the next insn. */
2090
2091 rtx
2092 flow_delete_insn (insn)
2093 rtx insn;
2094 {
2095 rtx prev = PREV_INSN (insn);
2096 rtx next = NEXT_INSN (insn);
2097 rtx note;
2098
2099 PREV_INSN (insn) = NULL_RTX;
2100 NEXT_INSN (insn) = NULL_RTX;
2101 INSN_DELETED_P (insn) = 1;
2102
2103 if (prev)
2104 NEXT_INSN (prev) = next;
2105 if (next)
2106 PREV_INSN (next) = prev;
2107 else
2108 set_last_insn (prev);
2109
2110 if (GET_CODE (insn) == CODE_LABEL)
2111 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2112
2113 /* If deleting a jump, decrement the use count of the label. Deleting
2114 the label itself should happen in the normal course of block merging. */
2115 if (GET_CODE (insn) == JUMP_INSN
2116 && JUMP_LABEL (insn)
2117 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2118 LABEL_NUSES (JUMP_LABEL (insn))--;
2119
2120 /* Also if deleting an insn that references a label. */
2121 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2122 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2123 LABEL_NUSES (XEXP (note, 0))--;
2124
2125 return next;
2126 }
2127
2128 /* True if a given label can be deleted. */
2129
2130 static int
2131 can_delete_label_p (label)
2132 rtx label;
2133 {
2134 rtx x;
2135
2136 if (LABEL_PRESERVE_P (label))
2137 return 0;
2138
2139 for (x = forced_labels; x; x = XEXP (x, 1))
2140 if (label == XEXP (x, 0))
2141 return 0;
2142 for (x = label_value_list; x; x = XEXP (x, 1))
2143 if (label == XEXP (x, 0))
2144 return 0;
2145 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2146 if (label == XEXP (x, 0))
2147 return 0;
2148
2149 /* User declared labels must be preserved. */
2150 if (LABEL_NAME (label) != 0)
2151 return 0;
2152
2153 return 1;
2154 }
2155
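/* Return nonzero if LABEL appears in tail_recursion_label_list, i.e. it
   is one of the labels recorded for tail recursion.  */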
2156 static int
2157 tail_recursion_label_p (label)
2158 rtx label;
2159 {
2160 rtx x;
2161
2162 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2163 if (label == XEXP (x, 0))
2164 return 1;
2165
2166 return 0;
2167 }
2168
2169 /* Blocks A and B are to be merged into a single block A. The insns
2170 are already contiguous, hence `nomove'. */
2171
2172 void
2173 merge_blocks_nomove (a, b)
2174 basic_block a, b;
2175 {
2176 edge e;
2177 rtx b_head, b_end, a_end;
2178 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2179 int b_empty = 0;
2180
2181 /* If there was a CODE_LABEL beginning B, delete it. */
2182 b_head = b->head;
2183 b_end = b->end;
2184 if (GET_CODE (b_head) == CODE_LABEL)
2185 {
2186 /* Detect basic blocks with nothing but a label. This can happen
2187 in particular at the end of a function. */
2188 if (b_head == b_end)
2189 b_empty = 1;
2190 del_first = del_last = b_head;
2191 b_head = NEXT_INSN (b_head);
2192 }
2193
2194 /* Delete the basic block note. */
2195 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2196 {
2197 if (b_head == b_end)
2198 b_empty = 1;
2199 if (! del_last)
2200 del_first = b_head;
2201 del_last = b_head;
2202 b_head = NEXT_INSN (b_head);
2203 }
2204
2205 /* If there was a jump out of A, delete it. */
2206 a_end = a->end;
2207 if (GET_CODE (a_end) == JUMP_INSN)
2208 {
2209 rtx prev;
2210
2211 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2212 if (GET_CODE (prev) != NOTE
2213 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2214 || prev == a->head)
2215 break;
2216
2217 del_first = a_end;
2218
2219 #ifdef HAVE_cc0
2220 /* If this was a conditional jump, we need to also delete
2221 the insn that set cc0. */
2222 if (prev && sets_cc0_p (prev))
2223 {
2224 rtx tmp = prev;
2225 prev = prev_nonnote_insn (prev);
2226 if (!prev)
2227 prev = a->head;
2228 del_first = tmp;
2229 }
2230 #endif
2231
2232 a_end = prev;
2233 }
2234 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2235 del_first = NEXT_INSN (a_end);
2236
2237 /* Delete everything marked above as well as crap that might be
2238 hanging out between the two blocks. */
2239 flow_delete_insn_chain (del_first, del_last);
2240
2241 /* Normally there should only be one successor of A and that is B, but
2242 partway through the merge of blocks for conditional_execution we'll
2243 be merging a TEST block with THEN and ELSE successors. Free the
2244 whole lot of them and hope the caller knows what they're doing. */
2245 while (a->succ)
2246 remove_edge (a->succ);
2247
2248 /* Adjust the edges out of B for the new owner. */
2249 for (e = b->succ; e; e = e->succ_next)
2250 e->src = a;
2251 a->succ = b->succ;
2252
2253 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2254 b->pred = b->succ = NULL;
2255
2256 /* Reassociate the insns of B with A. */
2257 if (!b_empty)
2258 {
2259 if (basic_block_for_insn)
2260 {
2261 BLOCK_FOR_INSN (b_head) = a;
2262 while (b_head != b_end)
2263 {
2264 b_head = NEXT_INSN (b_head);
2265 BLOCK_FOR_INSN (b_head) = a;
2266 }
2267 }
2268 a_end = b_end;
2269 }
2270 a->end = a_end;
2271
2272 expunge_block (b);
2273 }
2274
2275 /* Blocks A and B are to be merged into a single block. A has no incoming
2276 fallthru edge, so it can be moved before B without adding or modifying
2277 any jumps (aside from the jump from A to B). */
2278
2279 static int
2280 merge_blocks_move_predecessor_nojumps (a, b)
2281 basic_block a, b;
2282 {
2283 rtx start, end, barrier;
2284 int index;
2285
2286 start = a->head;
2287 end = a->end;
2288
2289 barrier = next_nonnote_insn (end);
2290 if (GET_CODE (barrier) != BARRIER)
2291 abort ();
2292 flow_delete_insn (barrier);
2293
2294 /* Move block and loop notes out of the chain so that we do not
2295 disturb their order.
2296
2297 ??? A better solution would be to squeeze out all the non-nested notes
2298 and adjust the block trees appropriately. Even better would be to have
2299 a tighter connection between block trees and rtl so that this is not
2300 necessary. */
2301 start = squeeze_notes (start, end);
2302
2303 /* Scramble the insn chain. */
2304 if (end != PREV_INSN (b->head))
2305 reorder_insns (start, end, PREV_INSN (b->head));
2306
2307 if (rtl_dump_file)
2308 {
2309 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2310 a->index, b->index);
2311 }
2312
2313 /* Swap the records for the two blocks around. Although we are deleting B,
2314 A is now where B was and we want to compact the BB array from where
2315 A used to be. */
2316 BASIC_BLOCK (a->index) = b;
2317 BASIC_BLOCK (b->index) = a;
2318 index = a->index;
2319 a->index = b->index;
2320 b->index = index;
2321
2322 /* Now blocks A and B are contiguous. Merge them. */
2323 merge_blocks_nomove (a, b);
2324
2325 return 1;
2326 }
2327
2328 /* Blocks A and B are to be merged into a single block. B has no outgoing
2329 fallthru edge, so it can be moved after A without adding or modifying
2330 any jumps (aside from the jump from A to B). */
2331
2332 static int
2333 merge_blocks_move_successor_nojumps (a, b)
2334 basic_block a, b;
2335 {
2336 rtx start, end, barrier;
2337
2338 start = b->head;
2339 end = b->end;
2340 barrier = NEXT_INSN (end);
2341
2342 /* Recognize a jump table following block B. */
2343 if (GET_CODE (barrier) == CODE_LABEL
2344 && NEXT_INSN (barrier)
2345 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2346 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2347 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2348 {
2349 end = NEXT_INSN (barrier);
2350 barrier = NEXT_INSN (end);
2351 }
2352
2353 /* There had better have been a barrier there. Delete it. */
2354 if (GET_CODE (barrier) != BARRIER)
2355 abort ();
2356 flow_delete_insn (barrier);
2357
2358 /* Move block and loop notes out of the chain so that we do not
2359 disturb their order.
2360
2361 ??? A better solution would be to squeeze out all the non-nested notes
2362 and adjust the block trees appropriately. Even better would be to have
2363 a tighter connection between block trees and rtl so that this is not
2364 necessary. */
2365 start = squeeze_notes (start, end);
2366
2367 /* Scramble the insn chain. */
2368 reorder_insns (start, end, a->end);
2369
2370 /* Now blocks A and B are contiguous. Merge them. */
2371 merge_blocks_nomove (a, b);
2372
2373 if (rtl_dump_file)
2374 {
2375 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2376 b->index, a->index);
2377 }
2378
2379 return 1;
2380 }
2381
2382 /* Attempt to merge basic blocks that are potentially non-adjacent.
2383 Return true iff the attempt succeeded. */
2384
2385 static int
2386 merge_blocks (e, b, c)
2387 edge e;
2388 basic_block b, c;
2389 {
2390 /* If C has a tail recursion label, do not merge. There is no
2391 edge recorded from the call_placeholder back to this label, as
2392 that would make optimize_sibling_and_tail_recursive_calls more
2393 complex for no gain. */
2394 if (GET_CODE (c->head) == CODE_LABEL
2395 && tail_recursion_label_p (c->head))
2396 return 0;
2397
2398 /* If B has a fallthru edge to C, no need to move anything. */
2399 if (e->flags & EDGE_FALLTHRU)
2400 {
2401 merge_blocks_nomove (b, c);
2402
2403 if (rtl_dump_file)
2404 {
2405 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2406 b->index, c->index);
2407 }
2408
2409 return 1;
2410 }
2411 else
2412 {
2413 edge tmp_edge;
2414 basic_block d;
2415 int c_has_outgoing_fallthru;
2416 int b_has_incoming_fallthru;
2417
2418 /* We must make sure to not munge nesting of exception regions,
2419 lexical blocks, and loop notes.
2420
2421 The first is taken care of by requiring that the active eh
2422 region at the end of one block always matches the active eh
2423 region at the beginning of the next block.
2424
2425 The latter two are taken care of by squeezing out all the notes. */
2426
2427 /* ??? A throw/catch edge (or any abnormal edge) should rarely be
2428 executed, and we may want to treat blocks which have two out
2429 edges, one normal and one abnormal, as having only one edge for
2430 block merging purposes. */
2431
2432 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2433 if (tmp_edge->flags & EDGE_FALLTHRU)
2434 break;
2435 c_has_outgoing_fallthru = (tmp_edge != NULL);
2436
2437 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2438 if (tmp_edge->flags & EDGE_FALLTHRU)
2439 break;
2440 b_has_incoming_fallthru = (tmp_edge != NULL);
2441
2442 /* If B does not have an incoming fallthru, and the exception regions
2443 match, then it can be moved immediately before C without introducing
2444 or modifying jumps.
2445
2446 C cannot be the first block, so we do not have to worry about
2447 accessing a non-existent block. */
2448 d = BASIC_BLOCK (c->index - 1);
2449 if (! b_has_incoming_fallthru
2450 && d->eh_end == b->eh_beg
2451 && b->eh_end == c->eh_beg)
2452 return merge_blocks_move_predecessor_nojumps (b, c);
2453
2454 /* Otherwise, we're going to try to move C after B. Make sure the
2455 exception regions match.
2456
2457 If B is the last basic block, then we must not try to access the
2458 block structure for block B + 1. Luckily in that case we do not
2459 need to worry about matching exception regions. */
2460 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2461 if (b->eh_end == c->eh_beg
2462 && (d == NULL || c->eh_end == d->eh_beg))
2463 {
2464 /* If C does not have an outgoing fallthru, then it can be moved
2465 immediately after B without introducing or modifying jumps. */
2466 if (! c_has_outgoing_fallthru)
2467 return merge_blocks_move_successor_nojumps (b, c);
2468
2469 /* Otherwise, we'll need to insert an extra jump, and possibly
2470 a new block to contain it. */
2471 /* ??? Not implemented yet. */
2472 }
2473
2474 return 0;
2475 }
2476 }
2477
2478 /* Top level driver for merge_blocks. */
2479
2480 static void
2481 try_merge_blocks ()
2482 {
2483 int i;
2484
2485 /* Attempt to merge blocks as made possible by edge removal. If a block
2486 has only one successor, and the successor has only one predecessor,
2487 they may be combined. */
2488
2489 for (i = 0; i < n_basic_blocks;)
2490 {
2491 basic_block c, b = BASIC_BLOCK (i);
2492 edge s;
2493
2494 /* A loop because chains of blocks might be combinable. */
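/* For instance (a sketch, not taken from a real function): given a
   straight-line chain B -> C -> D in which every edge is the sole
   successor/predecessor edge, the first iteration merges C into B and
   the next merges D into the combined block, all before I is advanced.  */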
2495 while ((s = b->succ) != NULL
2496 && s->succ_next == NULL
2497 && (s->flags & EDGE_EH) == 0
2498 && (c = s->dest) != EXIT_BLOCK_PTR
2499 && c->pred->pred_next == NULL
2500 /* If the jump insn has side effects, we can't kill the edge. */
2501 && (GET_CODE (b->end) != JUMP_INSN
2502 || onlyjump_p (b->end))
2503 && merge_blocks (s, b, c))
2504 continue;
2505
2506 /* Don't get confused by the index shift caused by deleting blocks. */
2507 i = b->index + 1;
2508 }
2509 }
2510
2511 /* The given edge should potentially be a fallthru edge. If that is in
2512 fact true, delete the jump and barriers that are in the way. */
2513
2514 void
2515 tidy_fallthru_edge (e, b, c)
2516 edge e;
2517 basic_block b, c;
2518 {
2519 rtx q;
2520
2521 /* ??? In a late-running flow pass, other folks may have deleted basic
2522 blocks by nopping out blocks, leaving multiple BARRIERs between here
2523 and the target label. They ought to be chastised and fixed.
2524
2525 We can also wind up with a sequence of undeletable labels between
2526 one block and the next.
2527
2528 So search through a sequence of barriers, labels, and notes for
2529 the head of block C and assert that we really do fall through. */
2530
2531 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2532 return;
2533
2534 /* Remove what will soon cease being the jump insn from the source block.
2535 If block B consisted only of this single jump, turn it into a deleted
2536 note. */
2537 q = b->end;
2538 if (GET_CODE (q) == JUMP_INSN
2539 && onlyjump_p (q)
2540 && (any_uncondjump_p (q)
2541 || (b->succ == e && e->succ_next == NULL)))
2542 {
2543 #ifdef HAVE_cc0
2544 /* If this was a conditional jump, we need to also delete
2545 the insn that set cc0. */
2546 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2547 q = PREV_INSN (q);
2548 #endif
2549
2550 if (b->head == q)
2551 {
2552 PUT_CODE (q, NOTE);
2553 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2554 NOTE_SOURCE_FILE (q) = 0;
2555 }
2556 else
2557 b->end = q = PREV_INSN (q);
2558 }
2559
2560 /* Selectively unlink the sequence. */
2561 if (q != PREV_INSN (c->head))
2562 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2563
2564 e->flags |= EDGE_FALLTHRU;
2565 }
2566
2567 /* Fix up edges that now fall through, or rather should now fall through
2568 but previously required a jump around now deleted blocks. Simplify
2569 the search by only examining blocks numerically adjacent, since this
2570 is how find_basic_blocks created them. */
2571
2572 static void
2573 tidy_fallthru_edges ()
2574 {
2575 int i;
2576
2577 for (i = 1; i < n_basic_blocks; ++i)
2578 {
2579 basic_block b = BASIC_BLOCK (i - 1);
2580 basic_block c = BASIC_BLOCK (i);
2581 edge s;
2582
2583 /* We care about simple conditional or unconditional jumps with
2584 a single successor.
2585
2586 If we had a conditional branch to the next instruction when
2587 find_basic_blocks was called, then there will only be one
2588 out edge for the block which ended with the conditional
2589 branch (since we do not create duplicate edges).
2590
2591 Furthermore, the edge will be marked as a fallthru because we
2592 merge the flags for the duplicate edges. So we do not want to
2593 check that the edge is not a FALLTHRU edge. */
2594 if ((s = b->succ) != NULL
2595 && s->succ_next == NULL
2596 && s->dest == c
2597 /* If the jump insn has side effects, we can't tidy the edge. */
2598 && (GET_CODE (b->end) != JUMP_INSN
2599 || onlyjump_p (b->end)))
2600 tidy_fallthru_edge (s, b, c);
2601 }
2602 }
2603 \f
2604 /* Perform data flow analysis.
2605 F is the first insn of the function; FLAGS is a set of PROP_* flags
2606 to be used in accumulating flow info. */
2607
2608 void
2609 life_analysis (f, file, flags)
2610 rtx f;
2611 FILE *file;
2612 int flags;
2613 {
2614 #ifdef ELIMINABLE_REGS
2615 register int i;
2616 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2617 #endif
2618
2619 /* Record which registers will be eliminated. We use this in
2620 mark_used_regs. */
2621
2622 CLEAR_HARD_REG_SET (elim_reg_set);
2623
2624 #ifdef ELIMINABLE_REGS
2625 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2626 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2627 #else
2628 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2629 #endif
2630
2631 if (! optimize)
2632 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2633
2634 /* The post-reload life analysis must have (on a global basis) the same
2635 registers live as were computed by reload itself. Otherwise elimination
2636 offsets and such may be incorrect.
2637
2638 Reload will mark some registers as live even though they do not
2639 appear in the rtl.
2640
2641 We don't want to create new auto-incs after reload, since they
2642 are unlikely to be useful and can cause problems with shared
2643 stack slots. */
2644 if (reload_completed)
2645 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2646
2647 /* We want alias analysis information for local dead store elimination. */
2648 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2649 init_alias_analysis ();
2650
2651 /* Always remove no-op moves. Do this before other processing so
2652 that we don't have to keep re-scanning them. */
2653 delete_noop_moves (f);
2654
2655 /* Some targets can emit simpler epilogues if they know that sp was
2656 not ever modified during the function. After reload, of course,
2657 we've already emitted the epilogue so there's no sense searching. */
2658 if (! reload_completed)
2659 notice_stack_pointer_modification (f);
2660
2661 /* Allocate and zero out data structures that will record the
2662 data from lifetime analysis. */
2663 allocate_reg_life_data ();
2664 allocate_bb_life_data ();
2665
2666 /* Find the set of registers live on function exit. */
2667 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2668
2669 /* "Update" life info from zero. It'd be nice to begin the
2670 relaxation with just the exit and noreturn blocks, but that set
2671 is not immediately handy. */
2672
2673 if (flags & PROP_REG_INFO)
2674 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2675 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2676
2677 /* Clean up. */
2678 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2679 end_alias_analysis ();
2680
2681 if (file)
2682 dump_flow_info (file);
2683
2684 free_basic_block_vars (1);
2685 }
2686
2687 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2688 Search for REGNO. If found, abort if it is not wider than word_mode. */
2689
2690 static int
2691 verify_wide_reg_1 (px, pregno)
2692 rtx *px;
2693 void *pregno;
2694 {
2695 rtx x = *px;
2696 unsigned int regno = *(int *) pregno;
2697
2698 if (GET_CODE (x) == REG && REGNO (x) == regno)
2699 {
2700 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2701 abort ();
2702 return 1;
2703 }
2704 return 0;
2705 }
2706
2707 /* A subroutine of verify_local_live_at_start. Search through insns
2708 between HEAD and END looking for register REGNO. */
2709
2710 static void
2711 verify_wide_reg (regno, head, end)
2712 int regno;
2713 rtx head, end;
2714 {
2715 while (1)
2716 {
2717 if (INSN_P (head)
2718 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2719 return;
2720 if (head == end)
2721 break;
2722 head = NEXT_INSN (head);
2723 }
2724
2725 /* We didn't find the register at all. Something's way screwy. */
2726 abort ();
2727 }
2728
2729 /* A subroutine of update_life_info. Verify that there are no untoward
2730 changes in live_at_start during a local update. */
2731
2732 static void
2733 verify_local_live_at_start (new_live_at_start, bb)
2734 regset new_live_at_start;
2735 basic_block bb;
2736 {
2737 if (reload_completed)
2738 {
2739 /* After reload, there are no pseudos, nor subregs of multi-word
2740 registers. The regsets should exactly match. */
2741 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2742 abort ();
2743 }
2744 else
2745 {
2746 int i;
2747
2748 /* Find the set of changed registers. */
2749 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2750
2751 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2752 {
2753 /* No registers should die. */
2754 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2755 abort ();
2756 /* Verify that the now-live register is wider than word_mode. */
2757 verify_wide_reg (i, bb->head, bb->end);
2758 });
2759 }
2760 }
2761
2762 /* Updates life information starting with the basic blocks set in BLOCKS.
2763 If BLOCKS is null, consider it to be the universal set.
2764
2765 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2766 we are only expecting local modifications to basic blocks. If we find
2767 extra registers live at the beginning of a block, then we either killed
2768 useful data, or we have a broken split that wants data not provided.
2769 If we find registers removed from live_at_start, that means we have
2770 a broken peephole that is killing a register it shouldn't.
2771
2772 ??? This is not true in one situation -- when a pre-reload splitter
2773 generates subregs of a multi-word pseudo, current life analysis will
2774 lose the kill. So we _can_ have a pseudo go live. How irritating.
2775
2776 Including PROP_REG_INFO does not properly refresh regs_ever_live
2777 unless the caller resets it to zero. */
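/* For example (a sketch; this mirrors the call in life_analysis above),
   a full global recomputation is

	if (flags & PROP_REG_INFO)
	  memset (regs_ever_live, 0, sizeof (regs_ever_live));
	update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);

   whereas a pass that only rewrote insns inside the blocks recorded in
   BLOCKS would pass that sbitmap and UPDATE_LIFE_LOCAL instead.  */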
2778
2779 void
2780 update_life_info (blocks, extent, prop_flags)
2781 sbitmap blocks;
2782 enum update_life_extent extent;
2783 int prop_flags;
2784 {
2785 regset tmp;
2786 regset_head tmp_head;
2787 int i;
2788
2789 tmp = INITIALIZE_REG_SET (tmp_head);
2790
2791 /* For a global update, we go through the relaxation process again. */
2792 if (extent != UPDATE_LIFE_LOCAL)
2793 {
2794 calculate_global_regs_live (blocks, blocks,
2795 prop_flags & PROP_SCAN_DEAD_CODE);
2796
2797 /* If asked, remove notes from the blocks we'll update. */
2798 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2799 count_or_remove_death_notes (blocks, 1);
2800 }
2801
2802 if (blocks)
2803 {
2804 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2805 {
2806 basic_block bb = BASIC_BLOCK (i);
2807
2808 COPY_REG_SET (tmp, bb->global_live_at_end);
2809 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2810
2811 if (extent == UPDATE_LIFE_LOCAL)
2812 verify_local_live_at_start (tmp, bb);
2813 });
2814 }
2815 else
2816 {
2817 for (i = n_basic_blocks - 1; i >= 0; --i)
2818 {
2819 basic_block bb = BASIC_BLOCK (i);
2820
2821 COPY_REG_SET (tmp, bb->global_live_at_end);
2822 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2823
2824 if (extent == UPDATE_LIFE_LOCAL)
2825 verify_local_live_at_start (tmp, bb);
2826 }
2827 }
2828
2829 FREE_REG_SET (tmp);
2830
2831 if (prop_flags & PROP_REG_INFO)
2832 {
2833 /* The only pseudos that are live at the beginning of the function
2834 are those that were not set anywhere in the function. local-alloc
2835 doesn't know how to handle these correctly, so mark them as not
2836 local to any one basic block. */
2837 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2838 FIRST_PSEUDO_REGISTER, i,
2839 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2840
2841 /* We have a problem with any pseudoreg that lives across the setjmp.
2842 ANSI says that if a user variable does not change in value between
2843 the setjmp and the longjmp, then the longjmp preserves it. This
2844 includes longjmp from a place where the pseudo appears dead.
2845 (In principle, the value still exists if it is in scope.)
2846 If the pseudo goes in a hard reg, some other value may occupy
2847 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2848 Conclusion: such a pseudo must not go in a hard reg. */
2849 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2850 FIRST_PSEUDO_REGISTER, i,
2851 {
2852 if (regno_reg_rtx[i] != 0)
2853 {
2854 REG_LIVE_LENGTH (i) = -1;
2855 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2856 }
2857 });
2858 }
2859 }
2860
2861 /* Free the variables allocated by find_basic_blocks.
2862
2863 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2864
2865 void
2866 free_basic_block_vars (keep_head_end_p)
2867 int keep_head_end_p;
2868 {
2869 if (basic_block_for_insn)
2870 {
2871 VARRAY_FREE (basic_block_for_insn);
2872 basic_block_for_insn = NULL;
2873 }
2874
2875 if (! keep_head_end_p)
2876 {
2877 clear_edges ();
2878 VARRAY_FREE (basic_block_info);
2879 n_basic_blocks = 0;
2880
2881 ENTRY_BLOCK_PTR->aux = NULL;
2882 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2883 EXIT_BLOCK_PTR->aux = NULL;
2884 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2885 }
2886 }
2887
2888 /* Return nonzero if the destination of SET equals the source. */
2889
2890 static int
2891 set_noop_p (set)
2892 rtx set;
2893 {
2894 rtx src = SET_SRC (set);
2895 rtx dst = SET_DEST (set);
2896
2897 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2898 {
2899 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2900 return 0;
2901 src = SUBREG_REG (src);
2902 dst = SUBREG_REG (dst);
2903 }
2904
2905 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2906 && REGNO (src) == REGNO (dst));
2907 }
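/* For example, (set (reg 42) (reg 42)) is a no-op set, as is the same
   copy written through SUBREGs of the same register and word; reg 42 is
   an arbitrary register number chosen for illustration.  */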
2908
2909 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2910 value to itself. */
2911
2912 static int
2913 noop_move_p (insn)
2914 rtx insn;
2915 {
2916 rtx pat = PATTERN (insn);
2917
2918 /* Insns carrying these notes are useful later on. */
2919 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2920 return 0;
2921
2922 if (GET_CODE (pat) == SET && set_noop_p (pat))
2923 return 1;
2924
2925 if (GET_CODE (pat) == PARALLEL)
2926 {
2927 int i;
2928 /* If nothing but SETs of registers to themselves,
2929 this insn can also be deleted. */
2930 for (i = 0; i < XVECLEN (pat, 0); i++)
2931 {
2932 rtx tem = XVECEXP (pat, 0, i);
2933
2934 if (GET_CODE (tem) == USE
2935 || GET_CODE (tem) == CLOBBER)
2936 continue;
2937
2938 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2939 return 0;
2940 }
2941
2942 return 1;
2943 }
2944 return 0;
2945 }
2946
2947 /* Delete any insns that copy a register to itself. */
2948
2949 static void
2950 delete_noop_moves (f)
2951 rtx f;
2952 {
2953 rtx insn;
2954 for (insn = f; insn; insn = NEXT_INSN (insn))
2955 {
2956 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2957 {
2958 PUT_CODE (insn, NOTE);
2959 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2960 NOTE_SOURCE_FILE (insn) = 0;
2961 }
2962 }
2963 }
2964
2965 /* Determine if the stack pointer is constant over the life of the function.
2966 Only useful before prologues have been emitted. */
2967
2968 static void
2969 notice_stack_pointer_modification_1 (x, pat, data)
2970 rtx x;
2971 rtx pat ATTRIBUTE_UNUSED;
2972 void *data ATTRIBUTE_UNUSED;
2973 {
2974 if (x == stack_pointer_rtx
2975 /* The stack pointer is only modified indirectly as the result
2976 of a push until later in flow. See the comments in rtl.texi
2977 regarding Embedded Side-Effects on Addresses. */
2978 || (GET_CODE (x) == MEM
2979 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2980 || GET_CODE (XEXP (x, 0)) == PRE_INC
2981 || GET_CODE (XEXP (x, 0)) == POST_DEC
2982 || GET_CODE (XEXP (x, 0)) == POST_INC)
2983 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2984 current_function_sp_is_unchanging = 0;
2985 }
2986
2987 static void
2988 notice_stack_pointer_modification (f)
2989 rtx f;
2990 {
2991 rtx insn;
2992
2993 /* Assume that the stack pointer is unchanging if alloca hasn't
2994 been used. */
2995 current_function_sp_is_unchanging = !current_function_calls_alloca;
2996 if (! current_function_sp_is_unchanging)
2997 return;
2998
2999 for (insn = f; insn; insn = NEXT_INSN (insn))
3000 {
3001 if (INSN_P (insn))
3002 {
3003 /* Check if insn modifies the stack pointer. */
3004 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3005 NULL);
3006 if (! current_function_sp_is_unchanging)
3007 return;
3008 }
3009 }
3010 }
3011
3012 /* Mark a register in SET. Hard registers in large modes get all
3013 of their component registers set as well. */
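/* For example (a sketch): on a target where HARD_REGNO_NREGS reports that
   a multi-word value starting in hard reg N occupies two registers,
   marking that reg sets both bit N and bit N + 1 in SET.  */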
3014
3015 static void
3016 mark_reg (reg, xset)
3017 rtx reg;
3018 void *xset;
3019 {
3020 regset set = (regset) xset;
3021 int regno = REGNO (reg);
3022
3023 if (GET_MODE (reg) == BLKmode)
3024 abort ();
3025
3026 SET_REGNO_REG_SET (set, regno);
3027 if (regno < FIRST_PSEUDO_REGISTER)
3028 {
3029 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3030 while (--n > 0)
3031 SET_REGNO_REG_SET (set, regno + n);
3032 }
3033 }
3034
3035 /* Mark those regs which are needed at the end of the function as live
3036 at the end of the last basic block. */
3037
3038 static void
3039 mark_regs_live_at_end (set)
3040 regset set;
3041 {
3042 int i;
3043
3044 /* If exiting needs the right stack value, consider the stack pointer
3045 live at the end of the function. */
3046 if ((HAVE_epilogue && reload_completed)
3047 || ! EXIT_IGNORE_STACK
3048 || (! FRAME_POINTER_REQUIRED
3049 && ! current_function_calls_alloca
3050 && flag_omit_frame_pointer)
3051 || current_function_sp_is_unchanging)
3052 {
3053 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3054 }
3055
3056 /* Mark the frame pointer if needed at the end of the function. If
3057 we end up eliminating it, it will be removed from the live list
3058 of each basic block by reload. */
3059
3060 if (! reload_completed || frame_pointer_needed)
3061 {
3062 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3063 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3064 /* If they are different, also mark the hard frame pointer as live. */
3065 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3066 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3067 #endif
3068 }
3069
3070 #ifdef PIC_OFFSET_TABLE_REGNUM
3071 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3072 /* Many architectures have a GP register even without flag_pic.
3073 Assume the pic register is not in use, or will be handled by
3074 other means, if it is not fixed. */
3075 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3076 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3077 #endif
3078 #endif
3079
3080 /* Mark all global registers, and all registers used by the epilogue
3081 as being live at the end of the function since they may be
3082 referenced by our caller. */
3083 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3084 if (global_regs[i] || EPILOGUE_USES (i))
3085 SET_REGNO_REG_SET (set, i);
3086
3087 /* Mark all call-saved registers that we actually used. */
3088 if (HAVE_epilogue && reload_completed)
3089 {
3090 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3091 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3092 SET_REGNO_REG_SET (set, i);
3093 }
3094
3095 /* Mark function return value. */
3096 diddle_return_value (mark_reg, set);
3097 }
3098
3099 /* Callback function for for_each_successor_phi. DATA is a regset.
3100 Sets SRC_REGNO, the regno of the phi alternative for phi node
3101 INSN, in the regset DATA. */
3102
3103 static int
3104 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3105 rtx insn ATTRIBUTE_UNUSED;
3106 int dest_regno ATTRIBUTE_UNUSED;
3107 int src_regno;
3108 void *data;
3109 {
3110 regset live = (regset) data;
3111 SET_REGNO_REG_SET (live, src_regno);
3112 return 0;
3113 }
3114
3115 /* Propagate global life info around the graph of basic blocks. Begin
3116 considering blocks with their corresponding bit set in BLOCKS_IN.
3117 If BLOCKS_IN is null, consider it the universal set.
3118
3119 BLOCKS_OUT is set for every block that was changed. */
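/* Informally, this is the usual backward liveness fixed point (a sketch
   for orientation only, not part of the original commentary):

	live_at_end (BB)   = union over successors S of live_at_start (S)
	live_at_start (BB) = uses (BB) | (live_at_end (BB) - defs (BB))

   The second equation is computed insn by insn by propagate_block; the
   code below additionally forces certain hard registers live and adds
   phi-node uses when in SSA form.  */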
3120
3121 static void
3122 calculate_global_regs_live (blocks_in, blocks_out, flags)
3123 sbitmap blocks_in, blocks_out;
3124 int flags;
3125 {
3126 basic_block *queue, *qhead, *qtail, *qend;
3127 regset tmp, new_live_at_end;
3128 regset_head tmp_head;
3129 regset_head new_live_at_end_head;
3130 int i;
3131
3132 tmp = INITIALIZE_REG_SET (tmp_head);
3133 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3134
3135 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3136 because the `head == tail' style test for an empty queue doesn't
3137 work with a full queue. */
3138 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3139 qtail = queue;
3140 qhead = qend = queue + n_basic_blocks + 2;
3141
3142 /* Clear out the garbage that might be hanging out in bb->aux. */
3143 for (i = n_basic_blocks - 1; i >= 0; --i)
3144 BASIC_BLOCK (i)->aux = NULL;
3145
3146 /* Queue the blocks set in the initial mask. Do this in reverse block
3147 number order so that the first round is more likely to do
3148 useful work. We use AUX non-null to flag that the block is queued. */
3149 if (blocks_in)
3150 {
3151 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3152 {
3153 basic_block bb = BASIC_BLOCK (i);
3154 *--qhead = bb;
3155 bb->aux = bb;
3156 });
3157 }
3158 else
3159 {
3160 for (i = 0; i < n_basic_blocks; ++i)
3161 {
3162 basic_block bb = BASIC_BLOCK (i);
3163 *--qhead = bb;
3164 bb->aux = bb;
3165 }
3166 }
3167
3168 if (blocks_out)
3169 sbitmap_zero (blocks_out);
3170
3171 while (qhead != qtail)
3172 {
3173 int rescan, changed;
3174 basic_block bb;
3175 edge e;
3176
3177 bb = *qhead++;
3178 if (qhead == qend)
3179 qhead = queue;
3180 bb->aux = NULL;
3181
3182 /* Begin by propagating live_at_start from the successor blocks. */
3183 CLEAR_REG_SET (new_live_at_end);
3184 for (e = bb->succ; e; e = e->succ_next)
3185 {
3186 basic_block sb = e->dest;
3187 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3188 }
3189
3190 /* Force the stack pointer to be live -- which might not already be
3191 the case for blocks within infinite loops. */
3192 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3193
3194 /* Similarly for the frame pointer before reload. Any reference
3195 to any pseudo before reload is a potential reference of the
3196 frame pointer. */
3197 if (! reload_completed)
3198 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3199
3200 /* Regs used in phi nodes are not included in
3201 global_live_at_start, since they are live only along a
3202 particular edge. Set those regs that are live because of a
3203 phi node alternative corresponding to this particular block. */
3204 if (in_ssa_form)
3205 for_each_successor_phi (bb, &set_phi_alternative_reg,
3206 new_live_at_end);
3207
3208 if (bb == ENTRY_BLOCK_PTR)
3209 {
3210 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3211 continue;
3212 }
3213
3214 /* On our first pass through this block, we'll go ahead and continue.
3215 Recognize the first pass by local_set being NULL. On subsequent passes, we
3216 get to skip out early if live_at_end wouldn't have changed. */
3217
3218 if (bb->local_set == NULL)
3219 {
3220 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3221 rescan = 1;
3222 }
3223 else
3224 {
3225 /* If any bits were removed from live_at_end, we'll have to
3226 rescan the block. This wouldn't be necessary if we had
3227 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3228 local_live is really dependent on live_at_end. */
3229 CLEAR_REG_SET (tmp);
3230 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3231 new_live_at_end, BITMAP_AND_COMPL);
3232
3233 if (! rescan)
3234 {
3235 /* Find the set of changed bits. Take this opportunity
3236 to notice when this set is empty and exit early. */
3237 CLEAR_REG_SET (tmp);
3238 changed = bitmap_operation (tmp, bb->global_live_at_end,
3239 new_live_at_end, BITMAP_XOR);
3240 if (! changed)
3241 continue;
3242
3243 /* If any of the changed bits overlap with local_set,
3244 we'll have to rescan the block. Detect overlap by
3245 the AND with ~local_set turning off bits. */
3246 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3247 BITMAP_AND_COMPL);
3248 }
3249 }
3250
3251 /* Let our caller know that BB changed enough to require its
3252 death notes updated. */
3253 if (blocks_out)
3254 SET_BIT (blocks_out, bb->index);
3255
3256 if (! rescan)
3257 {
3258 /* Add to live_at_start the set of all registers in
3259 new_live_at_end that aren't in the old live_at_end. */
3260
3261 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3262 BITMAP_AND_COMPL);
3263 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3264
3265 changed = bitmap_operation (bb->global_live_at_start,
3266 bb->global_live_at_start,
3267 tmp, BITMAP_IOR);
3268 if (! changed)
3269 continue;
3270 }
3271 else
3272 {
3273 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3274
3275 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3276 into live_at_start. */
3277 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3278
3279 /* If live_at_start didn't change, no need to go further. */
3280 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3281 continue;
3282
3283 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3284 }
3285
3286 /* Queue all predecessors of BB so that we may re-examine
3287 their live_at_end. */
3288 for (e = bb->pred; e; e = e->pred_next)
3289 {
3290 basic_block pb = e->src;
3291 if (pb->aux == NULL)
3292 {
3293 *qtail++ = pb;
3294 if (qtail == qend)
3295 qtail = queue;
3296 pb->aux = pb;
3297 }
3298 }
3299 }
3300
3301 FREE_REG_SET (tmp);
3302 FREE_REG_SET (new_live_at_end);
3303
3304 if (blocks_out)
3305 {
3306 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3307 {
3308 basic_block bb = BASIC_BLOCK (i);
3309 FREE_REG_SET (bb->local_set);
3310 });
3311 }
3312 else
3313 {
3314 for (i = n_basic_blocks - 1; i >= 0; --i)
3315 {
3316 basic_block bb = BASIC_BLOCK (i);
3317 FREE_REG_SET (bb->local_set);
3318 }
3319 }
3320
3321 free (queue);
3322 }
3323 \f
3324 /* Subroutines of life analysis. */
3325
3326 /* Allocate the permanent data structures that represent the results
3327 of life analysis. Not static since used also for stupid life analysis. */
3328
3329 void
3330 allocate_bb_life_data ()
3331 {
3332 register int i;
3333
3334 for (i = 0; i < n_basic_blocks; i++)
3335 {
3336 basic_block bb = BASIC_BLOCK (i);
3337
3338 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3339 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3340 }
3341
3342 ENTRY_BLOCK_PTR->global_live_at_end
3343 = OBSTACK_ALLOC_REG_SET (function_obstack);
3344 EXIT_BLOCK_PTR->global_live_at_start
3345 = OBSTACK_ALLOC_REG_SET (function_obstack);
3346
3347 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3348 }
3349
3350 void
3351 allocate_reg_life_data ()
3352 {
3353 int i;
3354
3355 max_regno = max_reg_num ();
3356
3357 /* Recalculate the register space, in case it has grown. Old style
3358 vector oriented regsets would set regset_{size,bytes} here also. */
3359 allocate_reg_info (max_regno, FALSE, FALSE);
3360
3361 /* Reset all the data we'll collect in propagate_block and its
3362 subroutines. */
3363 for (i = 0; i < max_regno; i++)
3364 {
3365 REG_N_SETS (i) = 0;
3366 REG_N_REFS (i) = 0;
3367 REG_N_DEATHS (i) = 0;
3368 REG_N_CALLS_CROSSED (i) = 0;
3369 REG_LIVE_LENGTH (i) = 0;
3370 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3371 }
3372 }
3373
3374 /* Delete dead instructions for propagate_block. */
3375
3376 static void
3377 propagate_block_delete_insn (bb, insn)
3378 basic_block bb;
3379 rtx insn;
3380 {
3381 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3382
3383 /* If the insn referred to a label, and that label was attached to
3384 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3385 pretty much mandatory to delete it, because the ADDR_VEC may be
3386 referencing labels that no longer exist. */
3387
3388 if (inote)
3389 {
3390 rtx label = XEXP (inote, 0);
3391 rtx next;
3392
3393 if (LABEL_NUSES (label) == 1
3394 && (next = next_nonnote_insn (label)) != NULL
3395 && GET_CODE (next) == JUMP_INSN
3396 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3397 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3398 {
3399 rtx pat = PATTERN (next);
3400 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3401 int len = XVECLEN (pat, diff_vec_p);
3402 int i;
3403
3404 for (i = 0; i < len; i++)
3405 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3406
3407 flow_delete_insn (next);
3408 }
3409 }
3410
3411 if (bb->end == insn)
3412 bb->end = PREV_INSN (insn);
3413 flow_delete_insn (insn);
3414 }
3415
3416 /* Delete dead libcalls for propagate_block. Return the insn
3417 before the libcall. */
3418
3419 static rtx
3420 propagate_block_delete_libcall (bb, insn, note)
3421 basic_block bb;
3422 rtx insn, note;
3423 {
3424 rtx first = XEXP (note, 0);
3425 rtx before = PREV_INSN (first);
3426
3427 if (insn == bb->end)
3428 bb->end = before;
3429
3430 flow_delete_insn_chain (first, insn);
3431 return before;
3432 }
3433
3434 /* Update the life-status of regs for one insn. Return the previous insn. */
3435
3436 rtx
3437 propagate_one_insn (pbi, insn)
3438 struct propagate_block_info *pbi;
3439 rtx insn;
3440 {
3441 rtx prev = PREV_INSN (insn);
3442 int flags = pbi->flags;
3443 int insn_is_dead = 0;
3444 int libcall_is_dead = 0;
3445 rtx note;
3446 int i;
3447
3448 if (! INSN_P (insn))
3449 return prev;
3450
3451 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3452 if (flags & PROP_SCAN_DEAD_CODE)
3453 {
3454 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3455 REG_NOTES (insn));
3456 libcall_is_dead = (insn_is_dead && note != 0
3457 && libcall_dead_p (pbi, note, insn));
3458 }
3459
3460 /* We almost certainly don't want to delete prologue or epilogue
3461 instructions. Warn about probable compiler lossage. */
3462 if (insn_is_dead
3463 && reload_completed
3464 && (((HAVE_epilogue || HAVE_prologue)
3465 && prologue_epilogue_contains (insn))
3466 || (HAVE_sibcall_epilogue
3467 && sibcall_epilogue_contains (insn)))
3468 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3469 {
3470 if (flags & PROP_KILL_DEAD_CODE)
3471 {
3472 warning ("ICE: would have deleted prologue/epilogue insn");
3473 if (!inhibit_warnings)
3474 debug_rtx (insn);
3475 }
3476 libcall_is_dead = insn_is_dead = 0;
3477 }
3478
3479 /* If an instruction consists of just dead store(s) on final pass,
3480 delete it. */
3481 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3482 {
3483 /* Record sets. Do this even for dead instructions, since they
3484 would have killed the values if they hadn't been deleted. */
3485 mark_set_regs (pbi, PATTERN (insn), insn);
3486
3487 /* CC0 is now known to be dead. Either this insn used it,
3488 in which case it doesn't anymore, or clobbered it,
3489 so the next insn can't use it. */
3490 pbi->cc0_live = 0;
3491
3492 if (libcall_is_dead)
3493 {
3494 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3495 insn = NEXT_INSN (prev);
3496 }
3497 else
3498 propagate_block_delete_insn (pbi->bb, insn);
3499
3500 return prev;
3501 }
3502
3503 /* See if this is an increment or decrement that can be merged into
3504 a following memory address. */
3505 #ifdef AUTO_INC_DEC
3506 {
3507 register rtx x = single_set (insn);
3508
3509 /* Does this instruction increment or decrement a register? */
3510 if ((flags & PROP_AUTOINC)
3511 && x != 0
3512 && GET_CODE (SET_DEST (x)) == REG
3513 && (GET_CODE (SET_SRC (x)) == PLUS
3514 || GET_CODE (SET_SRC (x)) == MINUS)
3515 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3516 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3517 /* Ok, look for a following memory ref we can combine with.
3518 If one is found, change the memory ref to a PRE_INC
3519 or PRE_DEC, cancel this insn, and return 1.
3520 Return 0 if nothing has been done. */
3521 && try_pre_increment_1 (pbi, insn))
3522 return prev;
3523 }
3524 #endif /* AUTO_INC_DEC */
3525
3526 CLEAR_REG_SET (pbi->new_set);
3527
3528 /* If this is not the final pass, and this insn is copying the value of
3529 a library call and it's dead, don't scan the insns that perform the
3530 library call, so that the call's arguments are not marked live. */
3531 if (libcall_is_dead)
3532 {
3533 /* Record the death of the dest reg. */
3534 mark_set_regs (pbi, PATTERN (insn), insn);
3535
3536 insn = XEXP (note, 0);
3537 return PREV_INSN (insn);
3538 }
3539 else if (GET_CODE (PATTERN (insn)) == SET
3540 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3541 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3542 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3543 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3544 /* We have an insn to pop a constant amount off the stack.
3545 (Such insns use PLUS regardless of the direction of the stack,
3546 and any insn to adjust the stack by a constant is always a pop.)
3547 These insns, if not dead stores, have no effect on life. */
3548 ;
3549 else
3550 {
3551 /* Any regs live at the time of a call instruction must not go
3552 in a register clobbered by calls. Find all regs now live and
3553 record this for them. */
3554
3555 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3556 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3557 { REG_N_CALLS_CROSSED (i)++; });
3558
3559 /* Record sets. Do this even for dead instructions, since they
3560 would have killed the values if they hadn't been deleted. */
3561 mark_set_regs (pbi, PATTERN (insn), insn);
3562
3563 if (GET_CODE (insn) == CALL_INSN)
3564 {
3565 register int i;
3566 rtx note, cond;
3567
3568 cond = NULL_RTX;
3569 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3570 cond = COND_EXEC_TEST (PATTERN (insn));
3571
3572 /* Non-constant calls clobber memory. */
3573 if (! CONST_CALL_P (insn))
3574 free_EXPR_LIST_list (&pbi->mem_set_list);
3575
3576 /* There may be extra registers to be clobbered. */
3577 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3578 note;
3579 note = XEXP (note, 1))
3580 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3581 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3582 cond, insn, pbi->flags);
3583
3584 /* Calls change all call-used and global registers. */
3585 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3586 if (call_used_regs[i] && ! global_regs[i]
3587 && ! fixed_regs[i])
3588 {
3589 /* We do not want REG_UNUSED notes for these registers. */
3590 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3591 cond, insn,
3592 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3593 }
3594 }
3595
3596 /* If an insn doesn't use CC0, it becomes dead since we assume
3597 that every insn clobbers it. So show it dead here;
3598 mark_used_regs will set it live if it is referenced. */
3599 pbi->cc0_live = 0;
3600
3601 /* Record uses. */
3602 if (! insn_is_dead)
3603 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3604
3605 /* Sometimes we may have inserted something before INSN (such as a move)
3606 when we make an auto-inc. So ensure we will scan those insns. */
3607 #ifdef AUTO_INC_DEC
3608 prev = PREV_INSN (insn);
3609 #endif
3610
3611 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3612 {
3613 register int i;
3614 rtx note, cond;
3615
3616 cond = NULL_RTX;
3617 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3618 cond = COND_EXEC_TEST (PATTERN (insn));
3619
3620 /* Calls use their arguments. */
3621 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3622 note;
3623 note = XEXP (note, 1))
3624 if (GET_CODE (XEXP (note, 0)) == USE)
3625 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3626 cond, insn);
3627
3628 /* The stack ptr is used (honorarily) by a CALL insn. */
3629 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3630
3631 /* Calls may also reference any of the global registers,
3632 so they are made live. */
3633 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3634 if (global_regs[i])
3635 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3636 cond, insn);
3637 }
3638 }
3639
3640 /* On the final pass, update the count of insns in which each reg
3641 is live. */
3642 if (flags & PROP_REG_INFO)
3643 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3644 { REG_LIVE_LENGTH (i)++; });
3645
3646 return prev;
3647 }
3648
3649 /* Initialize a propagate_block_info struct for public consumption.
3650 Note that the structure itself is opaque to this file, but that
3651 the user can use the regsets provided here. */
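/* Roughly, a caller such as propagate_block (below) uses this as

	pbi = init_propagate_block_info (bb, live, local_set, flags);
	for (insn = bb->end; ; insn = prev)
	  {
	    prev = propagate_one_insn (pbi, insn);
	    if (insn == bb->head)
	      break;
	  }
	free_propagate_block_info (pbi);

   This is a sketch of the calling sequence, not a verbatim copy of that
   function.  */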
3652
3653 struct propagate_block_info *
3654 init_propagate_block_info (bb, live, local_set, flags)
3655 basic_block bb;
3656 regset live;
3657 regset local_set;
3658 int flags;
3659 {
3660 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3661
3662 pbi->bb = bb;
3663 pbi->reg_live = live;
3664 pbi->mem_set_list = NULL_RTX;
3665 pbi->local_set = local_set;
3666 pbi->cc0_live = 0;
3667 pbi->flags = flags;
3668
3669 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3670 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3671 else
3672 pbi->reg_next_use = NULL;
3673
3674 pbi->new_set = BITMAP_XMALLOC ();
3675
3676 #ifdef HAVE_conditional_execution
3677 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3678 free_reg_cond_life_info);
3679 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3680
3681 /* If this block ends in a conditional branch, for each register live
3682 from one side of the branch and not the other, record the register
3683 as conditionally dead. */
3684 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3685 && GET_CODE (bb->end) == JUMP_INSN
3686 && any_condjump_p (bb->end))
3687 {
3688 regset_head diff_head;
3689 regset diff = INITIALIZE_REG_SET (diff_head);
3690 basic_block bb_true, bb_false;
3691 rtx cond_true, cond_false, set_src;
3692 int i;
3693
3694 /* Identify the successor blocks. */
3695 bb_true = bb->succ->dest;
3696 if (bb->succ->succ_next != NULL)
3697 {
3698 bb_false = bb->succ->succ_next->dest;
3699
3700 if (bb->succ->flags & EDGE_FALLTHRU)
3701 {
3702 basic_block t = bb_false;
3703 bb_false = bb_true;
3704 bb_true = t;
3705 }
3706 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3707 abort ();
3708 }
3709 else
3710 {
3711 /* This can happen with a conditional jump to the next insn. */
3712 if (JUMP_LABEL (bb->end) != bb_true->head)
3713 abort ();
3714
3715 /* Simplest way to do nothing. */
3716 bb_false = bb_true;
3717 }
3718
3719 /* Extract the condition from the branch. */
3720 set_src = SET_SRC (pc_set (bb->end));
3721 cond_true = XEXP (set_src, 0);
3722 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3723 GET_MODE (cond_true), XEXP (cond_true, 0),
3724 XEXP (cond_true, 1));
3725 if (GET_CODE (XEXP (set_src, 1)) == PC)
3726 {
3727 rtx t = cond_false;
3728 cond_false = cond_true;
3729 cond_true = t;
3730 }
3731
3732 /* Compute which registers lead different lives in the successors. */
3733 if (bitmap_operation (diff, bb_true->global_live_at_start,
3734 bb_false->global_live_at_start, BITMAP_XOR))
3735 {
3736 rtx reg = XEXP (cond_true, 0);
3737
3738 if (GET_CODE (reg) == SUBREG)
3739 reg = SUBREG_REG (reg);
3740
3741 if (GET_CODE (reg) != REG)
3742 abort ();
3743
3744 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
3745
3746 /* For each such register, mark it conditionally dead. */
3747 EXECUTE_IF_SET_IN_REG_SET
3748 (diff, 0, i,
3749 {
3750 struct reg_cond_life_info *rcli;
3751 rtx cond;
3752
3753 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3754
3755 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3756 cond = cond_false;
3757 else
3758 cond = cond_true;
3759 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3760
3761 splay_tree_insert (pbi->reg_cond_dead, i,
3762 (splay_tree_value) rcli);
3763 });
3764 }
3765
3766 FREE_REG_SET (diff);
3767 }
3768 #endif
3769
3770 /* If this block has no successors, or its only successor is the exit
3771 block, any stores to the frame that aren't used later in the block
3772 are dead. So make a pass over the block recording any such stores
3773 and show them dead at the end. We do a very conservative and simple job here. */
3774 if (optimize
3775 && (flags & PROP_SCAN_DEAD_CODE)
3776 && (bb->succ == NULL
3777 || (bb->succ->succ_next == NULL
3778 && bb->succ->dest == EXIT_BLOCK_PTR)))
3779 {
3780 rtx insn;
3781 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3782 if (GET_CODE (insn) == INSN
3783 && GET_CODE (PATTERN (insn)) == SET
3784 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3785 {
3786 rtx mem = SET_DEST (PATTERN (insn));
3787
3788 if (XEXP (mem, 0) == frame_pointer_rtx
3789 || (GET_CODE (XEXP (mem, 0)) == PLUS
3790 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3791 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3792 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3793 }
3794 }
3795
3796 return pbi;
3797 }
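
/* Example (illustrative only; register numbers are hypothetical): if BB
 ends in a branch taken when (ne (reg 1) (const_int 0)) and (reg 5) is
 live at the start of the branch-taken block but not of the fallthrough
 block, the code above records (reg 5) in reg_cond_dead with the
 condition (eq (reg 1) (const_int 0)); that is, (reg 5) is treated as
 dead only along the fallthrough path. */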
3798
3799 /* Release a propagate_block_info struct. */
3800
3801 void
3802 free_propagate_block_info (pbi)
3803 struct propagate_block_info *pbi;
3804 {
3805 free_EXPR_LIST_list (&pbi->mem_set_list);
3806
3807 BITMAP_XFREE (pbi->new_set);
3808
3809 #ifdef HAVE_conditional_execution
3810 splay_tree_delete (pbi->reg_cond_dead);
3811 BITMAP_XFREE (pbi->reg_cond_reg);
3812 #endif
3813
3814 if (pbi->reg_next_use)
3815 free (pbi->reg_next_use);
3816
3817 free (pbi);
3818 }
3819
3820 /* Compute the registers live at the beginning of a basic block BB from
3821 those live at the end.
3822
3823 When called, REG_LIVE contains those live at the end. On return, it
3824 contains those live at the beginning.
3825
3826 LOCAL_SET, if non-null, will be set with all registers killed by
3827 this basic block. */
3828
3829 void
3830 propagate_block (bb, live, local_set, flags)
3831 basic_block bb;
3832 regset live;
3833 regset local_set;
3834 int flags;
3835 {
3836 struct propagate_block_info *pbi;
3837 rtx insn, prev;
3838
3839 pbi = init_propagate_block_info (bb, live, local_set, flags);
3840
3841 if (flags & PROP_REG_INFO)
3842 {
3843 register int i;
3844
3845 /* Process the regs live at the end of the block.
3846 Mark them as not local to any one basic block. */
3847 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3848 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3849 }
3850
3851 /* Scan the block an insn at a time from end to beginning. */
3852
3853 for (insn = bb->end;; insn = prev)
3854 {
3855 /* If this is a call to `setjmp' et al, warn if any
3856 non-volatile datum is live. */
3857 if ((flags & PROP_REG_INFO)
3858 && GET_CODE (insn) == NOTE
3859 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3860 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3861
3862 prev = propagate_one_insn (pbi, insn);
3863
3864 if (insn == bb->head)
3865 break;
3866 }
3867
3868 free_propagate_block_info (pbi);
3869 }
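
/* Illustrative sketch only (not compiled): one way a caller might drive
 propagate_block for a single block, assuming NEW_LIVE is a scratch
 regset and BB->global_live_at_end has already been computed. The
 function name is hypothetical. */
#if 0
static void
example_propagate_one_block (bb, new_live, flags)
 basic_block bb;
 regset new_live;
 int flags;
{
 /* Start from the registers live at the end of the block... */
 COPY_REG_SET (new_live, bb->global_live_at_end);

 /* ...and scan the insns backwards; on return NEW_LIVE holds the
 registers live at the start of the block. */
 propagate_block (bb, new_live, NULL, flags);
}
#endif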
3870 \f
3871 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3872 (SET expressions whose destinations are registers dead after the insn).
3873 NEEDED is the regset that says which regs are alive after the insn.
3874
3875 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3876
3877 If X is the entire body of an insn, NOTES contains the reg notes
3878 pertaining to the insn. */
3879
3880 static int
3881 insn_dead_p (pbi, x, call_ok, notes)
3882 struct propagate_block_info *pbi;
3883 rtx x;
3884 int call_ok;
3885 rtx notes ATTRIBUTE_UNUSED;
3886 {
3887 enum rtx_code code = GET_CODE (x);
3888
3889 #ifdef AUTO_INC_DEC
3890 /* If flow is invoked after reload, we must take existing AUTO_INC
3891 expressions into account. */
3892 if (reload_completed)
3893 {
3894 for (; notes; notes = XEXP (notes, 1))
3895 {
3896 if (REG_NOTE_KIND (notes) == REG_INC)
3897 {
3898 int regno = REGNO (XEXP (notes, 0));
3899
3900 /* Don't delete insns to set global regs. */
3901 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3902 || REGNO_REG_SET_P (pbi->reg_live, regno))
3903 return 0;
3904 }
3905 }
3906 }
3907 #endif
3908
3909 /* If setting something that's a reg or part of one,
3910 see if that register's altered value will be live. */
3911
3912 if (code == SET)
3913 {
3914 rtx r = SET_DEST (x);
3915
3916 #ifdef HAVE_cc0
3917 if (GET_CODE (r) == CC0)
3918 return ! pbi->cc0_live;
3919 #endif
3920
3921 /* A SET that is a subroutine call cannot be dead. */
3922 if (GET_CODE (SET_SRC (x)) == CALL)
3923 {
3924 if (! call_ok)
3925 return 0;
3926 }
3927
3928 /* Don't eliminate loads from volatile memory or volatile asms. */
3929 else if (volatile_refs_p (SET_SRC (x)))
3930 return 0;
3931
3932 if (GET_CODE (r) == MEM)
3933 {
3934 rtx temp;
3935
3936 if (MEM_VOLATILE_P (r))
3937 return 0;
3938
3939 /* Walk the set of memory locations we are currently tracking
3940 and see if one is an identical match to this memory location.
3941 If so, this memory write is dead (remember, we're walking
3942 backwards from the end of the block to the start). */
3943 temp = pbi->mem_set_list;
3944 while (temp)
3945 {
3946 if (rtx_equal_p (XEXP (temp, 0), r))
3947 return 1;
3948 temp = XEXP (temp, 1);
3949 }
3950 }
3951 else
3952 {
3953 while (GET_CODE (r) == SUBREG
3954 || GET_CODE (r) == STRICT_LOW_PART
3955 || GET_CODE (r) == ZERO_EXTRACT)
3956 r = XEXP (r, 0);
3957
3958 if (GET_CODE (r) == REG)
3959 {
3960 int regno = REGNO (r);
3961
3962 /* Obvious. */
3963 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3964 return 0;
3965
3966 /* If this is a hard register, verify that subsequent
3967 words are not needed. */
3968 if (regno < FIRST_PSEUDO_REGISTER)
3969 {
3970 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3971
3972 while (--n > 0)
3973 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3974 return 0;
3975 }
3976
3977 /* Don't delete insns to set global regs. */
3978 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3979 return 0;
3980
3981 /* Make sure insns to set the stack pointer aren't deleted. */
3982 if (regno == STACK_POINTER_REGNUM)
3983 return 0;
3984
3985 /* Make sure insns to set the frame pointer aren't deleted. */
3986 if (regno == FRAME_POINTER_REGNUM
3987 && (! reload_completed || frame_pointer_needed))
3988 return 0;
3989 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3990 if (regno == HARD_FRAME_POINTER_REGNUM
3991 && (! reload_completed || frame_pointer_needed))
3992 return 0;
3993 #endif
3994
3995 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3996 /* Make sure insns to set arg pointer are never deleted
3997 (if the arg pointer isn't fixed, there will be a USE
3998 for it, so we can treat it normally). */
3999 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4000 return 0;
4001 #endif
4002
4003 #ifdef PIC_OFFSET_TABLE_REGNUM
4004 /* Before reload, do not allow sets of the pic register
4005 to be deleted. Reload can insert references to
4006 constant pool memory anywhere in the function, making
4007 the PIC register live where it wasn't before. */
4008 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
4009 && ! reload_completed)
4010 return 0;
4011 #endif
4012
4013 /* Otherwise, the set is dead. */
4014 return 1;
4015 }
4016 }
4017 }
4018
4019 /* If performing several activities, insn is dead if each activity
4020 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4021 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4022 worth keeping. */
4023 else if (code == PARALLEL)
4024 {
4025 int i = XVECLEN (x, 0);
4026
4027 for (i--; i >= 0; i--)
4028 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4029 && GET_CODE (XVECEXP (x, 0, i)) != USE
4030 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4031 return 0;
4032
4033 return 1;
4034 }
4035
4036 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4037 is not necessarily true for hard registers. */
4038 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4039 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4040 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4041 return 1;
4042
4043 /* We do not check other CLOBBER or USE here. An insn consisting of just
4044 a CLOBBER or just a USE should not be deleted. */
4045 return 0;
4046 }
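
/* Example (illustrative only; register numbers are hypothetical): when
 scanning a block backwards, if (reg 58) is not in PBI->reg_live at
 this point, then

 (set (reg 58) (plus (reg 59) (const_int 4)))

 is reported dead by insn_dead_p: no later insn needs the value, so
 the insn is a candidate for deletion. Likewise a store to a frame
 slot already present on PBI->mem_set_list is reported dead, because
 a later store to the identical location overwrites it. */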
4047
4048 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4049 return 1 if the entire library call is dead.
4050 This is true if INSN copies a register (hard or pseudo)
4051 and if the hard return reg of the call insn is dead.
4052 (The caller should have tested the destination of the SET inside
4053 INSN already for death.)
4054
4055 If this insn doesn't just copy a register, then we don't
4056 have an ordinary libcall. In that case, cse could not have
4057 managed to substitute the source for the dest later on,
4058 so we can assume the libcall is dead.
4059
4060 PBI is the block info giving pseudoregs live before this insn.
4061 NOTE is the REG_RETVAL note of the insn. */
4062
4063 static int
4064 libcall_dead_p (pbi, note, insn)
4065 struct propagate_block_info *pbi;
4066 rtx note;
4067 rtx insn;
4068 {
4069 rtx x = single_set (insn);
4070
4071 if (x)
4072 {
4073 register rtx r = SET_SRC (x);
4074 if (GET_CODE (r) == REG)
4075 {
4076 rtx call = XEXP (note, 0);
4077 rtx call_pat;
4078 register int i;
4079
4080 /* Find the call insn. */
4081 while (call != insn && GET_CODE (call) != CALL_INSN)
4082 call = NEXT_INSN (call);
4083
4084 /* If there is none, do nothing special,
4085 since ordinary death handling can understand these insns. */
4086 if (call == insn)
4087 return 0;
4088
4089 /* See if the hard reg holding the value is dead.
4090 If this is a PARALLEL, find the call within it. */
4091 call_pat = PATTERN (call);
4092 if (GET_CODE (call_pat) == PARALLEL)
4093 {
4094 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4095 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4096 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4097 break;
4098
4099 /* This may be a library call that is returning a value
4100 via an invisible pointer. Do nothing special, since
4101 ordinary death handling can understand these insns. */
4102 if (i < 0)
4103 return 0;
4104
4105 call_pat = XVECEXP (call_pat, 0, i);
4106 }
4107
4108 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4109 }
4110 }
4111 return 1;
4112 }
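
/* Example (illustrative only; register numbers and the hard return
 register are hypothetical): a typical libcall block is

 A: (set (reg 60) ...) with a REG_LIBCALL note -> C
 B: (call_insn ...) sets the hard return reg
 C: (set (reg 70) (reg hard-return)) with a REG_RETVAL note -> A

 Given that C itself is dead ((reg 70) unused), libcall_dead_p walks
 from A to the call B and asks insn_dead_p whether the hard return
 register set by B is also dead; if so, the whole block is dead. */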
4113
4114 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4115 live at function entry. Don't count global register variables, variables
4116 in registers that can be used for function arg passing, or variables in
4117 fixed hard registers. */
4118
4119 int
4120 regno_uninitialized (regno)
4121 int regno;
4122 {
4123 if (n_basic_blocks == 0
4124 || (regno < FIRST_PSEUDO_REGISTER
4125 && (global_regs[regno]
4126 || fixed_regs[regno]
4127 || FUNCTION_ARG_REGNO_P (regno))))
4128 return 0;
4129
4130 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4131 }
4132
4133 /* 1 if register REGNO was alive at a place where `setjmp' was called
4134 and was set more than once or is an argument.
4135 Such regs may be clobbered by `longjmp'. */
4136
4137 int
4138 regno_clobbered_at_setjmp (regno)
4139 int regno;
4140 {
4141 if (n_basic_blocks == 0)
4142 return 0;
4143
4144 return ((REG_N_SETS (regno) > 1
4145 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4146 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4147 }
4148 \f
4149 /* INSN references memory, possibly using autoincrement addressing modes.
4150 Find any entries on the mem_set_list that need to be invalidated due
4151 to an address change. */
4152
4153 static void
4154 invalidate_mems_from_autoinc (pbi, insn)
4155 struct propagate_block_info *pbi;
4156 rtx insn;
4157 {
4158 rtx note;
4159 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4160 {
4161 if (REG_NOTE_KIND (note) == REG_INC)
4162 {
4163 rtx temp = pbi->mem_set_list;
4164 rtx prev = NULL_RTX;
4165 rtx next;
4166
4167 while (temp)
4168 {
4169 next = XEXP (temp, 1);
4170 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4171 {
4172 /* Splice temp out of list. */
4173 if (prev)
4174 XEXP (prev, 1) = next;
4175 else
4176 pbi->mem_set_list = next;
4177 free_EXPR_LIST_node (temp);
4178 }
4179 else
4180 prev = temp;
4181 temp = next;
4182 }
4183 }
4184 }
4185 }
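
/* Illustrative sketch only (not compiled): the splice-out idiom used
 above, and again in mark_set_1 and mark_used_regs, shown on a
 self-contained singly linked list. The type and function names are
 hypothetical. */
#if 0
struct example_node { struct example_node *next; int key; };

static void
example_splice_matching (head, key)
 struct example_node **head;
 int key;
{
 struct example_node *temp = *head, *prev = 0, *next;

 while (temp)
 {
 next = temp->next;
 if (temp->key == key)
 {
 /* Splice TEMP out of the list; PREV stays put so the scan
 can continue from NEXT. */
 if (prev)
 prev->next = next;
 else
 *head = next;
 free (temp);
 }
 else
 prev = temp;
 temp = next;
 }
}
#endif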
4186
4187 /* Process the registers that are set within X. They are marked dead
4188 (cleared in PBI->reg_live), because their old values are dead prior to this insn.
4189
4190 If INSN is nonzero, it is the insn being processed.
4191
4192 The set of operations to perform is taken from PBI->flags. */
4193
4194 static void
4195 mark_set_regs (pbi, x, insn)
4196 struct propagate_block_info *pbi;
4197 rtx x, insn;
4198 {
4199 rtx cond = NULL_RTX;
4200 rtx link;
4201 enum rtx_code code;
4202
4203 if (insn)
4204 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4205 {
4206 if (REG_NOTE_KIND (link) == REG_INC)
4207 mark_set_1 (pbi, SET, XEXP (link, 0),
4208 (GET_CODE (x) == COND_EXEC
4209 ? COND_EXEC_TEST (x) : NULL_RTX),
4210 insn, pbi->flags);
4211 }
4212 retry:
4213 switch (code = GET_CODE (x))
4214 {
4215 case SET:
4216 case CLOBBER:
4217 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4218 return;
4219
4220 case COND_EXEC:
4221 cond = COND_EXEC_TEST (x);
4222 x = COND_EXEC_CODE (x);
4223 goto retry;
4224
4225 case PARALLEL:
4226 {
4227 register int i;
4228 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4229 {
4230 rtx sub = XVECEXP (x, 0, i);
4231 switch (code = GET_CODE (sub))
4232 {
4233 case COND_EXEC:
4234 if (cond != NULL_RTX)
4235 abort ();
4236
4237 cond = COND_EXEC_TEST (sub);
4238 sub = COND_EXEC_CODE (sub);
4239 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4240 break;
4241 /* Fall through. */
4242
4243 case SET:
4244 case CLOBBER:
4245 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4246 break;
4247
4248 default:
4249 break;
4250 }
4251 }
4252 break;
4253 }
4254
4255 default:
4256 break;
4257 }
4258 }
4259
4260 /* Process a single SET or CLOBBER (CODE); REG is the destination being set within INSN. */
4261
4262 static void
4263 mark_set_1 (pbi, code, reg, cond, insn, flags)
4264 struct propagate_block_info *pbi;
4265 enum rtx_code code;
4266 rtx reg, cond, insn;
4267 int flags;
4268 {
4269 int regno_first = -1, regno_last = -1;
4270 int not_dead = 0;
4271 int i;
4272
4273 /* Some targets place small structures in registers for
4274 return values of functions. We have to detect this
4275 case specially here to get correct flow information. */
4276 if (GET_CODE (reg) == PARALLEL
4277 && GET_MODE (reg) == BLKmode)
4278 {
4279 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4280 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4281 return;
4282 }
4283
4284 /* Modifying just one hardware register of a multi-reg value or just a
4285 byte field of a register does not mean the value from before this insn
4286 is now dead. Of course, if it was dead afterward, it is unused now. */
4287
4288 switch (GET_CODE (reg))
4289 {
4290 case ZERO_EXTRACT:
4291 case SIGN_EXTRACT:
4292 case STRICT_LOW_PART:
4293 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4294 do
4295 reg = XEXP (reg, 0);
4296 while (GET_CODE (reg) == SUBREG
4297 || GET_CODE (reg) == ZERO_EXTRACT
4298 || GET_CODE (reg) == SIGN_EXTRACT
4299 || GET_CODE (reg) == STRICT_LOW_PART);
4300 if (GET_CODE (reg) == MEM)
4301 break;
4302 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4303 /* Fall through. */
4304
4305 case REG:
4306 regno_last = regno_first = REGNO (reg);
4307 if (regno_first < FIRST_PSEUDO_REGISTER)
4308 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4309 break;
4310
4311 case SUBREG:
4312 if (GET_CODE (SUBREG_REG (reg)) == REG)
4313 {
4314 enum machine_mode outer_mode = GET_MODE (reg);
4315 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4316
4317 /* Identify the range of registers affected. This is moderately
4318 tricky for hard registers. See alter_subreg. */
4319
4320 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4321 if (regno_first < FIRST_PSEUDO_REGISTER)
4322 {
4323 #ifdef ALTER_HARD_SUBREG
4324 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4325 inner_mode, regno_first);
4326 #else
4327 regno_first += SUBREG_WORD (reg);
4328 #endif
4329 regno_last = (regno_first
4330 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4331
4332 /* Since we've just adjusted the register number ranges, make
4333 sure REG matches. Otherwise some_was_live will be clear
4334 when it shouldn't have been, and we'll create incorrect
4335 REG_UNUSED notes. */
4336 reg = gen_rtx_REG (outer_mode, regno_first);
4337 }
4338 else
4339 {
4340 /* If the number of words in the subreg is less than the number
4341 of words in the full register, we have a well-defined partial
4342 set. Otherwise the high bits are undefined.
4343
4344 This is only really applicable to pseudos, since we just took
4345 care of multi-word hard registers. */
4346 if (((GET_MODE_SIZE (outer_mode)
4347 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4348 < ((GET_MODE_SIZE (inner_mode)
4349 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4350 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4351
4352 reg = SUBREG_REG (reg);
4353 }
4354 }
4355 else
4356 reg = SUBREG_REG (reg);
4357 break;
4358
4359 default:
4360 break;
4361 }
4362
4363 /* If this set is a MEM, then it kills any aliased writes.
4364 If this set is a REG, then it kills any MEMs which use the reg. */
4365 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4366 {
4367 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4368 {
4369 rtx temp = pbi->mem_set_list;
4370 rtx prev = NULL_RTX;
4371 rtx next;
4372
4373 while (temp)
4374 {
4375 next = XEXP (temp, 1);
4376 if ((GET_CODE (reg) == MEM
4377 && output_dependence (XEXP (temp, 0), reg))
4378 || (GET_CODE (reg) == REG
4379 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4380 {
4381 /* Splice this entry out of the list. */
4382 if (prev)
4383 XEXP (prev, 1) = next;
4384 else
4385 pbi->mem_set_list = next;
4386 free_EXPR_LIST_node (temp);
4387 }
4388 else
4389 prev = temp;
4390 temp = next;
4391 }
4392 }
4393
4394 /* If the memory reference had embedded side effects (autoincrement
4395 address modes), then we may need to kill some entries on the
4396 memory set list. */
4397 if (insn && GET_CODE (reg) == MEM)
4398 invalidate_mems_from_autoinc (pbi, insn);
4399
4400 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4401 /* ??? With more effort we could track conditional memory life. */
4402 && ! cond
4403 /* We do not know the size of a BLKmode store, so we do not track
4404 them for redundant store elimination. */
4405 && GET_MODE (reg) != BLKmode
4406 /* There are no REG_INC notes for SP, so we can't assume we'll see
4407 everything that invalidates it. To be safe, don't eliminate any
4408 stores through SP; none of them should be redundant anyway. */
4409 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4410 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4411 }
4412
4413 if (GET_CODE (reg) == REG
4414 && ! (regno_first == FRAME_POINTER_REGNUM
4415 && (! reload_completed || frame_pointer_needed))
4416 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4417 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4418 && (! reload_completed || frame_pointer_needed))
4419 #endif
4420 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4421 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4422 #endif
4423 )
4424 {
4425 int some_was_live = 0, some_was_dead = 0;
4426
4427 for (i = regno_first; i <= regno_last; ++i)
4428 {
4429 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4430 if (pbi->local_set)
4431 SET_REGNO_REG_SET (pbi->local_set, i);
4432 if (code != CLOBBER)
4433 SET_REGNO_REG_SET (pbi->new_set, i);
4434
4435 some_was_live |= needed_regno;
4436 some_was_dead |= ! needed_regno;
4437 }
4438
4439 #ifdef HAVE_conditional_execution
4440 /* Consider conditional death in deciding that the register needs
4441 a death note. */
4442 if (some_was_live && ! not_dead
4443 /* The stack pointer is never dead. Well, not strictly true,
4444 but it's very difficult to tell from here. Hopefully
4445 combine_stack_adjustments will fix up the most egregious
4446 errors. */
4447 && regno_first != STACK_POINTER_REGNUM)
4448 {
4449 for (i = regno_first; i <= regno_last; ++i)
4450 if (! mark_regno_cond_dead (pbi, i, cond))
4451 not_dead = 1;
4452 }
4453 #endif
4454
4455 /* Additional data to record if this is the final pass. */
4456 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4457 | PROP_DEATH_NOTES | PROP_AUTOINC))
4458 {
4459 register rtx y;
4460 register int blocknum = pbi->bb->index;
4461
4462 y = NULL_RTX;
4463 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4464 {
4465 y = pbi->reg_next_use[regno_first];
4466
4467 /* The next use is no longer next, since a store intervenes. */
4468 for (i = regno_first; i <= regno_last; ++i)
4469 pbi->reg_next_use[i] = 0;
4470 }
4471
4472 if (flags & PROP_REG_INFO)
4473 {
4474 for (i = regno_first; i <= regno_last; ++i)
4475 {
4476 /* Count (weighted) references, stores, etc. This counts a
4477 register twice if it is modified, but that is correct. */
4478 REG_N_SETS (i) += 1;
4479 REG_N_REFS (i) += (optimize_size ? 1
4480 : pbi->bb->loop_depth + 1);
4481
4482 /* The insns where a reg is live are normally counted
4483 elsewhere, but we want the count to include the insn
4484 where the reg is set, and the normal counting mechanism
4485 would not count it. */
4486 REG_LIVE_LENGTH (i) += 1;
4487 }
4488
4489 /* If this is a hard reg, record this function uses the reg. */
4490 if (regno_first < FIRST_PSEUDO_REGISTER)
4491 {
4492 for (i = regno_first; i <= regno_last; i++)
4493 regs_ever_live[i] = 1;
4494 }
4495 else
4496 {
4497 /* Keep track of which basic blocks each reg appears in. */
4498 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4499 REG_BASIC_BLOCK (regno_first) = blocknum;
4500 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4501 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4502 }
4503 }
4504
4505 if (! some_was_dead)
4506 {
4507 if (flags & PROP_LOG_LINKS)
4508 {
4509 /* Make a logical link from the next following insn
4510 that uses this register, back to this insn.
4511 The following insns have already been processed.
4512
4513 We don't build a LOG_LINK for hard registers contained
4514 in ASM_OPERANDs. If these registers get replaced,
4515 we might wind up changing the semantics of the insn,
4516 even if reload can make what appear to be valid
4517 assignments later. */
4518 if (y && (BLOCK_NUM (y) == blocknum)
4519 && (regno_first >= FIRST_PSEUDO_REGISTER
4520 || asm_noperands (PATTERN (y)) < 0))
4521 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4522 }
4523 }
4524 else if (not_dead)
4525 ;
4526 else if (! some_was_live)
4527 {
4528 if (flags & PROP_REG_INFO)
4529 REG_N_DEATHS (regno_first) += 1;
4530
4531 if (flags & PROP_DEATH_NOTES)
4532 {
4533 /* Note that dead stores have already been deleted
4534 when possible. If we get here, we have found a
4535 dead store that cannot be eliminated (because the
4536 same insn does something useful). Indicate this
4537 by marking the reg being set as dying here. */
4538 REG_NOTES (insn)
4539 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4540 }
4541 }
4542 else
4543 {
4544 if (flags & PROP_DEATH_NOTES)
4545 {
4546 /* This is a case where we have a multi-word hard register
4547 and some, but not all, of the words of the register are
4548 needed in subsequent insns. Write REG_UNUSED notes
4549 for those parts that were not needed. This case should
4550 be rare. */
4551
4552 for (i = regno_first; i <= regno_last; ++i)
4553 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4554 REG_NOTES (insn)
4555 = alloc_EXPR_LIST (REG_UNUSED,
4556 gen_rtx_REG (reg_raw_mode[i], i),
4557 REG_NOTES (insn));
4558 }
4559 }
4560 }
4561
4562 /* Mark the register as being dead. */
4563 if (some_was_live
4564 && ! not_dead
4565 /* The stack pointer is never dead. Well, not strictly true,
4566 but it's very difficult to tell from here. Hopefully
4567 combine_stack_adjustments will fix up the most egregious
4568 errors. */
4569 && regno_first != STACK_POINTER_REGNUM)
4570 {
4571 for (i = regno_first; i <= regno_last; ++i)
4572 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4573 }
4574 }
4575 else if (GET_CODE (reg) == REG)
4576 {
4577 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4578 pbi->reg_next_use[regno_first] = 0;
4579 }
4580
4581 /* If this is the last pass and this is a SCRATCH, show it will be dying
4582 here and count it. */
4583 else if (GET_CODE (reg) == SCRATCH)
4584 {
4585 if (flags & PROP_DEATH_NOTES)
4586 REG_NOTES (insn)
4587 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4588 }
4589 }
4590 \f
4591 #ifdef HAVE_conditional_execution
4592 /* Mark REGNO conditionally dead.
4593 Return true if the register is now unconditionally dead. */
4594
4595 static int
4596 mark_regno_cond_dead (pbi, regno, cond)
4597 struct propagate_block_info *pbi;
4598 int regno;
4599 rtx cond;
4600 {
4601 /* If this is a store to a predicate register, the value of the
4602 predicate is changing; we don't know that the predicate as seen
4603 before is the same as that seen after. Flush all dependent
4604 conditions from reg_cond_dead. This will make all such
4605 conditionally live registers unconditionally live. */
4606 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4607 flush_reg_cond_reg (pbi, regno);
4608
4609 /* If this is an unconditional store, remove any conditional
4610 life that may have existed. */
4611 if (cond == NULL_RTX)
4612 splay_tree_remove (pbi->reg_cond_dead, regno);
4613 else
4614 {
4615 splay_tree_node node;
4616 struct reg_cond_life_info *rcli;
4617 rtx ncond;
4618
4619 /* Otherwise this is a conditional set. Record that fact.
4620 It may have been conditionally used, or there may be a
4621 subsequent set with a complementary condition. */
4622
4623 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4624 if (node == NULL)
4625 {
4626 /* The register was unconditionally live previously.
4627 Record the current condition as the condition under
4628 which it is dead. */
4629 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4630 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4631 splay_tree_insert (pbi->reg_cond_dead, regno,
4632 (splay_tree_value) rcli);
4633
4634 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4635
4636 /* Not unconditionally dead. */
4637 return 0;
4638 }
4639 else
4640 {
4641 /* The register was conditionally live previously.
4642 Add the new condition to the old. */
4643 rcli = (struct reg_cond_life_info *) node->value;
4644 ncond = rcli->condition;
4645 ncond = ior_reg_cond (ncond, cond);
4646
4647 /* If the register is now unconditionally dead,
4648 remove the entry in the splay_tree. */
4649 if (ncond == const1_rtx)
4650 splay_tree_remove (pbi->reg_cond_dead, regno);
4651 else
4652 {
4653 rcli->condition = ncond;
4654
4655 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
4656
4657 /* Not unconditionally dead. */
4658 return 0;
4659 }
4660 }
4661 }
4662
4663 return 1;
4664 }
4665
4666 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4667
4668 static void
4669 free_reg_cond_life_info (value)
4670 splay_tree_value value;
4671 {
4672 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4673 free_EXPR_LIST_list (&rcli->condition);
4674 free (rcli);
4675 }
4676
4677 /* Helper function for flush_reg_cond_reg. */
4678
4679 static int
4680 flush_reg_cond_reg_1 (node, data)
4681 splay_tree_node node;
4682 void *data;
4683 {
4684 struct reg_cond_life_info *rcli;
4685 int *xdata = (int *) data;
4686 unsigned int regno = xdata[0];
4687 rtx c, *prev;
4688
4689 /* Don't need to search if last flushed value was farther on in
4690 the in-order traversal. */
4691 if (xdata[1] >= (int) node->key)
4692 return 0;
4693
4694 /* Splice out portions of the expression that refer to regno. */
4695 rcli = (struct reg_cond_life_info *) node->value;
4696 c = *(prev = &rcli->condition);
4697 while (c)
4698 {
4699 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4700 {
4701 rtx next = XEXP (c, 1);
4702 free_EXPR_LIST_node (c);
4703 c = *prev = next;
4704 }
4705 else
4706 c = *(prev = &XEXP (c, 1));
4707 }
4708
4709 /* If the entire condition is now NULL, signal the node to be removed. */
4710 if (! rcli->condition)
4711 {
4712 xdata[1] = node->key;
4713 return -1;
4714 }
4715 else
4716 return 0;
4717 }
4718
4719 /* Flush all (sub) expressions referring to REGNO from REG_COND_DEAD. */
4720
4721 static void
4722 flush_reg_cond_reg (pbi, regno)
4723 struct propagate_block_info *pbi;
4724 int regno;
4725 {
4726 int pair[2];
4727
4728 pair[0] = regno;
4729 pair[1] = -1;
4730 while (splay_tree_foreach (pbi->reg_cond_dead,
4731 flush_reg_cond_reg_1, pair) == -1)
4732 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4733
4734 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4735 }
4736
4737 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4738 We actually use EXPR_LIST to chain the sub-expressions together
4739 instead of IOR because it's easier to manipulate and we have
4740 the lists.c functions to reuse nodes.
4741
4742 Return a new rtl expression as appropriate. */
4743
4744 static rtx
4745 ior_reg_cond (old, x)
4746 rtx old, x;
4747 {
4748 enum rtx_code x_code;
4749 rtx x_reg;
4750 rtx c;
4751
4752 /* We expect these conditions to be of the form (eq reg 0). */
4753 x_code = GET_CODE (x);
4754 if (GET_RTX_CLASS (x_code) != '<'
4755 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4756 || XEXP (x, 1) != const0_rtx)
4757 abort ();
4758
4759 /* Search the expression for an existing sub-expression of X_REG. */
4760 for (c = old; c; c = XEXP (c, 1))
4761 {
4762 rtx y = XEXP (c, 0);
4763 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4764 {
4765 /* If we find X already present in OLD, we need do nothing. */
4766 if (GET_CODE (y) == x_code)
4767 return old;
4768
4769 /* If we find X being a complement of a condition in OLD,
4770 then the entire condition is true. */
4771 if (GET_CODE (y) == reverse_condition (x_code))
4772 return const1_rtx;
4773 }
4774 }
4775
4776 /* Otherwise just add to the chain. */
4777 return alloc_EXPR_LIST (0, x, old);
4778 }
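
/* Illustrative sketch only (not compiled): how the EXPR_LIST encoding
 of predicate conditions behaves. P1_REG and P2_REG are hypothetical
 predicate registers; the list stands for the IOR of its elements. */
#if 0
static void
example_ior_reg_cond (p1_reg, p2_reg)
 rtx p1_reg, p2_reg;
{
 rtx cond;

 /* Start with the single condition (ne p1 0). */
 cond = alloc_EXPR_LIST (0, gen_rtx_fmt_ee (NE, VOIDmode, p1_reg,
 const0_rtx), NULL_RTX);

 /* IOR in (eq p2 0); a different register, so it is simply chained. */
 cond = ior_reg_cond (cond, gen_rtx_fmt_ee (EQ, VOIDmode, p2_reg,
 const0_rtx));

 /* IOR in (eq p1 0), the complement of the first element; the whole
 condition is now always true and collapses to const1_rtx, which
 mark_regno_cond_dead treats as unconditionally dead. */
 cond = ior_reg_cond (cond, gen_rtx_fmt_ee (EQ, VOIDmode, p1_reg,
 const0_rtx));
}
#endif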
4779
4780 static rtx
4781 not_reg_cond (x)
4782 rtx x;
4783 {
4784 enum rtx_code x_code;
4785 rtx x_reg;
4786
4787 /* We expect these conditions to be of the form (eq reg 0). */
4788 x_code = GET_CODE (x);
4789 if (GET_RTX_CLASS (x_code) != '<'
4790 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4791 || XEXP (x, 1) != const0_rtx)
4792 abort ();
4793
4794 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4795 VOIDmode, x_reg, const0_rtx),
4796 NULL_RTX);
4797 }
4798
4799 static rtx
4800 nand_reg_cond (old, x)
4801 rtx old, x;
4802 {
4803 enum rtx_code x_code;
4804 rtx x_reg;
4805 rtx c, *prev;
4806
4807 /* We expect these conditions to be of the form (eq reg 0). */
4808 x_code = GET_CODE (x);
4809 if (GET_RTX_CLASS (x_code) != '<'
4810 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4811 || XEXP (x, 1) != const0_rtx)
4812 abort ();
4813
4814 /* Search the expression for an existing sub-expression of X_REG. */
4815
4816 for (c = *(prev = &old); c; c = *(prev = &XEXP (c, 1)))
4817 {
4818 rtx y = XEXP (c, 0);
4819 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4820 {
4821 /* If we find X already present in OLD, then we need to
4822 splice it out. */
4823 if (GET_CODE (y) == x_code)
4824 {
4825 *prev = XEXP (c, 1);
4826 free_EXPR_LIST_node (c);
4827 return old ? old : const0_rtx;
4828 }
4829
4830 /* If we find X being a complement of a condition in OLD,
4831 then we need do nothing. */
4832 if (GET_CODE (y) == reverse_condition (x_code))
4833 return old;
4834 }
4835 }
4836
4837 /* Otherwise, by implication, the register in question is now live for
4838 the inverse of the condition X. */
4839 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4840 VOIDmode, x_reg, const0_rtx),
4841 old);
4842 }
4843 #endif /* HAVE_conditional_execution */
4844 \f
4845 #ifdef AUTO_INC_DEC
4846
4847 /* Try to substitute the auto-inc expression INC as the address inside
4848 MEM which occurs in INSN. Currently, the address of MEM is an expression
4849 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
4850 that has a single set whose source is a PLUS of INCR_REG and something
4851 else. */
4852
4853 static void
4854 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
4855 struct propagate_block_info *pbi;
4856 rtx inc, insn, mem, incr, incr_reg;
4857 {
4858 int regno = REGNO (incr_reg);
4859 rtx set = single_set (incr);
4860 rtx q = SET_DEST (set);
4861 rtx y = SET_SRC (set);
4862 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
4863
4864 /* Make sure this reg appears only once in this insn. */
4865 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
4866 return;
4867
4868 if (dead_or_set_p (incr, incr_reg)
4869 /* Mustn't autoinc an eliminable register. */
4870 && (regno >= FIRST_PSEUDO_REGISTER
4871 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4872 {
4873 /* This is the simple case. Try to make the auto-inc. If
4874 we can't, we are done. Otherwise, we will do any
4875 needed updates below. */
4876 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
4877 return;
4878 }
4879 else if (GET_CODE (q) == REG
4880 /* PREV_INSN used here to check the semi-open interval
4881 [insn,incr). */
4882 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4883 /* We must also check for sets of q as q may be
4884 a call clobbered hard register and there may
4885 be a call between PREV_INSN (insn) and incr. */
4886 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4887 {
4888 /* We have *p followed sometime later by q = p+size.
4889 Both p and q must be live afterward,
4890 and q is not used between INSN and its assignment.
4891 Change it to q = p, ...*q..., q = q+size.
4892 Then fall into the usual case. */
4893 rtx insns, temp;
4894
4895 start_sequence ();
4896 emit_move_insn (q, incr_reg);
4897 insns = get_insns ();
4898 end_sequence ();
4899
4900 if (basic_block_for_insn)
4901 for (temp = insns; temp; temp = NEXT_INSN (temp))
4902 set_block_for_insn (temp, pbi->bb);
4903
4904 /* If we can't make the auto-inc, or can't make the
4905 replacement into Y, exit. There's no point in making
4906 the change below if we can't do the auto-inc and doing
4907 so is not correct in the pre-inc case. */
4908
4909 XEXP (inc, 0) = q;
4910 validate_change (insn, &XEXP (mem, 0), inc, 1);
4911 validate_change (incr, &XEXP (y, opnum), q, 1);
4912 if (! apply_change_group ())
4913 return;
4914
4915 /* We now know we'll be doing this change, so emit the
4916 new insn(s) and do the updates. */
4917 emit_insns_before (insns, insn);
4918
4919 if (pbi->bb->head == insn)
4920 pbi->bb->head = insns;
4921
4922 /* INCR will become a NOTE and INSN won't contain a
4923 use of INCR_REG. If a use of INCR_REG was just placed in
4924 the insn before INSN, make that the next use.
4925 Otherwise, invalidate it. */
4926 if (GET_CODE (PREV_INSN (insn)) == INSN
4927 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4928 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
4929 pbi->reg_next_use[regno] = PREV_INSN (insn);
4930 else
4931 pbi->reg_next_use[regno] = 0;
4932
4933 incr_reg = q;
4934 regno = REGNO (q);
4935
4936 /* REGNO is now used in INCR which is below INSN, but
4937 it previously wasn't live here. If we don't mark
4938 it as live, we'll put a REG_DEAD note for it
4939 on this insn, which is incorrect. */
4940 SET_REGNO_REG_SET (pbi->reg_live, regno);
4941
4942 /* If there are any calls between INSN and INCR, show
4943 that REGNO now crosses them. */
4944 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4945 if (GET_CODE (temp) == CALL_INSN)
4946 REG_N_CALLS_CROSSED (regno)++;
4947 }
4948 else
4949 return;
4950
4951 /* If we haven't returned, it means we were able to make the
4952 auto-inc, so update the status. First, record that this insn
4953 has an implicit side effect. */
4954
4955 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
4956
4957 /* Modify the old increment-insn to simply copy
4958 the already-incremented value of our register. */
4959 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
4960 abort ();
4961
4962 /* If that makes it a no-op (copying the register into itself) delete
4963 it so it won't appear to be a "use" and a "set" of this
4964 register. */
4965 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
4966 {
4967 /* If the original source was dead, it's dead now. */
4968 rtx note;
4969
4970 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
4971 {
4972 remove_note (incr, note);
4973 if (XEXP (note, 0) != incr_reg)
4974 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4975 }
4976
4977 PUT_CODE (incr, NOTE);
4978 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4979 NOTE_SOURCE_FILE (incr) = 0;
4980 }
4981
4982 if (regno >= FIRST_PSEUDO_REGISTER)
4983 {
4984 /* Count an extra reference to the reg. When a reg is
4985 incremented, spilling it is worse, so we want to make
4986 that less likely. */
4987 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
4988
4989 /* Count the increment as a setting of the register,
4990 even though it isn't a SET in rtl. */
4991 REG_N_SETS (regno)++;
4992 }
4993 }
4994
4995 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4996 reference. */
4997
4998 static void
4999 find_auto_inc (pbi, x, insn)
5000 struct propagate_block_info *pbi;
5001 rtx x;
5002 rtx insn;
5003 {
5004 rtx addr = XEXP (x, 0);
5005 HOST_WIDE_INT offset = 0;
5006 rtx set, y, incr, inc_val;
5007 int regno;
5008 int size = GET_MODE_SIZE (GET_MODE (x));
5009
5010 if (GET_CODE (insn) == JUMP_INSN)
5011 return;
5012
5013 /* Here we detect use of an index register which might be good for
5014 postincrement, postdecrement, preincrement, or predecrement. */
5015
5016 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5017 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5018
5019 if (GET_CODE (addr) != REG)
5020 return;
5021
5022 regno = REGNO (addr);
5023
5024 /* Is the next use an increment that might make auto-increment? */
5025 incr = pbi->reg_next_use[regno];
5026 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5027 return;
5028 set = single_set (incr);
5029 if (set == 0 || GET_CODE (set) != SET)
5030 return;
5031 y = SET_SRC (set);
5032
5033 if (GET_CODE (y) != PLUS)
5034 return;
5035
5036 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5037 inc_val = XEXP (y, 1);
5038 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5039 inc_val = XEXP (y, 0);
5040 else
5041 return;
5042
5043 if (GET_CODE (inc_val) == CONST_INT)
5044 {
5045 if (HAVE_POST_INCREMENT
5046 && (INTVAL (inc_val) == size && offset == 0))
5047 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5048 incr, addr);
5049 else if (HAVE_POST_DECREMENT
5050 && (INTVAL (inc_val) == -size && offset == 0))
5051 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5052 incr, addr);
5053 else if (HAVE_PRE_INCREMENT
5054 && (INTVAL (inc_val) == size && offset == size))
5055 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5056 incr, addr);
5057 else if (HAVE_PRE_DECREMENT
5058 && (INTVAL (inc_val) == -size && offset == -size))
5059 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5060 incr, addr);
5061 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5062 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5063 gen_rtx_PLUS (Pmode,
5064 addr,
5065 inc_val)),
5066 insn, x, incr, addr);
5067 }
5068 else if (GET_CODE (inc_val) == REG
5069 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5070 NEXT_INSN (incr)))
5071
5072 {
5073 if (HAVE_POST_MODIFY_REG && offset == 0)
5074 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5075 gen_rtx_PLUS (Pmode,
5076 addr,
5077 inc_val)),
5078 insn, x, incr, addr);
5079 }
5080 }
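
/* Example (illustrative only; register number is hypothetical): with
 HAVE_POST_INCREMENT, the pair

 insn: ... (mem:SI (reg 100)) ...
 incr: (set (reg 100) (plus:SI (reg 100) (const_int 4)))

 is recognized above (size 4, offset 0) and handed to attempt_auto_inc,
 which rewrites INSN to use (mem:SI (post_inc:SI (reg 100))), adds a
 REG_INC note for (reg 100), and turns INCR into a no-op copy that is
 then deleted. */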
5081
5082 #endif /* AUTO_INC_DEC */
5083 \f
5084 static void
5085 mark_used_reg (pbi, reg, cond, insn)
5086 struct propagate_block_info *pbi;
5087 rtx reg;
5088 rtx cond ATTRIBUTE_UNUSED;
5089 rtx insn;
5090 {
5091 int regno = REGNO (reg);
5092 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5093 int some_was_dead = ! some_was_live;
5094 int some_not_set;
5095 int n;
5096
5097 /* A hard reg in a wide mode may really be multiple registers.
5098 If so, mark all of them just like the first. */
5099 if (regno < FIRST_PSEUDO_REGISTER)
5100 {
5101 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5102 while (--n > 0)
5103 {
5104 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5105 some_was_live |= needed_regno;
5106 some_was_dead |= ! needed_regno;
5107 }
5108 }
5109
5110 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5111 {
5112 /* Record where each reg is used, so when the reg is set we know
5113 the next insn that uses it. */
5114 pbi->reg_next_use[regno] = insn;
5115 }
5116
5117 if (pbi->flags & PROP_REG_INFO)
5118 {
5119 if (regno < FIRST_PSEUDO_REGISTER)
5120 {
5121 /* If this is a register we are going to try to eliminate,
5122 don't mark it live here. If we are successful in
5123 eliminating it, it need not be live unless it is used for
5124 pseudos, in which case it will have been set live when it
5125 was allocated to the pseudos. If the register will not
5126 be eliminated, reload will set it live at that point.
5127
5128 Otherwise, record that this function uses this register. */
5129 /* ??? The PPC backend tries to "eliminate" on the pic
5130 register to itself. This should be fixed. In the mean
5131 time, hack around it. */
5132
5133 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5134 && (regno == FRAME_POINTER_REGNUM
5135 || regno == ARG_POINTER_REGNUM)))
5136 {
5137 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5138 do
5139 regs_ever_live[regno + --n] = 1;
5140 while (n > 0);
5141 }
5142 }
5143 else
5144 {
5145 /* Keep track of which basic block each reg appears in. */
5146
5147 register int blocknum = pbi->bb->index;
5148 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5149 REG_BASIC_BLOCK (regno) = blocknum;
5150 else if (REG_BASIC_BLOCK (regno) != blocknum)
5151 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5152
5153 /* Count (weighted) number of uses of each reg. */
5154 REG_N_REFS (regno) += (optimize_size ? 1
5155 : pbi->bb->loop_depth + 1);
5156 }
5157 }
5158
5159 /* Find out if any part of the register was set in this insn. */
5160 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5161 if (regno < FIRST_PSEUDO_REGISTER)
5162 {
5163 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5164 while (--n > 0)
5165 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5166 }
5167
5168 /* Record and count the insns in which a reg dies. If it is used in
5169 this insn and was dead below the insn then it dies in this insn.
5170 If it was set in this insn, we do not make a REG_DEAD note;
5171 likewise if we already made such a note. */
5172 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5173 && some_was_dead
5174 && some_not_set)
5175 {
5176 /* Check for the case where the register dying partially
5177 overlaps the register set by this insn. */
5178 if (regno < FIRST_PSEUDO_REGISTER
5179 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5180 {
5181 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5182 while (--n >= 0)
5183 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5184 }
5185
5186 /* If none of the words in X is needed, make a REG_DEAD note.
5187 Otherwise, we must make partial REG_DEAD notes. */
5188 if (! some_was_live)
5189 {
5190 if ((pbi->flags & PROP_DEATH_NOTES)
5191 && ! find_regno_note (insn, REG_DEAD, regno))
5192 REG_NOTES (insn)
5193 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5194
5195 if (pbi->flags & PROP_REG_INFO)
5196 REG_N_DEATHS (regno)++;
5197 }
5198 else
5199 {
5200 /* Don't make a REG_DEAD note for a part of a register
5201 that is set in the insn. */
5202
5203 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5204 for (; n >= regno; n--)
5205 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5206 && ! dead_or_set_regno_p (insn, n))
5207 REG_NOTES (insn)
5208 = alloc_EXPR_LIST (REG_DEAD,
5209 gen_rtx_REG (reg_raw_mode[n], n),
5210 REG_NOTES (insn));
5211 }
5212 }
5213
5214 SET_REGNO_REG_SET (pbi->reg_live, regno);
5215 if (regno < FIRST_PSEUDO_REGISTER)
5216 {
5217 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5218 while (--n > 0)
5219 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5220 }
5221
5222 #ifdef HAVE_conditional_execution
5223 /* If this is a conditional use, record that fact. If it is later
5224 conditionally set, we'll know to kill the register. */
5225 if (cond != NULL_RTX)
5226 {
5227 splay_tree_node node;
5228 struct reg_cond_life_info *rcli;
5229 rtx ncond;
5230
5231 if (some_was_live)
5232 {
5233 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5234 if (node == NULL)
5235 {
5236 /* The register was unconditionally live previously.
5237 No need to do anything. */
5238 }
5239 else
5240 {
5241 /* The register was conditionally live previously.
5242 Subtract the new life cond from the old death cond. */
5243 rcli = (struct reg_cond_life_info *) node->value;
5244 ncond = rcli->condition;
5245 ncond = nand_reg_cond (ncond, cond);
5246
5247 /* If the register is now unconditionally live, remove the
5248 entry in the splay_tree. */
5249 if (ncond == const0_rtx)
5250 {
5251 rcli->condition = NULL_RTX;
5252 splay_tree_remove (pbi->reg_cond_dead, regno);
5253 }
5254 else
5255 {
5256 rcli->condition = ncond;
5257 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5258 }
5259 }
5260 }
5261 else
5262 {
5263 /* The register was not previously live at all. Record
5264 the condition under which it is still dead. */
5265 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5266 rcli->condition = not_reg_cond (cond);
5267 splay_tree_insert (pbi->reg_cond_dead, regno,
5268 (splay_tree_value) rcli);
5269
5270 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5271 }
5272 }
5273 else if (some_was_live)
5274 {
5275 splay_tree_node node;
5276 struct reg_cond_life_info *rcli;
5277
5278 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5279 if (node != NULL)
5280 {
5281 /* The register was conditionally live previously, but is now
5282 unconditionally so. Remove it from the conditionally dead
5283 list, so that a conditional set won't cause us to think
5284 it dead. */
5285 rcli = (struct reg_cond_life_info *) node->value;
5286 rcli->condition = NULL_RTX;
5287 splay_tree_remove (pbi->reg_cond_dead, regno);
5288 }
5289 }
5290
5291 #endif
5292 }
5293
5294 /* Scan expression X and set a bit in PBI->REG_LIVE for each register
5295 it uses, since every register referenced by X is live just before
5296 this insn.
5297
5298 INSN is the containing instruction. If INSN is dead, this function
5299 is not called. */
5300
5301 static void
5302 mark_used_regs (pbi, x, cond, insn)
5303 struct propagate_block_info *pbi;
5304 rtx x, cond, insn;
5305 {
5306 register RTX_CODE code;
5307 register int regno;
5308 int flags = pbi->flags;
5309
5310 retry:
5311 code = GET_CODE (x);
5312 switch (code)
5313 {
5314 case LABEL_REF:
5315 case SYMBOL_REF:
5316 case CONST_INT:
5317 case CONST:
5318 case CONST_DOUBLE:
5319 case PC:
5320 case ADDR_VEC:
5321 case ADDR_DIFF_VEC:
5322 return;
5323
5324 #ifdef HAVE_cc0
5325 case CC0:
5326 pbi->cc0_live = 1;
5327 return;
5328 #endif
5329
5330 case CLOBBER:
5331 /* If we are clobbering a MEM, mark any registers inside the address
5332 as being used. */
5333 if (GET_CODE (XEXP (x, 0)) == MEM)
5334 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5335 return;
5336
5337 case MEM:
5338 /* Don't bother watching stores to mems if this is not the
5339 final pass. We'll not be deleting dead stores this round. */
5340 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5341 {
5342 /* Invalidate the data for the last MEM stored, but only if MEM is
5343 something that can be stored into. */
5344 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5345 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5346 /* Needn't clear the memory set list. */
5347 ;
5348 else
5349 {
5350 rtx temp = pbi->mem_set_list;
5351 rtx prev = NULL_RTX;
5352 rtx next;
5353
5354 while (temp)
5355 {
5356 next = XEXP (temp, 1);
5357 if (anti_dependence (XEXP (temp, 0), x))
5358 {
5359 /* Splice temp out of the list. */
5360 if (prev)
5361 XEXP (prev, 1) = next;
5362 else
5363 pbi->mem_set_list = next;
5364 free_EXPR_LIST_node (temp);
5365 }
5366 else
5367 prev = temp;
5368 temp = next;
5369 }
5370 }
5371
5372 /* If the memory reference had embedded side effects (autoincrement
5373 address modes), then we may need to kill some entries on the
5374 memory set list. */
5375 if (insn)
5376 invalidate_mems_from_autoinc (pbi, insn);
5377 }
5378
5379 #ifdef AUTO_INC_DEC
5380 if (flags & PROP_AUTOINC)
5381 find_auto_inc (pbi, x, insn);
5382 #endif
5383 break;
5384
5385 case SUBREG:
5386 #ifdef CLASS_CANNOT_CHANGE_MODE
5387 if (GET_CODE (SUBREG_REG (x)) == REG
5388 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5389 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5390 GET_MODE (SUBREG_REG (x))))
5391 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5392 #endif
5393
5394 /* While we're here, optimize this case. */
5395 x = SUBREG_REG (x);
5396 if (GET_CODE (x) != REG)
5397 goto retry;
5398 /* Fall through. */
5399
5400 case REG:
5401 /* We see a register being used, not set; mark it as needed. */
5402 mark_used_reg (pbi, x, cond, insn);
5403 return;
5404
5405 case SET:
5406 {
5407 register rtx testreg = SET_DEST (x);
5408 int mark_dest = 0;
5409
5410 /* If storing into MEM, don't show it as being used. But do
5411 show the address as being used. */
5412 if (GET_CODE (testreg) == MEM)
5413 {
5414 #ifdef AUTO_INC_DEC
5415 if (flags & PROP_AUTOINC)
5416 find_auto_inc (pbi, testreg, insn);
5417 #endif
5418 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5419 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5420 return;
5421 }
5422
5423 /* Storing in STRICT_LOW_PART is like storing in a reg
5424 in that this SET might be dead, so ignore it in TESTREG,
5425 but in some other ways it is like using the reg.
5426
5427 Storing in a SUBREG or a bit field is like storing the entire
5428 register in that if the register's value is not used
5429 then this SET is not needed. */
5430 while (GET_CODE (testreg) == STRICT_LOW_PART
5431 || GET_CODE (testreg) == ZERO_EXTRACT
5432 || GET_CODE (testreg) == SIGN_EXTRACT
5433 || GET_CODE (testreg) == SUBREG)
5434 {
5435 #ifdef CLASS_CANNOT_CHANGE_MODE
5436 if (GET_CODE (testreg) == SUBREG
5437 && GET_CODE (SUBREG_REG (testreg)) == REG
5438 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5439 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5440 GET_MODE (testreg)))
5441 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5442 #endif
5443
5444 /* Modifying a single register in an alternate mode
5445 does not use any of the old value. But these other
5446 ways of storing in a register do use the old value. */
5447 if (GET_CODE (testreg) == SUBREG
5448 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5449 ;
5450 else
5451 mark_dest = 1;
5452
5453 testreg = XEXP (testreg, 0);
5454 }
5455
5456 /* If this is a store into a register, recursively scan the
5457 value being stored. */
5458
5459 if ((GET_CODE (testreg) == PARALLEL
5460 && GET_MODE (testreg) == BLKmode)
5461 || (GET_CODE (testreg) == REG
5462 && (regno = REGNO (testreg),
5463 ! (regno == FRAME_POINTER_REGNUM
5464 && (! reload_completed || frame_pointer_needed)))
5465 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5466 && ! (regno == HARD_FRAME_POINTER_REGNUM
5467 && (! reload_completed || frame_pointer_needed))
5468 #endif
5469 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5470 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5471 #endif
5472 ))
5473 {
5474 if (mark_dest)
5475 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5476 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5477 return;
5478 }
5479 }
5480 break;
5481
5482 case ASM_OPERANDS:
5483 case UNSPEC_VOLATILE:
5484 case TRAP_IF:
5485 case ASM_INPUT:
5486 {
5487 /* Traditional and volatile asm instructions must be considered to use
5488 and clobber all hard registers, all pseudo-registers and all of
5489 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5490
5491 Consider for instance a volatile asm that changes the fpu rounding
5492 mode. An insn should not be moved across this even if it only uses
5493 pseudo-regs because it might give an incorrectly rounded result.
5494
5495 ?!? Unfortunately, marking all hard registers as live causes massive
5496 problems for the register allocator and marking all pseudos as live
5497 creates mountains of uninitialized variable warnings.
5498
5499 So for now, just clear the memory set list and mark any regs
5500 we can find in ASM_OPERANDS as used. */
5501 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5502 free_EXPR_LIST_list (&pbi->mem_set_list);
5503
5504 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5505 We cannot just fall through here since then we would be confused
5506 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5507 a traditional asm, unlike its normal usage. */
5508 if (code == ASM_OPERANDS)
5509 {
5510 int j;
5511
5512 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5513 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5514 }
5515 break;
5516 }
5517
5518 case COND_EXEC:
5519 if (cond != NULL_RTX)
5520 abort ();
5521
5522 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5523
5524 cond = COND_EXEC_TEST (x);
5525 x = COND_EXEC_CODE (x);
5526 goto retry;
5527
5528 case PHI:
5529 /* We _do_not_ want to scan operands of phi nodes. Operands of
5530 a phi function are evaluated only when control reaches this
5531 block along a particular edge. Therefore, regs that appear
5532 as arguments to phi should not be added to the global live at
5533 start. */
5534 return;
5535
5536 default:
5537 break;
5538 }
5539
5540 /* Recursively scan the operands of this expression. */
5541
5542 {
5543 register const char *fmt = GET_RTX_FORMAT (code);
5544 register int i;
5545
5546 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5547 {
5548 if (fmt[i] == 'e')
5549 {
5550 /* Tail recursive case: save a function call level. */
5551 if (i == 0)
5552 {
5553 x = XEXP (x, 0);
5554 goto retry;
5555 }
5556 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5557 }
5558 else if (fmt[i] == 'E')
5559 {
5560 register int j;
5561 for (j = 0; j < XVECLEN (x, i); j++)
5562 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5563 }
5564 }
5565 }
5566 }
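
/* Example (illustrative only; register numbers are hypothetical): for

 (set (mem:SI (reg 65)) (reg 66))

 the SET case above marks (reg 65) (the address) and (reg 66) (the
 source) as used, but not the stored-to memory itself. For

 (set (strict_low_part (subreg:QI (reg:SI 70) 0)) ...)

 MARK_DEST is set because the untouched part of (reg 70) keeps its old
 value, so the destination register is marked used as well. */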
5567 \f
5568 #ifdef AUTO_INC_DEC
5569
5570 static int
5571 try_pre_increment_1 (pbi, insn)
5572 struct propagate_block_info *pbi;
5573 rtx insn;
5574 {
5575 /* Find the next use of this reg. If in same basic block,
5576 make it do pre-increment or pre-decrement if appropriate. */
5577 rtx x = single_set (insn);
5578 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5579 * INTVAL (XEXP (SET_SRC (x), 1)));
5580 int regno = REGNO (SET_DEST (x));
5581 rtx y = pbi->reg_next_use[regno];
5582 if (y != 0
5583 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5584 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5585 mode would be better. */
5586 && ! dead_or_set_p (y, SET_DEST (x))
5587 && try_pre_increment (y, SET_DEST (x), amount))
5588 {
5589 /* We have found a suitable auto-increment
5590 and already changed insn Y to do it.
5591 So flush this increment-instruction. */
5592 PUT_CODE (insn, NOTE);
5593 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5594 NOTE_SOURCE_FILE (insn) = 0;
5595 /* Count a reference to this reg for the increment
5596 insn we are deleting. When a reg is incremented,
5597 spilling it is worse, so we want to make that
5598 less likely. */
5599 if (regno >= FIRST_PSEUDO_REGISTER)
5600 {
5601 REG_N_REFS (regno) += (optimize_size ? 1
5602 : pbi->bb->loop_depth + 1);
5603 REG_N_SETS (regno)++;
5604 }
5605 return 1;
5606 }
5607 return 0;
5608 }
5609
5610 /* Try to change INSN so that it does pre-increment or pre-decrement
5611 addressing on register REG in order to add AMOUNT to REG.
5612 AMOUNT is negative for pre-decrement.
5613 Returns 1 if the change could be made.
5614 This checks all aspects of the validity of the result of modifying INSN. */
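
/* For illustration only (hypothetical RTL; the exact forms depend on the
   target and the modes involved), a successful change might turn

     (insn ... (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))))
     (insn ... (set (reg:SI 2) (mem:SI (reg:SI 1))))

   into a single use with a pre-increment address plus a REG_INC note:

     (insn ... (set (reg:SI 2) (mem:SI (pre_inc:SI (reg:SI 1)))))

   after which the caller, try_pre_increment_1, deletes the add insn.  */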
5615
5616 static int
5617 try_pre_increment (insn, reg, amount)
5618 rtx insn, reg;
5619 HOST_WIDE_INT amount;
5620 {
5621 register rtx use;
5622
5623 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5624 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5625 int pre_ok = 0;
5626 /* Nonzero if we can try to make a post-increment or post-decrement.
5627 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5628 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5629 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5630 int post_ok = 0;
5631
5632 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5633 int do_post = 0;
5634
5635 /* From the sign of increment, see which possibilities are conceivable
5636 on this target machine. */
5637 if (HAVE_PRE_INCREMENT && amount > 0)
5638 pre_ok = 1;
5639 if (HAVE_POST_INCREMENT && amount > 0)
5640 post_ok = 1;
5641
5642 if (HAVE_PRE_DECREMENT && amount < 0)
5643 pre_ok = 1;
5644 if (HAVE_POST_DECREMENT && amount < 0)
5645 post_ok = 1;
5646
5647 if (! (pre_ok || post_ok))
5648 return 0;
5649
5650 /* It is not safe to add a side effect to a jump insn
5651 because if the incremented register is spilled and must be reloaded
5652 there would be no way to store the incremented value back in memory. */
5653
5654 if (GET_CODE (insn) == JUMP_INSN)
5655 return 0;
5656
5657 use = 0;
5658 if (pre_ok)
5659 use = find_use_as_address (PATTERN (insn), reg, 0);
5660 if (post_ok && (use == 0 || use == (rtx) 1))
5661 {
5662 use = find_use_as_address (PATTERN (insn), reg, -amount);
5663 do_post = 1;
5664 }
5665
5666 if (use == 0 || use == (rtx) 1)
5667 return 0;
5668
5669 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5670 return 0;
5671
5672 /* See if this combination of instruction and addressing mode exists. */
5673 if (! validate_change (insn, &XEXP (use, 0),
5674 gen_rtx_fmt_e (amount > 0
5675 ? (do_post ? POST_INC : PRE_INC)
5676 : (do_post ? POST_DEC : PRE_DEC),
5677 Pmode, reg), 0))
5678 return 0;
5679
5680 /* Record that this insn now has an implicit side effect on X. */
5681 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5682 return 1;
5683 }
5684
5685 #endif /* AUTO_INC_DEC */
5686 \f
5687 /* Find the place in the rtx X where REG is used as a memory address.
5688 Return the MEM rtx that so uses it.
5689 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5690 (plus REG (const_int PLUSCONST)).
5691
5692 If such an address does not appear, return 0.
5693 If REG appears more than once, or is used other than in such an address,
5694 return (rtx)1. */
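
/* For example (hypothetical rtl), with REG = (reg:SI 65):

     searching (set (reg:SI 70) (mem:SI (reg:SI 65))) with PLUSCONST == 0
     returns the (mem:SI (reg:SI 65)) rtx, whereas with PLUSCONST == 4 the
     required address form is (plus (reg:SI 65) (const_int 4)); REG is then
     used other than in such an address, so the result is (rtx) 1.  */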
5695
5696 rtx
5697 find_use_as_address (x, reg, plusconst)
5698 register rtx x;
5699 rtx reg;
5700 HOST_WIDE_INT plusconst;
5701 {
5702 enum rtx_code code = GET_CODE (x);
5703 const char *fmt = GET_RTX_FORMAT (code);
5704 register int i;
5705 register rtx value = 0;
5706 register rtx tem;
5707
5708 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5709 return x;
5710
5711 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5712 && XEXP (XEXP (x, 0), 0) == reg
5713 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5714 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5715 return x;
5716
5717 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5718 {
5719 /* If REG occurs inside a MEM used in a bit-field reference,
5720 that is unacceptable. */
5721 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5722 return (rtx) (HOST_WIDE_INT) 1;
5723 }
5724
5725 if (x == reg)
5726 return (rtx) (HOST_WIDE_INT) 1;
5727
5728 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5729 {
5730 if (fmt[i] == 'e')
5731 {
5732 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5733 if (value == 0)
5734 value = tem;
5735 else if (tem != 0)
5736 return (rtx) (HOST_WIDE_INT) 1;
5737 }
5738 else if (fmt[i] == 'E')
5739 {
5740 register int j;
5741 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5742 {
5743 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5744 if (value == 0)
5745 value = tem;
5746 else if (tem != 0)
5747 return (rtx) (HOST_WIDE_INT) 1;
5748 }
5749 }
5750 }
5751
5752 return value;
5753 }
5754 \f
5755 /* Write information about registers and basic blocks into FILE.
5756 This is part of making a debugging dump. */
5757
5758 void
5759 dump_regset (r, outf)
5760 regset r;
5761 FILE *outf;
5762 {
5763 int i;
5764 if (r == NULL)
5765 {
5766 fputs (" (nil)", outf);
5767 return;
5768 }
5769
5770 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5771 {
5772 fprintf (outf, " %d", i);
5773 if (i < FIRST_PSEUDO_REGISTER)
5774 fprintf (outf, " [%s]",
5775 reg_names[i]);
5776 });
5777 }
5778
5779 void
5780 debug_regset (r)
5781 regset r;
5782 {
5783 dump_regset (r, stderr);
5784 putc ('\n', stderr);
5785 }
5786
5787 void
5788 dump_flow_info (file)
5789 FILE *file;
5790 {
5791 register int i;
5792 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5793
5794 fprintf (file, "%d registers.\n", max_regno);
5795 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5796 if (REG_N_REFS (i))
5797 {
5798 enum reg_class class, altclass;
5799 fprintf (file, "\nRegister %d used %d times across %d insns",
5800 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5801 if (REG_BASIC_BLOCK (i) >= 0)
5802 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5803 if (REG_N_SETS (i))
5804 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5805 (REG_N_SETS (i) == 1) ? "" : "s");
5806 if (REG_USERVAR_P (regno_reg_rtx[i]))
5807 fprintf (file, "; user var");
5808 if (REG_N_DEATHS (i) != 1)
5809 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5810 if (REG_N_CALLS_CROSSED (i) == 1)
5811 fprintf (file, "; crosses 1 call");
5812 else if (REG_N_CALLS_CROSSED (i))
5813 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5814 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5815 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5816 class = reg_preferred_class (i);
5817 altclass = reg_alternate_class (i);
5818 if (class != GENERAL_REGS || altclass != ALL_REGS)
5819 {
5820 if (altclass == ALL_REGS || class == ALL_REGS)
5821 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5822 else if (altclass == NO_REGS)
5823 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5824 else
5825 fprintf (file, "; pref %s, else %s",
5826 reg_class_names[(int) class],
5827 reg_class_names[(int) altclass]);
5828 }
5829 if (REGNO_POINTER_FLAG (i))
5830 fprintf (file, "; pointer");
5831 fprintf (file, ".\n");
5832 }
5833
5834 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5835 for (i = 0; i < n_basic_blocks; i++)
5836 {
5837 register basic_block bb = BASIC_BLOCK (i);
5838 register edge e;
5839
5840 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5841 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5842
5843 fprintf (file, "Predecessors: ");
5844 for (e = bb->pred; e; e = e->pred_next)
5845 dump_edge_info (file, e, 0);
5846
5847 fprintf (file, "\nSuccessors: ");
5848 for (e = bb->succ; e; e = e->succ_next)
5849 dump_edge_info (file, e, 1);
5850
5851 fprintf (file, "\nRegisters live at start:");
5852 dump_regset (bb->global_live_at_start, file);
5853
5854 fprintf (file, "\nRegisters live at end:");
5855 dump_regset (bb->global_live_at_end, file);
5856
5857 putc ('\n', file);
5858 }
5859
5860 putc ('\n', file);
5861 }
5862
5863 void
5864 debug_flow_info ()
5865 {
5866 dump_flow_info (stderr);
5867 }
5868
5869 static void
5870 dump_edge_info (file, e, do_succ)
5871 FILE *file;
5872 edge e;
5873 int do_succ;
5874 {
5875 basic_block side = (do_succ ? e->dest : e->src);
5876
5877 if (side == ENTRY_BLOCK_PTR)
5878 fputs (" ENTRY", file);
5879 else if (side == EXIT_BLOCK_PTR)
5880 fputs (" EXIT", file);
5881 else
5882 fprintf (file, " %d", side->index);
5883
5884 if (e->count)
5885 fprintf (file, " count:%d", e->count);
5886
5887 if (e->flags)
5888 {
5889 static const char * const bitnames[] = {
5890 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5891 };
5892 int comma = 0;
5893 int i, flags = e->flags;
5894
5895 fputc (' ', file);
5896 fputc ('(', file);
5897 for (i = 0; flags; i++)
5898 if (flags & (1 << i))
5899 {
5900 flags &= ~(1 << i);
5901
5902 if (comma)
5903 fputc (',', file);
5904 if (i < (int) (sizeof (bitnames) / sizeof (*bitnames)))
5905 fputs (bitnames[i], file);
5906 else
5907 fprintf (file, "%d", i);
5908 comma = 1;
5909 }
5910 fputc (')', file);
5911 }
5912 }
5913 \f
5914 /* Print out one basic block with live information at start and end. */
5915
5916 void
5917 dump_bb (bb, outf)
5918 basic_block bb;
5919 FILE *outf;
5920 {
5921 rtx insn;
5922 rtx last;
5923 edge e;
5924
5925 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5926 bb->index, bb->loop_depth, bb->count);
5927 if (bb->eh_beg != -1 || bb->eh_end != -1)
5928 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5929 putc ('\n', outf);
5930
5931 fputs (";; Predecessors: ", outf);
5932 for (e = bb->pred; e; e = e->pred_next)
5933 dump_edge_info (outf, e, 0);
5934 putc ('\n', outf);
5935
5936 fputs (";; Registers live at start:", outf);
5937 dump_regset (bb->global_live_at_start, outf);
5938 putc ('\n', outf);
5939
5940 for (insn = bb->head, last = NEXT_INSN (bb->end);
5941 insn != last;
5942 insn = NEXT_INSN (insn))
5943 print_rtl_single (outf, insn);
5944
5945 fputs (";; Registers live at end:", outf);
5946 dump_regset (bb->global_live_at_end, outf);
5947 putc ('\n', outf);
5948
5949 fputs (";; Successors: ", outf);
5950 for (e = bb->succ; e; e = e->succ_next)
5951 dump_edge_info (outf, e, 1);
5952 putc ('\n', outf);
5953 }
5954
5955 void
5956 debug_bb (bb)
5957 basic_block bb;
5958 {
5959 dump_bb (bb, stderr);
5960 }
5961
5962 void
5963 debug_bb_n (n)
5964 int n;
5965 {
5966 dump_bb (BASIC_BLOCK (n), stderr);
5967 }
5968
5969 /* Like print_rtl, but also print out live information for the start of each
5970 basic block. */
5971
5972 void
5973 print_rtl_with_bb (outf, rtx_first)
5974 FILE *outf;
5975 rtx rtx_first;
5976 {
5977 register rtx tmp_rtx;
5978
5979 if (rtx_first == 0)
5980 fprintf (outf, "(nil)\n");
5981 else
5982 {
5983 int i;
5984 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5985 int max_uid = get_max_uid ();
5986 basic_block *start = (basic_block *)
5987 xcalloc (max_uid, sizeof (basic_block));
5988 basic_block *end = (basic_block *)
5989 xcalloc (max_uid, sizeof (basic_block));
5990 enum bb_state *in_bb_p = (enum bb_state *)
5991 xcalloc (max_uid, sizeof (enum bb_state));
5992
5993 for (i = n_basic_blocks - 1; i >= 0; i--)
5994 {
5995 basic_block bb = BASIC_BLOCK (i);
5996 rtx x;
5997
5998 start[INSN_UID (bb->head)] = bb;
5999 end[INSN_UID (bb->end)] = bb;
6000 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6001 {
6002 enum bb_state state = IN_MULTIPLE_BB;
6003 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6004 state = IN_ONE_BB;
6005 in_bb_p[INSN_UID (x)] = state;
6006
6007 if (x == bb->end)
6008 break;
6009 }
6010 }
6011
6012 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6013 {
6014 int did_output;
6015 basic_block bb;
6016
6017 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6018 {
6019 fprintf (outf, ";; Start of basic block %d, registers live:",
6020 bb->index);
6021 dump_regset (bb->global_live_at_start, outf);
6022 putc ('\n', outf);
6023 }
6024
6025 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6026 && GET_CODE (tmp_rtx) != NOTE
6027 && GET_CODE (tmp_rtx) != BARRIER)
6028 fprintf (outf, ";; Insn is not within a basic block\n");
6029 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6030 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6031
6032 did_output = print_rtl_single (outf, tmp_rtx);
6033
6034 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6035 {
6036 fprintf (outf, ";; End of basic block %d, registers live:\n",
6037 bb->index);
6038 dump_regset (bb->global_live_at_end, outf);
6039 putc ('\n', outf);
6040 }
6041
6042 if (did_output)
6043 putc ('\n', outf);
6044 }
6045
6046 free (start);
6047 free (end);
6048 free (in_bb_p);
6049 }
6050
6051 if (current_function_epilogue_delay_list != 0)
6052 {
6053 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6054 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6055 tmp_rtx = XEXP (tmp_rtx, 1))
6056 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6057 }
6058 }
6059
6060 /* Compute dominator relationships using new flow graph structures. */
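
/* For reference, the iteration below solves the usual dominator dataflow
   equation

     dom (b) = { b } U ( intersection over predecessors p of b of dom (p) )

   starting from the optimistic assumption that every block dominates every
   other block, with the intersection term for a predecessor that is the
   ENTRY block taken to be the empty set (see the AUX handling below).
   Post-dominators are the same computation on the reverse graph.  */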
6061
6062 void
6063 compute_flow_dominators (dominators, post_dominators)
6064 sbitmap *dominators;
6065 sbitmap *post_dominators;
6066 {
6067 int bb;
6068 sbitmap *temp_bitmap;
6069 edge e;
6070 basic_block *worklist, *workend, *qin, *qout;
6071 int qlen;
6072
6073 /* Allocate a worklist array/queue. Entries are only added to the
6074 list if they were not already on the list. So the size is
6075 bounded by the number of basic blocks. */
6076 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6077 workend = &worklist[n_basic_blocks];
6078
6079 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6080 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6081
6082 if (dominators)
6083 {
6084 /* The optimistic setting of dominators requires us to put every
6085 block on the work list initially. */
6086 qin = qout = worklist;
6087 for (bb = 0; bb < n_basic_blocks; bb++)
6088 {
6089 *qin++ = BASIC_BLOCK (bb);
6090 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6091 }
6092 qlen = n_basic_blocks;
6093 qin = worklist;
6094
6095 /* We want a maximal solution, so initially assume everything dominates
6096 everything else. */
6097 sbitmap_vector_ones (dominators, n_basic_blocks);
6098
6099 /* Mark successors of the entry block so we can identify them below. */
6100 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6101 e->dest->aux = ENTRY_BLOCK_PTR;
6102
6103 /* Iterate until the worklist is empty. */
6104 while (qlen)
6105 {
6106 /* Take the first entry off the worklist. */
6107 basic_block b = *qout++;
6108 if (qout >= workend)
6109 qout = worklist;
6110 qlen--;
6111
6112 bb = b->index;
6113
6114 /* Compute the intersection of the dominators of all the
6115 predecessor blocks.
6116
6117 If one of the predecessor blocks is the ENTRY block, then the
6118 intersection of the dominators of the predecessor blocks is
6119 defined as the null set. We can identify such blocks by the
6120 special value in the AUX field in the block structure. */
6121 if (b->aux == ENTRY_BLOCK_PTR)
6122 {
6123 /* Do not clear the aux field for blocks which are
6124 successors of the ENTRY block. That way we never add
6125 them to the worklist again.
6126
6127 The intersect of dominators of the preds of this block is
6128 defined as the null set. */
6129 sbitmap_zero (temp_bitmap[bb]);
6130 }
6131 else
6132 {
6133 /* Clear the aux field of this block so it can be added to
6134 the worklist again if necessary. */
6135 b->aux = NULL;
6136 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6137 }
6138
6139 /* Make sure each block always dominates itself. */
6140 SET_BIT (temp_bitmap[bb], bb);
6141
6142 /* If the out state of this block changed, then we need to
6143 add the successors of this block to the worklist if they
6144 are not already on the worklist. */
6145 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6146 {
6147 for (e = b->succ; e; e = e->succ_next)
6148 {
6149 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6150 {
6151 *qin++ = e->dest;
6152 if (qin >= workend)
6153 qin = worklist;
6154 qlen++;
6155
6156 e->dest->aux = e;
6157 }
6158 }
6159 }
6160 }
6161 }
6162
6163 if (post_dominators)
6164 {
6165 /* The optimistic setting of dominators requires us to put every
6166 block on the work list initially. */
6167 qin = qout = worklist;
6168 for (bb = 0; bb < n_basic_blocks; bb++)
6169 {
6170 *qin++ = BASIC_BLOCK (bb);
6171 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6172 }
6173 qlen = n_basic_blocks;
6174 qin = worklist;
6175
6176 /* We want a maximal solution, so initially assume everything post
6177 dominates everything else. */
6178 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6179
6180 /* Mark predecessors of the exit block so we can identify them below. */
6181 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6182 e->src->aux = EXIT_BLOCK_PTR;
6183
6184 /* Iterate until the worklist is empty. */
6185 while (qlen)
6186 {
6187 /* Take the first entry off the worklist. */
6188 basic_block b = *qout++;
6189 if (qout >= workend)
6190 qout = worklist;
6191 qlen--;
6192
6193 bb = b->index;
6194
6195 /* Compute the intersection of the post dominators of all the
6196 successor blocks.
6197
6198 If one of the successor blocks is the EXIT block, then the
6199 intersection of the dominators of the successor blocks is
6200 defined as the null set. We can identify such blocks by the
6201 special value in the AUX field in the block structure. */
6202 if (b->aux == EXIT_BLOCK_PTR)
6203 {
6204 /* Do not clear the aux field for blocks which are
6205 predecessors of the EXIT block. That way we never
6206 add them to the worklist again.
6207
6208 The intersect of dominators of the succs of this block is
6209 defined as the null set. */
6210 sbitmap_zero (temp_bitmap[bb]);
6211 }
6212 else
6213 {
6214 /* Clear the aux field of this block so it can be added to
6215 the worklist again if necessary. */
6216 b->aux = NULL;
6217 sbitmap_intersection_of_succs (temp_bitmap[bb],
6218 post_dominators, bb);
6219 }
6220
6221 /* Make sure each block always post dominates itself. */
6222 SET_BIT (temp_bitmap[bb], bb);
6223
6224 /* If the out state of this block changed, then we need to
6225 add the predecessors of this block to the worklist if they
6226 are not already on the worklist. */
6227 if (sbitmap_a_and_b (post_dominators[bb],
6228 post_dominators[bb],
6229 temp_bitmap[bb]))
6230 {
6231 for (e = b->pred; e; e = e->pred_next)
6232 {
6233 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6234 {
6235 *qin++ = e->src;
6236 if (qin >= workend)
6237 qin = worklist;
6238 qlen++;
6239
6240 e->src->aux = e;
6241 }
6242 }
6243 }
6244 }
6245 }
6246
6247 free (worklist);
6248 free (temp_bitmap);
6249 }
6250
6251 /* Given DOMINATORS, compute the immediate dominators into IDOM. If a
6252 block dominates only itself, its entry remains as INVALID_BLOCK. */
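
/* A small worked example (hypothetical CFG): with edges 0->1, 0->2, 1->3
   and 2->3 we have dom(3) = {0, 3}; after removing the block itself and
   subtracting each dominator's dominators only bit 0 remains, so
   idom[3] = 0, and likewise idom[1] = idom[2] = 0, while block 0
   dominates only itself and idom[0] stays INVALID_BLOCK.  */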
6253
6254 void
6255 compute_immediate_dominators (idom, dominators)
6256 int *idom;
6257 sbitmap *dominators;
6258 {
6259 sbitmap *tmp;
6260 int b;
6261
6262 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6263
6264 /* Begin with tmp(n) = dom(n) - { n }. */
6265 for (b = n_basic_blocks; --b >= 0;)
6266 {
6267 sbitmap_copy (tmp[b], dominators[b]);
6268 RESET_BIT (tmp[b], b);
6269 }
6270
6271 /* Subtract out all of our dominator's dominators. */
6272 for (b = n_basic_blocks; --b >= 0;)
6273 {
6274 sbitmap tmp_b = tmp[b];
6275 int s;
6276
6277 for (s = n_basic_blocks; --s >= 0;)
6278 if (TEST_BIT (tmp_b, s))
6279 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6280 }
6281
6282 /* Find the one bit set in the bitmap and put it in the output array. */
6283 for (b = n_basic_blocks; --b >= 0;)
6284 {
6285 int t;
6286 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6287 }
6288
6289 sbitmap_vector_free (tmp);
6290 }
6291
6292 /* Given POSTDOMINATORS, compute the immediate postdominators into
6293 IDOM. If a block is postdominated only by itself, its entry remains as
6294 INVALID_BLOCK. */
6295
6296 void
6297 compute_immediate_postdominators (idom, postdominators)
6298 int *idom;
6299 sbitmap *postdominators;
6300 {
6301 compute_immediate_dominators (idom, postdominators);
6302 }
6303
6304 /* Recompute register set/reference counts immediately prior to register
6305 allocation.
6306
6307 This avoids problems with set/reference counts changing to/from values
6308 which have special meanings to the register allocators.
6309
6310 Additionally, the reference counts are the primary component used by the
6311 register allocators to prioritize pseudos for allocation to hard regs.
6312 More accurate reference counts generally lead to better register allocation.
6313
6314 F is the first insn to be scanned.
6315
6316 LOOP_STEP denotes how much loop_depth should be incremented per
6317 loop nesting level in order to increase the ref count more for
6318 references in a loop.
6319
6320 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6321 possibly other information which is used by the register allocators. */
6322
6323 void
6324 recompute_reg_usage (f, loop_step)
6325 rtx f ATTRIBUTE_UNUSED;
6326 int loop_step ATTRIBUTE_UNUSED;
6327 {
6328 allocate_reg_life_data ();
6329 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6330 }
6331
6332 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6333 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6334 of the number of registers that died. */
6335
6336 int
6337 count_or_remove_death_notes (blocks, kill)
6338 sbitmap blocks;
6339 int kill;
6340 {
6341 int i, count = 0;
6342
6343 for (i = n_basic_blocks - 1; i >= 0; --i)
6344 {
6345 basic_block bb;
6346 rtx insn;
6347
6348 if (blocks && ! TEST_BIT (blocks, i))
6349 continue;
6350
6351 bb = BASIC_BLOCK (i);
6352
6353 for (insn = bb->head;; insn = NEXT_INSN (insn))
6354 {
6355 if (INSN_P (insn))
6356 {
6357 rtx *pprev = &REG_NOTES (insn);
6358 rtx link = *pprev;
6359
6360 while (link)
6361 {
6362 switch (REG_NOTE_KIND (link))
6363 {
6364 case REG_DEAD:
6365 if (GET_CODE (XEXP (link, 0)) == REG)
6366 {
6367 rtx reg = XEXP (link, 0);
6368 int n;
6369
6370 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6371 n = 1;
6372 else
6373 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6374 count += n;
6375 }
6376 /* Fall through. */
6377
6378 case REG_UNUSED:
6379 if (kill)
6380 {
6381 rtx next = XEXP (link, 1);
6382 free_EXPR_LIST_node (link);
6383 *pprev = link = next;
6384 break;
6385 }
6386 /* Fall through. */
6387
6388 default:
6389 pprev = &XEXP (link, 1);
6390 link = *pprev;
6391 break;
6392 }
6393 }
6394 }
6395
6396 if (insn == bb->end)
6397 break;
6398 }
6399 }
6400
6401 return count;
6402 }
6403
6404 /* Record INSN's block as BB. */
6405
6406 void
6407 set_block_for_insn (insn, bb)
6408 rtx insn;
6409 basic_block bb;
6410 {
6411 size_t uid = INSN_UID (insn);
6412 if (uid >= basic_block_for_insn->num_elements)
6413 {
6414 int new_size;
6415
6416 /* Add one-eighth the size so we don't keep calling xrealloc. */
6417 new_size = uid + (uid + 7) / 8;
6418
6419 VARRAY_GROW (basic_block_for_insn, new_size);
6420 }
6421 VARRAY_BB (basic_block_for_insn, uid) = bb;
6422 }
6423
6424 /* Record INSN's block number as BB. */
6425 /* ??? This has got to go. */
6426
6427 void
6428 set_block_num (insn, bb)
6429 rtx insn;
6430 int bb;
6431 {
6432 set_block_for_insn (insn, BASIC_BLOCK (bb));
6433 }
6434 \f
6435 /* Verify the consistency of the CFG. This function checks some CFG invariants and
6436 aborts when something is wrong. The hope is that it will help to
6437 convert many optimization passes to keep the CFG consistent.
6438 
6439 Currently it does the following checks:
6440 
6441 - test head/end pointers
6442 - overlapping of basic blocks
6443 - edge list correctness
6444 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6445 - tails of basic blocks (ensure that boundary is necessary)
6446 - scans the body of the basic block for JUMP_INSN, CODE_LABEL
6447 and NOTE_INSN_BASIC_BLOCK
6448 - check that all insns are in the basic blocks
6449 (except the switch handling code, barriers and notes)
6450 - check that all returns are followed by barriers
6451 
6452 In the future it can be extended to check a lot of other stuff as well
6453 (reachability of basic blocks, life information, etc. etc.). */
6454
6455 void
6456 verify_flow_info ()
6457 {
6458 const int max_uid = get_max_uid ();
6459 const rtx rtx_first = get_insns ();
6460 rtx last_head = get_last_insn ();
6461 basic_block *bb_info;
6462 rtx x;
6463 int i, last_bb_num_seen, num_bb_notes, err = 0;
6464
6465 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6466
6467 for (i = n_basic_blocks - 1; i >= 0; i--)
6468 {
6469 basic_block bb = BASIC_BLOCK (i);
6470 rtx head = bb->head;
6471 rtx end = bb->end;
6472
6473 /* Verify the end of the basic block is in the INSN chain. */
6474 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6475 if (x == end)
6476 break;
6477 if (!x)
6478 {
6479 error ("End insn %d for block %d not found in the insn stream.",
6480 INSN_UID (end), bb->index);
6481 err = 1;
6482 }
6483
6484 /* Work backwards from the end to the head of the basic block
6485 to verify the head is in the RTL chain. */
6486 for (; x != NULL_RTX; x = PREV_INSN (x))
6487 {
6488 /* While walking over the insn chain, verify insns appear
6489 in only one basic block and initialize the BB_INFO array
6490 used by other passes. */
6491 if (bb_info[INSN_UID (x)] != NULL)
6492 {
6493 error ("Insn %d is in multiple basic blocks (%d and %d)",
6494 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6495 err = 1;
6496 }
6497 bb_info[INSN_UID (x)] = bb;
6498
6499 if (x == head)
6500 break;
6501 }
6502 if (!x)
6503 {
6504 error ("Head insn %d for block %d not found in the insn stream.",
6505 INSN_UID (head), bb->index);
6506 err = 1;
6507 }
6508
6509 last_head = x;
6510 }
6511
6512 /* Now check the basic blocks (boundaries etc.) */
6513 for (i = n_basic_blocks - 1; i >= 0; i--)
6514 {
6515 basic_block bb = BASIC_BLOCK (i);
6516 /* Check correctness of edge lists. */
6517 edge e;
6518
6519 e = bb->succ;
6520 while (e)
6521 {
6522 if (e->src != bb)
6523 {
6524 fprintf (stderr,
6525 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6526 bb->index);
6527 fprintf (stderr, "Predecessor: ");
6528 dump_edge_info (stderr, e, 0);
6529 fprintf (stderr, "\nSuccessor: ");
6530 dump_edge_info (stderr, e, 1);
6531 fflush (stderr);
6532 err = 1;
6533 }
6534 if (e->dest != EXIT_BLOCK_PTR)
6535 {
6536 edge e2 = e->dest->pred;
6537 while (e2 && e2 != e)
6538 e2 = e2->pred_next;
6539 if (!e2)
6540 {
6541 error ("Basic block %i edge lists are corrupted", bb->index);
6542 err = 1;
6543 }
6544 }
6545 e = e->succ_next;
6546 }
6547
6548 e = bb->pred;
6549 while (e)
6550 {
6551 if (e->dest != bb)
6552 {
6553 error ("Basic block %d pred edge is corrupted", bb->index);
6554 fputs ("Predecessor: ", stderr);
6555 dump_edge_info (stderr, e, 0);
6556 fputs ("\nSuccessor: ", stderr);
6557 dump_edge_info (stderr, e, 1);
6558 fputc ('\n', stderr);
6559 err = 1;
6560 }
6561 if (e->src != ENTRY_BLOCK_PTR)
6562 {
6563 edge e2 = e->src->succ;
6564 while (e2 && e2 != e)
6565 e2 = e2->succ_next;
6566 if (!e2)
6567 {
6568 error ("Basic block %i edge lists are corrupted", bb->index);
6569 err = 1;
6570 }
6571 }
6572 e = e->pred_next;
6573 }
6574
6575 /* OK, the pointers are correct. Now check the header of the basic
6576 block. It ought to contain an optional CODE_LABEL followed
6577 by the NOTE_INSN_BASIC_BLOCK note. */
6578 x = bb->head;
6579 if (GET_CODE (x) == CODE_LABEL)
6580 {
6581 if (bb->end == x)
6582 {
6583 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6584 bb->index);
6585 err = 1;
6586 }
6587 x = NEXT_INSN (x);
6588 }
6589 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6590 {
6591 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6592 bb->index);
6593 err = 1;
6594 }
6595
6596 if (bb->end == x)
6597 {
6598 /* Do checks for empty blocks here */
6599 }
6600 else
6601 {
6602 x = NEXT_INSN (x);
6603 while (x)
6604 {
6605 if (NOTE_INSN_BASIC_BLOCK_P (x))
6606 {
6607 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6608 INSN_UID (x), bb->index);
6609 err = 1;
6610 }
6611
6612 if (x == bb->end)
6613 break;
6614
6615 if (GET_CODE (x) == JUMP_INSN
6616 || GET_CODE (x) == CODE_LABEL
6617 || GET_CODE (x) == BARRIER)
6618 {
6619 error ("In basic block %d:", bb->index);
6620 fatal_insn ("Flow control insn inside a basic block", x);
6621 }
6622
6623 x = NEXT_INSN (x);
6624 }
6625 }
6626 }
6627
6628 last_bb_num_seen = -1;
6629 num_bb_notes = 0;
6630 x = rtx_first;
6631 while (x)
6632 {
6633 if (NOTE_INSN_BASIC_BLOCK_P (x))
6634 {
6635 basic_block bb = NOTE_BASIC_BLOCK (x);
6636 num_bb_notes++;
6637 if (bb->index != last_bb_num_seen + 1)
6638 fatal ("Basic blocks not numbered consecutively");
6639 last_bb_num_seen = bb->index;
6640 }
6641
6642 if (!bb_info[INSN_UID (x)])
6643 {
6644 switch (GET_CODE (x))
6645 {
6646 case BARRIER:
6647 case NOTE:
6648 break;
6649
6650 case CODE_LABEL:
6651 /* An addr_vec is placed outside any basic block. */
6652 if (NEXT_INSN (x)
6653 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6654 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6655 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6656 {
6657 x = NEXT_INSN (x);
6658 }
6659
6660 /* But in any case, non-deletable labels can appear anywhere. */
6661 break;
6662
6663 default:
6664 fatal_insn ("Insn outside basic block", x);
6665 }
6666 }
6667
6668 if (INSN_P (x)
6669 && GET_CODE (x) == JUMP_INSN
6670 && returnjump_p (x) && ! condjump_p (x)
6671 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6672 fatal_insn ("Return not followed by barrier", x);
6673
6674 x = NEXT_INSN (x);
6675 }
6676
6677 if (num_bb_notes != n_basic_blocks)
6678 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6679 num_bb_notes, n_basic_blocks);
6680
6681 if (err)
6682 abort ();
6683
6684 /* Clean up. */
6685 free (bb_info);
6686 }
6687 \f
6688 /* Functions to access an edge list with a vector representation.
6689 Enough data is kept such that given an index number, the
6690 pred and succ that edge represents can be determined, or
6691 given a pred and a succ, its index number can be returned.
6692 This allows algorithms which consume a lot of memory to
6693 represent the normally full matrix of edge (pred,succ) with a
6694 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6695 wasted space in the client code due to sparse flow graphs. */
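
/* A typical (hypothetical) client looks something like:

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, bb_a, bb_b);
     if (ix != EDGE_INDEX_NO_EDGE)
       SET_BIT (my_edge_sbitmap, ix);
     ...
     free_edge_list (elist);

   where bb_a, bb_b and my_edge_sbitmap stand for the client's own blocks
   and bitmap; per-edge data can then live in a flat array or bitmap
   indexed by edge number instead of a full (pred, succ) matrix.  */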
6696
6697 /* This function initializes the edge list. Basically the entire
6698 flowgraph is processed, and all edges are assigned a number,
6699 and the data structure is filled in. */
6700
6701 struct edge_list *
6702 create_edge_list ()
6703 {
6704 struct edge_list *elist;
6705 edge e;
6706 int num_edges;
6707 int x;
6708 int block_count;
6709
6710 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6711
6712 num_edges = 0;
6713
6714 /* Determine the number of edges in the flow graph by counting successor
6715 edges on each basic block. */
6716 for (x = 0; x < n_basic_blocks; x++)
6717 {
6718 basic_block bb = BASIC_BLOCK (x);
6719
6720 for (e = bb->succ; e; e = e->succ_next)
6721 num_edges++;
6722 }
6723 /* Don't forget successors of the entry block. */
6724 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6725 num_edges++;
6726
6727 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6728 elist->num_blocks = block_count;
6729 elist->num_edges = num_edges;
6730 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6731
6732 num_edges = 0;
6733
6734 /* Follow successors of the entry block, and register these edges. */
6735 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6736 {
6737 elist->index_to_edge[num_edges] = e;
6738 num_edges++;
6739 }
6740
6741 for (x = 0; x < n_basic_blocks; x++)
6742 {
6743 basic_block bb = BASIC_BLOCK (x);
6744
6745 /* Follow all successors of blocks, and register these edges. */
6746 for (e = bb->succ; e; e = e->succ_next)
6747 {
6748 elist->index_to_edge[num_edges] = e;
6749 num_edges++;
6750 }
6751 }
6752 return elist;
6753 }
6754
6755 /* This function frees the memory associated with an edge list. */
6756
6757 void
6758 free_edge_list (elist)
6759 struct edge_list *elist;
6760 {
6761 if (elist)
6762 {
6763 free (elist->index_to_edge);
6764 free (elist);
6765 }
6766 }
6767
6768 /* This function provides debug output showing an edge list. */
6769
6770 void
6771 print_edge_list (f, elist)
6772 FILE *f;
6773 struct edge_list *elist;
6774 {
6775 int x;
6776 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6777 elist->num_blocks - 2, elist->num_edges);
6778
6779 for (x = 0; x < elist->num_edges; x++)
6780 {
6781 fprintf (f, " %-4d - edge(", x);
6782 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6783 fprintf (f, "entry,");
6784 else
6785 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6786
6787 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6788 fprintf (f, "exit)\n");
6789 else
6790 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6791 }
6792 }
6793
6794 /* This function provides an internal consistency check of an edge list,
6795 verifying that all edges are present, and that there are no
6796 extra edges. */
6797
6798 void
6799 verify_edge_list (f, elist)
6800 FILE *f;
6801 struct edge_list *elist;
6802 {
6803 int x, pred, succ, index;
6804 edge e;
6805
6806 for (x = 0; x < n_basic_blocks; x++)
6807 {
6808 basic_block bb = BASIC_BLOCK (x);
6809
6810 for (e = bb->succ; e; e = e->succ_next)
6811 {
6812 pred = e->src->index;
6813 succ = e->dest->index;
6814 index = EDGE_INDEX (elist, e->src, e->dest);
6815 if (index == EDGE_INDEX_NO_EDGE)
6816 {
6817 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6818 continue;
6819 }
6820 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6821 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6822 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6823 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6824 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6825 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6826 }
6827 }
6828 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6829 {
6830 pred = e->src->index;
6831 succ = e->dest->index;
6832 index = EDGE_INDEX (elist, e->src, e->dest);
6833 if (index == EDGE_INDEX_NO_EDGE)
6834 {
6835 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6836 continue;
6837 }
6838 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6839 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6840 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6841 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6842 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6843 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6844 }
6845 /* We've verified that all the edges are in the list, now let's make sure
6846 there are no spurious edges in the list. */
6847
6848 for (pred = 0; pred < n_basic_blocks; pred++)
6849 for (succ = 0; succ < n_basic_blocks; succ++)
6850 {
6851 basic_block p = BASIC_BLOCK (pred);
6852 basic_block s = BASIC_BLOCK (succ);
6853
6854 int found_edge = 0;
6855
6856 for (e = p->succ; e; e = e->succ_next)
6857 if (e->dest == s)
6858 {
6859 found_edge = 1;
6860 break;
6861 }
6862 for (e = s->pred; e; e = e->pred_next)
6863 if (e->src == p)
6864 {
6865 found_edge = 1;
6866 break;
6867 }
6868 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6869 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6870 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6871 pred, succ);
6872 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6873 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6874 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6875 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6876 BASIC_BLOCK (succ)));
6877 }
6878 for (succ = 0; succ < n_basic_blocks; succ++)
6879 {
6880 basic_block p = ENTRY_BLOCK_PTR;
6881 basic_block s = BASIC_BLOCK (succ);
6882
6883 int found_edge = 0;
6884
6885 for (e = p->succ; e; e = e->succ_next)
6886 if (e->dest == s)
6887 {
6888 found_edge = 1;
6889 break;
6890 }
6891 for (e = s->pred; e; e = e->pred_next)
6892 if (e->src == p)
6893 {
6894 found_edge = 1;
6895 break;
6896 }
6897 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6898 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6899 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6900 succ);
6901 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6902 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6903 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6904 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6905 BASIC_BLOCK (succ)));
6906 }
6907 for (pred = 0; pred < n_basic_blocks; pred++)
6908 {
6909 basic_block p = BASIC_BLOCK (pred);
6910 basic_block s = EXIT_BLOCK_PTR;
6911
6912 int found_edge = 0;
6913
6914 for (e = p->succ; e; e = e->succ_next)
6915 if (e->dest == s)
6916 {
6917 found_edge = 1;
6918 break;
6919 }
6920 for (e = s->pred; e; e = e->pred_next)
6921 if (e->src == p)
6922 {
6923 found_edge = 1;
6924 break;
6925 }
6926 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6927 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6928 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6929 pred);
6930 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6931 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6932 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6933 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6934 EXIT_BLOCK_PTR));
6935 }
6936 }
6937
6938 /* This routine will determine what, if any, edge there is between
6939 a specified predecessor and successor. */
6940
6941 int
6942 find_edge_index (edge_list, pred, succ)
6943 struct edge_list *edge_list;
6944 basic_block pred, succ;
6945 {
6946 int x;
6947 for (x = 0; x < NUM_EDGES (edge_list); x++)
6948 {
6949 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6950 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6951 return x;
6952 }
6953 return (EDGE_INDEX_NO_EDGE);
6954 }
6955
6956 /* This function will remove an edge from the flow graph. */
6957
6958 void
6959 remove_edge (e)
6960 edge e;
6961 {
6962 edge last_pred = NULL;
6963 edge last_succ = NULL;
6964 edge tmp;
6965 basic_block src, dest;
6966 src = e->src;
6967 dest = e->dest;
6968 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6969 last_succ = tmp;
6970
6971 if (!tmp)
6972 abort ();
6973 if (last_succ)
6974 last_succ->succ_next = e->succ_next;
6975 else
6976 src->succ = e->succ_next;
6977
6978 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6979 last_pred = tmp;
6980
6981 if (!tmp)
6982 abort ();
6983 if (last_pred)
6984 last_pred->pred_next = e->pred_next;
6985 else
6986 dest->pred = e->pred_next;
6987
6988 n_edges--;
6989 free (e);
6990 }
6991
6992 /* This routine will remove any fake successor edges for a basic block.
6993 When the edge is removed, it is also removed from whatever predecessor
6994 list it is in. */
6995
6996 static void
6997 remove_fake_successors (bb)
6998 basic_block bb;
6999 {
7000 edge e;
7001 for (e = bb->succ; e;)
7002 {
7003 edge tmp = e;
7004 e = e->succ_next;
7005 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7006 remove_edge (tmp);
7007 }
7008 }
7009
7010 /* This routine will remove all fake edges from the flow graph. If
7011 we remove all fake successors, it will automatically remove all
7012 fake predecessors. */
7013
7014 void
7015 remove_fake_edges ()
7016 {
7017 int x;
7018
7019 for (x = 0; x < n_basic_blocks; x++)
7020 remove_fake_successors (BASIC_BLOCK (x));
7021
7022 /* We've handled all successors except the entry block's. */
7023 remove_fake_successors (ENTRY_BLOCK_PTR);
7024 }
7025
7026 /* This function will add a fake edge between any block which has no
7027 successors, and the exit block. Some data flow equations require these
7028 edges to exist. */
7029
7030 void
7031 add_noreturn_fake_exit_edges ()
7032 {
7033 int x;
7034
7035 for (x = 0; x < n_basic_blocks; x++)
7036 if (BASIC_BLOCK (x)->succ == NULL)
7037 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7038 }
7039
7040 /* This function adds a fake edge between any infinite loops to the
7041 exit block. Some optimizations require a path from each node to
7042 the exit node.
7043
7044 See also Morgan, Figure 3.10, pp. 82-83.
7045
7046 The current implementation is ugly, not attempting to minimize the
7047 number of inserted fake edges. To reduce the number of fake edges
7048 to insert, add fake edges from _innermost_ loops containing only
7049 nodes not reachable from the exit block. */
7050
7051 void
7052 connect_infinite_loops_to_exit ()
7053 {
7054 basic_block unvisited_block;
7055
7056 /* Perform depth-first search in the reverse graph to find nodes
7057 reachable from the exit block. */
7058 struct depth_first_search_dsS dfs_ds;
7059
7060 flow_dfs_compute_reverse_init (&dfs_ds);
7061 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7062
7063 /* Repeatedly add fake edges, updating the unreachable nodes. */
7064 while (1)
7065 {
7066 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7067 if (!unvisited_block)
7068 break;
7069 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7070 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7071 }
7072
7073 flow_dfs_compute_reverse_finish (&dfs_ds);
7074
7075 return;
7076 }
7077
7078 /* Redirect an edge's successor from one block to another. */
7079
7080 void
7081 redirect_edge_succ (e, new_succ)
7082 edge e;
7083 basic_block new_succ;
7084 {
7085 edge *pe;
7086
7087 /* Disconnect the edge from the old successor block. */
7088 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7089 continue;
7090 *pe = (*pe)->pred_next;
7091
7092 /* Reconnect the edge to the new successor block. */
7093 e->pred_next = new_succ->pred;
7094 new_succ->pred = e;
7095 e->dest = new_succ;
7096 }
7097
7098 /* Redirect an edge's predecessor from one block to another. */
7099
7100 void
7101 redirect_edge_pred (e, new_pred)
7102 edge e;
7103 basic_block new_pred;
7104 {
7105 edge *pe;
7106
7107 /* Disconnect the edge from the old predecessor block. */
7108 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7109 continue;
7110 *pe = (*pe)->succ_next;
7111
7112 /* Reconnect the edge to the new predecessor block. */
7113 e->succ_next = new_pred->succ;
7114 new_pred->succ = e;
7115 e->src = new_pred;
7116 }
7117 \f
7118 /* Dump the list of basic blocks in the bitmap NODES. */
7119
7120 static void
7121 flow_nodes_print (str, nodes, file)
7122 const char *str;
7123 const sbitmap nodes;
7124 FILE *file;
7125 {
7126 int node;
7127
7128 fprintf (file, "%s { ", str);
7129 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7130 fputs ("}\n", file);
7131 }
7132
7133 /* Dump the list of exiting edges in the array EDGES. */
7134
7135 static void
7136 flow_exits_print (str, edges, num_edges, file)
7137 const char *str;
7138 const edge *edges;
7139 int num_edges;
7140 FILE *file;
7141 {
7142 int i;
7143
7144 fprintf (file, "%s { ", str);
7145 for (i = 0; i < num_edges; i++)
7146 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
7147 fputs ("}\n", file);
7148 }
7149
7150 /* Dump loop related CFG information. */
7151
7152 static void
7153 flow_loops_cfg_dump (loops, file)
7154 const struct loops *loops;
7155 FILE *file;
7156 {
7157 int i;
7158
7159 if (! loops->num || ! file || ! loops->cfg.dom)
7160 return;
7161
7162 for (i = 0; i < n_basic_blocks; i++)
7163 {
7164 edge succ;
7165
7166 fprintf (file, ";; %d succs { ", i);
7167 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7168 fprintf (file, "%d ", succ->dest->index);
7169 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7170 }
7171
7172 /* Dump the DFS node order. */
7173 if (loops->cfg.dfs_order)
7174 {
7175 fputs (";; DFS order: ", file);
7176 for (i = 0; i < n_basic_blocks; i++)
7177 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7178 fputs ("\n", file);
7179 }
7180 /* Dump the reverse completion node order. */
7181 if (loops->cfg.rc_order)
7182 {
7183 fputs (";; RC order: ", file);
7184 for (i = 0; i < n_basic_blocks; i++)
7185 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7186 fputs ("\n", file);
7187 }
7188 }
7189
7190 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7191
7192 static int
7193 flow_loop_nested_p (outer, loop)
7194 struct loop *outer;
7195 struct loop *loop;
7196 {
7197 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7198 }
7199
7200 /* Dump the loop information specified by LOOPS to the stream FILE. */
7201
7202 void
7203 flow_loops_dump (loops, file, verbose)
7204 const struct loops *loops;
7205 FILE *file;
7206 int verbose;
7207 {
7208 int i;
7209 int num_loops;
7210
7211 num_loops = loops->num;
7212 if (! num_loops || ! file)
7213 return;
7214
7215 fprintf (file, ";; %d loops found, %d levels\n",
7216 num_loops, loops->levels);
7217
7218 for (i = 0; i < num_loops; i++)
7219 {
7220 struct loop *loop = &loops->array[i];
7221
7222 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7223 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7224 loop->header->index, loop->latch->index,
7225 loop->pre_header ? loop->pre_header->index : -1,
7226 loop->depth, loop->level,
7227 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7228 fprintf (file, ";; %d", loop->num_nodes);
7229 flow_nodes_print (" nodes", loop->nodes, file);
7230 fprintf (file, ";; %d", loop->num_exits);
7231 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7232
7233 if (loop->shared)
7234 {
7235 int j;
7236
7237 for (j = 0; j < i; j++)
7238 {
7239 struct loop *oloop = &loops->array[j];
7240
7241 if (loop->header == oloop->header)
7242 {
7243 int disjoint;
7244 int smaller;
7245
7246 smaller = loop->num_nodes < oloop->num_nodes;
7247
7248 /* If the union of LOOP and OLOOP is different from
7249 the larger of LOOP and OLOOP, then LOOP and OLOOP
7250 must be disjoint. */
7251 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7252 smaller ? oloop : loop);
7253 fprintf (file,
7254 ";; loop header %d shared by loops %d, %d %s\n",
7255 loop->header->index, i, j,
7256 disjoint ? "disjoint" : "nested");
7257 }
7258 }
7259 }
7260
7261 if (verbose)
7262 {
7263 /* Print diagnostics to compare our concept of a loop with
7264 what the loop notes say. */
7265 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7266 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7267 != NOTE_INSN_LOOP_BEG)
7268 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7269 INSN_UID (PREV_INSN (loop->first->head)));
7270 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7271 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7272 != NOTE_INSN_LOOP_END)
7273 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7274 INSN_UID (NEXT_INSN (loop->last->end)));
7275 }
7276 }
7277
7278 if (verbose)
7279 flow_loops_cfg_dump (loops, file);
7280 }
7281
7282 /* Free all the memory allocated for LOOPS. */
7283
7284 void
7285 flow_loops_free (loops)
7286 struct loops *loops;
7287 {
7288 if (loops->array)
7289 {
7290 int i;
7291
7292 if (! loops->num)
7293 abort ();
7294
7295 /* Free the loop descriptors. */
7296 for (i = 0; i < loops->num; i++)
7297 {
7298 struct loop *loop = &loops->array[i];
7299
7300 if (loop->nodes)
7301 sbitmap_free (loop->nodes);
7302 if (loop->exits)
7303 free (loop->exits);
7304 }
7305 free (loops->array);
7306 loops->array = NULL;
7307
7308 if (loops->cfg.dom)
7309 sbitmap_vector_free (loops->cfg.dom);
7310 if (loops->cfg.dfs_order)
7311 free (loops->cfg.dfs_order);
7312
7313 sbitmap_free (loops->shared_headers);
7314 }
7315 }
7316
7317 /* Find the exits from the loop using the bitmap of loop nodes NODES
7318 and store them in the EXITS array. Return the number of exits from the
7319 loop. */
7320
7321 static int
7322 flow_loop_exits_find (nodes, exits)
7323 const sbitmap nodes;
7324 edge **exits;
7325 {
7326 edge e;
7327 int node;
7328 int num_exits;
7329
7330 *exits = NULL;
7331
7332 /* Check all nodes within the loop to see if there are any
7333 successors not in the loop. Note that a node may have multiple
7334 exiting edges. */
7335 num_exits = 0;
7336 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7337 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7338 {
7339 basic_block dest = e->dest;
7340
7341 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7342 num_exits++;
7343 }
7344 });
7345
7346 if (! num_exits)
7347 return 0;
7348
7349 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7350
7351 /* Store all exiting edges into an array. */
7352 num_exits = 0;
7353 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7354 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7355 {
7356 basic_block dest = e->dest;
7357
7358 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7359 (*exits)[num_exits++] = e;
7360 }
7361 });
7362
7363 return num_exits;
7364 }
7365
7366 /* Find the nodes contained within the loop with header HEADER and
7367 latch LATCH and store in NODES. Return the number of nodes within
7368 the loop. */
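
/* Equivalently (the usual natural loop formulation): the loop consists of
   HEADER together with every block that can reach LATCH without passing
   through HEADER, which is exactly what the backwards walk over
   predecessors below collects.  */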
7369
7370 static int
7371 flow_loop_nodes_find (header, latch, nodes)
7372 basic_block header;
7373 basic_block latch;
7374 sbitmap nodes;
7375 {
7376 basic_block *stack;
7377 int sp;
7378 int num_nodes = 0;
7379
7380 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7381 sp = 0;
7382
7383 /* Start with only the loop header in the set of loop nodes. */
7384 sbitmap_zero (nodes);
7385 SET_BIT (nodes, header->index);
7386 num_nodes++;
7387 header->loop_depth++;
7388
7389 /* Push the loop latch on to the stack. */
7390 if (! TEST_BIT (nodes, latch->index))
7391 {
7392 SET_BIT (nodes, latch->index);
7393 latch->loop_depth++;
7394 num_nodes++;
7395 stack[sp++] = latch;
7396 }
7397
7398 while (sp)
7399 {
7400 basic_block node;
7401 edge e;
7402
7403 node = stack[--sp];
7404 for (e = node->pred; e; e = e->pred_next)
7405 {
7406 basic_block ancestor = e->src;
7407
7408 /* If this ancestor is not yet marked as part of the loop, add it
7409 to the set of loop nodes and push it onto the stack. */
7410 if (ancestor != ENTRY_BLOCK_PTR
7411 && ! TEST_BIT (nodes, ancestor->index))
7412 {
7413 SET_BIT (nodes, ancestor->index);
7414 ancestor->loop_depth++;
7415 num_nodes++;
7416 stack[sp++] = ancestor;
7417 }
7418 }
7419 }
7420 free (stack);
7421 return num_nodes;
7422 }
7423
7424 /* Compute the depth first search order and store it in the array
7425 DFS_ORDER if non-zero, marking visited nodes in a local bitmap. If
7426 RC_ORDER is non-zero, return the reverse completion number for each
7427 node. Returns the number of nodes visited. A depth first search
7428 tries to get as far away from the starting point as quickly as
7429 possible. */
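
/* For instance, on a hypothetical CFG with edges 0->1, 0->2, 1->3 and
   2->3, one possible outcome (depending on successor list ordering) is
   DFS_ORDER = {0, 1, 3, 2} and RC_ORDER = {0, 2, 1, 3}; RC_ORDER is a
   reverse postorder, so every node appears before its successors except
   along back edges.  */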
7430
7431 static int
7432 flow_depth_first_order_compute (dfs_order, rc_order)
7433 int *dfs_order;
7434 int *rc_order;
7435 {
7436 edge *stack;
7437 int sp;
7438 int dfsnum = 0;
7439 int rcnum = n_basic_blocks - 1;
7440 sbitmap visited;
7441
7442 /* Allocate stack for back-tracking up CFG. */
7443 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7444 sp = 0;
7445
7446 /* Allocate bitmap to track nodes that have been visited. */
7447 visited = sbitmap_alloc (n_basic_blocks);
7448
7449 /* None of the nodes in the CFG have been visited yet. */
7450 sbitmap_zero (visited);
7451
7452 /* Push the first edge on to the stack. */
7453 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7454
7455 while (sp)
7456 {
7457 edge e;
7458 basic_block src;
7459 basic_block dest;
7460
7461 /* Look at the edge on the top of the stack. */
7462 e = stack[sp - 1];
7463 src = e->src;
7464 dest = e->dest;
7465
7466 /* Check if the edge destination has been visited yet. */
7467 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7468 {
7469 /* Mark that we have visited the destination. */
7470 SET_BIT (visited, dest->index);
7471
7472 if (dfs_order)
7473 dfs_order[dfsnum++] = dest->index;
7474
7475 if (dest->succ)
7476 {
7477 /* Since the DEST node has been visited for the first
7478 time, check its successors. */
7479 stack[sp++] = dest->succ;
7480 }
7481 else
7482 {
7483 /* There are no successors for the DEST node so assign
7484 its reverse completion number. */
7485 if (rc_order)
7486 rc_order[rcnum--] = dest->index;
7487 }
7488 }
7489 else
7490 {
7491 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7492 {
7493 /* There are no more successors for the SRC node
7494 so assign its reverse completion number. */
7495 if (rc_order)
7496 rc_order[rcnum--] = src->index;
7497 }
7498
7499 if (e->succ_next)
7500 stack[sp - 1] = e->succ_next;
7501 else
7502 sp--;
7503 }
7504 }
7505
7506 free (stack);
7507 sbitmap_free (visited);
7508
7509 /* The number of nodes visited should not be greater than
7510 n_basic_blocks. */
7511 if (dfsnum > n_basic_blocks)
7512 abort ();
7513
7514 /* There are some nodes left in the CFG that are unreachable. */
7515 if (dfsnum < n_basic_blocks)
7516 abort ();
7517 return dfsnum;
7518 }
7519
7520 /* Perform a depth first search on the _reverse_ graph, marking the
7521 blocks visited in the search context so that blocks not reached from
7522 the starting blocks can be identified afterwards.
7523
7524 The computation is split into four pieces:
7525
7526 flow_dfs_compute_reverse_init () creates the necessary data
7527 structures.
7528
7529 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7530 structures. The block will start the search.
7531
7532 flow_dfs_compute_reverse_execute () continues (or starts) the
7533 search using the block on the top of the stack, stopping when the
7534 stack is empty.
7535
7536 flow_dfs_compute_reverse_finish () destroys the necessary data
7537 structures.
7538
7539 Thus, the user will probably call ..._init(), call ..._add_bb() to
7540 add a beginning basic block to the stack, call ..._execute(),
7541 possibly add another bb to the stack and again call ..._execute(),
7542 ..., and finally call _finish(). */
7543
7544 /* Initialize the data structures used for depth-first search on the
7545 reverse graph. DATA is the depth-first search context to be
7546 initialized. The back-tracking stack starts out empty and no blocks
7547 are marked as visited yet; blocks to start the search from are added
7548 afterwards with flow_dfs_compute_reverse_add_bb. */
7549
7550 static void
7551 flow_dfs_compute_reverse_init (data)
7552 depth_first_search_ds data;
7553 {
7554 /* Allocate stack for back-tracking up CFG. */
7555 data->stack =
7556 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7557 * sizeof (basic_block));
7558 data->sp = 0;
7559
7560 /* Allocate bitmap to track nodes that have been visited. */
7561 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7562
7563 /* None of the nodes in the CFG have been visited yet. */
7564 sbitmap_zero (data->visited_blocks);
7565
7566 return;
7567 }
7568
7569 /* Add the specified basic block to the top of the dfs data
7570 structures. When the search continues, it will start at the
7571 block. */
7572
7573 static void
7574 flow_dfs_compute_reverse_add_bb (data, bb)
7575 depth_first_search_ds data;
7576 basic_block bb;
7577 {
7578 data->stack[data->sp++] = bb;
7579 return;
7580 }
7581
7582 /* Continue the depth-first search through the reverse graph starting
7583 with the block at the stack's top and ending when the stack is
7584 empty. Visited nodes are marked. Returns an unvisited basic
7585 block, or NULL if there is none available. */
7586
7587 static basic_block
7588 flow_dfs_compute_reverse_execute (data)
7589 depth_first_search_ds data;
7590 {
7591 basic_block bb;
7592 edge e;
7593 int i;
7594
7595 while (data->sp > 0)
7596 {
7597 bb = data->stack[--data->sp];
7598
7599 /* Mark that we have visited this node. */
7600 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
7601 {
7602 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
7603
7604 /* Perform depth-first search on adjacent vertices. */
7605 for (e = bb->pred; e; e = e->pred_next)
7606 flow_dfs_compute_reverse_add_bb (data, e->src);
7607 }
7608 }
7609
7610 /* Determine if there are unvisited basic blocks. */
7611 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
7612 if (!TEST_BIT (data->visited_blocks, i))
7613 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
7614 return NULL;
7615 }
7616
7617 /* Destroy the data structures needed for depth-first search on the
7618 reverse graph. */
7619
7620 static void
7621 flow_dfs_compute_reverse_finish (data)
7622 depth_first_search_ds data;
7623 {
7624 free (data->stack);
7625 sbitmap_free (data->visited_blocks);
7626 return;
7627 }
7628
7629 /* Return the block for the pre-header of the loop with header
7630 HEADER where DOM specifies the dominator information. Return NULL if
7631 there is no pre-header. */
7632
7633 static basic_block
7634 flow_loop_pre_header_find (header, dom)
7635 basic_block header;
7636 const sbitmap *dom;
7637 {
7638 basic_block pre_header;
7639 edge e;
7640
7641 /* The pre-header is the unique predecessor of the header that the
7642 header does not dominate, i.e. the only entry from outside the loop. */
7643 pre_header = NULL;
7644 for (e = header->pred; e; e = e->pred_next)
7645 {
7646 basic_block node = e->src;
7647
7648 if (node != ENTRY_BLOCK_PTR
7649 && ! TEST_BIT (dom[node->index], header->index))
7650 {
7651 if (pre_header == NULL)
7652 pre_header = node;
7653 else
7654 {
7655 /* There are multiple edges into the header from outside
7656 the loop so there is no pre-header block. */
7657 pre_header = NULL;
7658 break;
7659 }
7660 }
7661 }
7662 return pre_header;
7663 }
7664
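/* For example (illustrative): if the only predecessor of a loop header
   from outside the loop is a single test block guarding the loop, that
   block is returned as the pre-header; if the header can also be
   entered from a second block outside the loop, there is no pre-header
   and NULL is returned.  */
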
7665 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7666 previously added. The insertion algorithm assumes that the loops
7667 are added in the order found by a depth first search of the CFG. */
7668
7669 static void
7670 flow_loop_tree_node_add (prevloop, loop)
7671 struct loop *prevloop;
7672 struct loop *loop;
7673 {
7675 if (flow_loop_nested_p (prevloop, loop))
7676 {
7677 prevloop->inner = loop;
7678 loop->outer = prevloop;
7679 return;
7680 }
7681
7682 while (prevloop->outer)
7683 {
7684 if (flow_loop_nested_p (prevloop->outer, loop))
7685 {
7686 prevloop->next = loop;
7687 loop->outer = prevloop->outer;
7688 return;
7689 }
7690 prevloop = prevloop->outer;
7691 }
7692
7693 prevloop->next = loop;
7694 loop->outer = NULL;
7695 }
7696
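/* For example (illustrative): if the loops are added in the order A,
   B, C, where B is nested inside A and C is disjoint from both, the
   resulting links are A->inner == B and A->next == C, with
   B->outer == A and A->outer == C->outer == NULL.  */
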
7697 /* Build the loop hierarchy tree for LOOPS. */
7698
7699 static void
7700 flow_loops_tree_build (loops)
7701 struct loops *loops;
7702 {
7703 int i;
7704 int num_loops;
7705
7706 num_loops = loops->num;
7707 if (! num_loops)
7708 return;
7709
7710 /* Root the loop hierarchy tree with the first loop found.
7711 Since the loops were found in depth first search order, this
7712 should be an outermost loop. */
7713 loops->tree = &loops->array[0];
7714 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7715
7716 /* Add the remaining loops to the tree. */
7717 for (i = 1; i < num_loops; i++)
7718 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7719 }
7720
7721 /* Helper function to compute loop nesting depth and enclosed loop level
7722 for the natural loop specified by LOOP at the loop depth DEPTH.
7723 Returns the loop level. */
7724
7725 static int
7726 flow_loop_level_compute (loop, depth)
7727 struct loop *loop;
7728 int depth;
7729 {
7730 struct loop *inner;
7731 int level = 1;
7732
7733 if (! loop)
7734 return 0;
7735
7736 /* Traverse the loop tree, assigning each loop its depth and computing
7737 its level as one more than the maximum level of its inner loops
7738 (or 1 if it has none). The loop level is the height of the loop
7739 in the loop tree and corresponds to the number of enclosed loop
7740 levels, including the loop itself. */
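/* For example (illustrative): in a nest of two loops where B is the
   only loop inside A, A gets depth 1 and level 2 while B gets depth 2
   and level 1.  */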
7741 for (inner = loop->inner; inner; inner = inner->next)
7742 {
7743 int ilevel;
7744
7745 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7746
7747 if (ilevel > level)
7748 level = ilevel;
7749 }
7750 loop->level = level;
7751 loop->depth = depth;
7752 return level;
7753 }
7754
7755 /* Compute the loop nesting depth and enclosed loop level for the loop
7756 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7757 level. */
7758
7759 static int
7760 flow_loops_level_compute (loops)
7761 struct loops *loops;
7762 {
7763 struct loop *loop;
7764 int level;
7765 int levels = 0;
7766
7767 /* Traverse all the outer level loops. */
7768 for (loop = loops->tree; loop; loop = loop->next)
7769 {
7770 level = flow_loop_level_compute (loop, 1);
7771 if (level > levels)
7772 levels = level;
7773 }
7774 return levels;
7775 }
7776
7777 /* Find all the natural loops in the function, record them in the LOOPS
7778 structure, and recalculate the loop_depth information in the basic
7779 block structures. Return the number of natural loops found. */
7780
7781 int
7782 flow_loops_find (loops)
7783 struct loops *loops;
7784 {
7785 int i;
7786 int b;
7787 int num_loops;
7788 edge e;
7789 sbitmap headers;
7790 sbitmap *dom;
7791 int *dfs_order;
7792 int *rc_order;
7793
7794 loops->num = 0;
7795 loops->array = NULL;
7796 loops->tree = NULL;
7797 dfs_order = NULL;
7798 rc_order = NULL;
7799
7800 /* Taking care of this degenerate case makes the rest of
7801 this code simpler. */
7802 if (n_basic_blocks == 0)
7803 return 0;
7804
7805 /* Compute the dominators. */
7806 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7807 compute_flow_dominators (dom, NULL);
7808
7809 /* Count the number of loop edges (back edges). This should be the
7810 same as the number of natural loops. Also clear each block's
7811 loop_depth: since we work from outer to inner loops in a nest,
7812 flow_loop_nodes_find increments loop_depth for every node within
7813 the current loop, so nodes of inner loops are counted again. */
7814
7815 num_loops = 0;
7816 for (b = 0; b < n_basic_blocks; b++)
7817 {
7818 BASIC_BLOCK (b)->loop_depth = 0;
7819 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7820 {
7821 basic_block latch = e->src;
7822
7823 /* Look for back edges where a predecessor is dominated
7824 by this block. A natural loop has a single entry
7825 node (header) that dominates all the nodes in the
7826 loop. It also has a single back edge to the header
7827 from a latch node. Note that multiple natural loops
7828 may share the same header. */
7829 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7830 num_loops++;
7831 }
7832 }
7833
7834 if (num_loops)
7835 {
7836 /* Compute depth first search order of the CFG so that outer
7837 natural loops will be found before inner natural loops. */
7838 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7839 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7840 flow_depth_first_order_compute (dfs_order, rc_order);
7841
7842 /* Allocate loop structures. */
7843 loops->array
7844 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7845
7846 headers = sbitmap_alloc (n_basic_blocks);
7847 sbitmap_zero (headers);
7848
7849 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7850 sbitmap_zero (loops->shared_headers);
7851
7852 /* Find and record information about all the natural loops
7853 in the CFG. */
7854 num_loops = 0;
7855 for (b = 0; b < n_basic_blocks; b++)
7856 {
7857 basic_block header;
7858
7859 /* Search the nodes of the CFG in reverse completion order so
7860 that outer loops are found first. */
7861 header = BASIC_BLOCK (rc_order[b]);
7862
7863 /* Look for all the possible latch blocks for this header. */
7864 for (e = header->pred; e; e = e->pred_next)
7865 {
7866 basic_block latch = e->src;
7867
7868 /* Look for back edges where a predecessor is dominated
7869 by this block. A natural loop has a single entry
7870 node (header) that dominates all the nodes in the
7871 loop. It also has a single back edge to the header
7872 from a latch node. Note that multiple natural loops
7873 may share the same header. */
7874 if (latch != ENTRY_BLOCK_PTR
7875 && TEST_BIT (dom[latch->index], header->index))
7876 {
7877 struct loop *loop;
7878
7879 loop = loops->array + num_loops;
7880
7881 loop->header = header;
7882 loop->latch = latch;
7883 loop->num = num_loops;
7884
7885 /* Keep track of blocks that are loop headers so
7886 that we can tell which loops should be merged. */
7887 if (TEST_BIT (headers, header->index))
7888 SET_BIT (loops->shared_headers, header->index);
7889 SET_BIT (headers, header->index);
7890
7891 /* Find nodes contained within the loop. */
7892 loop->nodes = sbitmap_alloc (n_basic_blocks);
7893 loop->num_nodes
7894 = flow_loop_nodes_find (header, latch, loop->nodes);
7895
7896 /* Compute first and last blocks within the loop.
7897 These are often the same as the loop header and
7898 loop latch respectively, but this is not always
7899 the case. */
7900 loop->first
7901 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7902 loop->last
7903 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7904
7905 /* Find edges which exit the loop. Note that a node
7906 may have several exit edges. */
7907 loop->num_exits
7908 = flow_loop_exits_find (loop->nodes, &loop->exits);
7909
7910 /* Look to see if the loop has a pre-header node. */
7911 loop->pre_header = flow_loop_pre_header_find (header, dom);
7912
7913 num_loops++;
7914 }
7915 }
7916 }
7917
7918 /* Natural loops with shared headers may either be disjoint or
7919 nested. Disjoint loops with shared headers cannot be inner
7920 loops and should be merged. For now just mark loops that share
7921 headers. */
7922 for (i = 0; i < num_loops; i++)
7923 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7924 loops->array[i].shared = 1;
7925
7926 sbitmap_free (headers);
7927 }
7928
7929 loops->num = num_loops;
7930
7931 /* Save CFG derived information to avoid recomputing it. */
7932 loops->cfg.dom = dom;
7933 loops->cfg.dfs_order = dfs_order;
7934 loops->cfg.rc_order = rc_order;
7935
7936 /* Build the loop hierarchy tree. */
7937 flow_loops_tree_build (loops);
7938
7939 /* Assign the loop nesting depth and enclosed loop level for each
7940 loop. */
7941 loops->levels = flow_loops_level_compute (loops);
7942
7943 return num_loops;
7944 }
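
/* A minimal usage sketch (purely illustrative; the function name and
   its FILE argument are hypothetical): find the natural loops and
   report each one's header, nesting depth and level to FILE.  Freeing
   of the loop and CFG data recorded in LOOPS is omitted here.  */

static void
flow_loops_dump_example (loops, file)
     struct loops *loops;
     FILE *file;
{
  int i;

  if (flow_loops_find (loops) == 0)
    return;

  for (i = 0; i < loops->num; i++)
    {
      struct loop *loop = &loops->array[i];

      fprintf (file,
               "loop %d: header block %d, depth %d, level %d, %d nodes\n",
               loop->num, loop->header->index, loop->depth, loop->level,
               loop->num_nodes);
    }
}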
7945
7946 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7947
7948 int
7949 flow_loop_outside_edge_p (loop, e)
7950 const struct loop *loop;
7951 edge e;
7952 {
7953 if (e->dest != loop->header)
7954 abort ();
7955 return (e->src == ENTRY_BLOCK_PTR)
7956 || ! TEST_BIT (loop->nodes, e->src->index);
7957 }
7958
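/* A minimal usage sketch (purely illustrative; the function name is
   hypothetical): count the edges that enter LOOP from outside of it.
   A loop that has a pre-header will typically report exactly one.  */

static int
flow_loop_entry_edge_count_example (loop)
     const struct loop *loop;
{
  edge e;
  int count = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      count++;

  return count;
}
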
7959 /* Clear LOG_LINKS fields of insns in a chain.
7960 Also clear the global_live_at_{start,end} fields of the basic block
7961 structures. */
7962
7963 void
7964 clear_log_links (insns)
7965 rtx insns;
7966 {
7967 rtx i;
7968 int b;
7969
7970 for (i = insns; i; i = NEXT_INSN (i))
7971 if (INSN_P (i))
7972 LOG_LINKS (i) = 0;
7973
7974 for (b = 0; b < n_basic_blocks; b++)
7975 {
7976 basic_block bb = BASIC_BLOCK (b);
7977
7978 bb->global_live_at_start = NULL;
7979 bb->global_live_at_end = NULL;
7980 }
7981
7982 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
7983 EXIT_BLOCK_PTR->global_live_at_start = NULL;
7984 }
7985
7986 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7987 correspond to the hard registers, if any, set in that map. This
7988 could be done far more efficiently with special cases that move whole
7989 words at a time, but it probably isn't worth the trouble. */
7990
7991 void
7992 reg_set_to_hard_reg_set (to, from)
7993 HARD_REG_SET *to;
7994 bitmap from;
7995 {
7996 int i;
7997
7998 EXECUTE_IF_SET_IN_BITMAP
7999 (from, 0, i,
8000 {
8001 if (i >= FIRST_PSEUDO_REGISTER)
8002 return;
8003 SET_HARD_REG_BIT (*to, i);
8004 });
8005 }
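
/* A minimal illustrative sketch (the function name is hypothetical and
   it is not used by this pass): compute the set of hard registers that
   are marked live in the register bitmap LIVE_REGS.  */

static void
live_hard_reg_set_example (live_regs, live)
     bitmap live_regs;
     HARD_REG_SET *live;
{
  /* Start from the empty set, then add each live hard register.  */
  CLEAR_HARD_REG_SET (*live);
  reg_set_to_hard_reg_set (live, live_regs);
}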