7bebfa803400904b00f9635618340fe254a80935
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
26
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
30
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
34
35 ** find_basic_blocks **
36
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
41
42 find_basic_blocks also finds any unreachable loops and deletes them.
43
44 ** life_analysis **
45
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
49
50 ** live-register info **
51
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
59
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
66
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
76 REG_DEAD notes.
77
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
84
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
88
89 ** Other actions of life_analysis **
90
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
93
94 life_analysis deletes insns whose only effect is to store a value
95 that is never used.
96
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
102
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
105
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
112
113 /* TODO:
114
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
118 - log links creation
119 - pre/post modify transformation
120 */
121 \f
122 #include "config.h"
123 #include "system.h"
124 #include "tree.h"
125 #include "rtl.h"
126 #include "tm_p.h"
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "insn-config.h"
130 #include "regs.h"
131 #include "flags.h"
132 #include "output.h"
133 #include "function.h"
134 #include "except.h"
135 #include "toplev.h"
136 #include "recog.h"
137 #include "insn-flags.h"
138 #include "expr.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146
147 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
148 the stack pointer does not matter. The value is tested only in
149 functions that have frame pointers.
150 No definition is equivalent to always zero. */
151 #ifndef EXIT_IGNORE_STACK
152 #define EXIT_IGNORE_STACK 0
153 #endif
154
155 #ifndef HAVE_epilogue
156 #define HAVE_epilogue 0
157 #endif
158 #ifndef HAVE_prologue
159 #define HAVE_prologue 0
160 #endif
161 #ifndef HAVE_sibcall_epilogue
162 #define HAVE_sibcall_epilogue 0
163 #endif
164
165 /* The contents of the current function definition are allocated
166 in this obstack, and all are freed at the end of the function.
167 For top-level functions, this is temporary_obstack.
168 Separate obstacks are made for nested functions. */
169
170 extern struct obstack *function_obstack;
171
172 /* Number of basic blocks in the current function. */
173
174 int n_basic_blocks;
175
176 /* Number of edges in the current function. */
177
178 int n_edges;
179
180 /* The basic block array. */
181
182 varray_type basic_block_info;
183
184 /* The special entry and exit blocks. */
185
186 struct basic_block_def entry_exit_blocks[2]
187 = {{NULL, /* head */
188 NULL, /* end */
189 NULL, /* pred */
190 NULL, /* succ */
191 NULL, /* local_set */
192 NULL, /* global_live_at_start */
193 NULL, /* global_live_at_end */
194 NULL, /* aux */
195 ENTRY_BLOCK, /* index */
196 0, /* loop_depth */
197 -1, -1, /* eh_beg, eh_end */
198 0 /* count */
199 },
200 {
201 NULL, /* head */
202 NULL, /* end */
203 NULL, /* pred */
204 NULL, /* succ */
205 NULL, /* local_set */
206 NULL, /* global_live_at_start */
207 NULL, /* global_live_at_end */
208 NULL, /* aux */
209 EXIT_BLOCK, /* index */
210 0, /* loop_depth */
211 -1, -1, /* eh_beg, eh_end */
212 0 /* count */
213 }
214 };
215
216 /* Nonzero if the second flow pass has completed. */
217 int flow2_completed;
218
219 /* Maximum register number used in this function, plus one. */
220
221 int max_regno;
222
223 /* Indexed by n, giving various register information */
224
225 varray_type reg_n_info;
226
227 /* Size of a regset for the current function,
228 in (1) bytes and (2) elements. */
229
230 int regset_bytes;
231 int regset_size;
232
233 /* Regset of regs live when calls to `setjmp'-like functions happen. */
234 /* ??? Does this exist only for the setjmp-clobbered warning message? */
235
236 regset regs_live_at_setjmp;
237
238 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
239 that have to go in the same hard reg.
240 The first two regs in the list are a pair, and the next two
241 are another pair, etc. */
242 rtx regs_may_share;
243
244 /* Set of registers that may be eliminable. These are handled specially
245 in updating regs_ever_live. */
246
247 static HARD_REG_SET elim_reg_set;
248
249 /* The basic block structure for every insn, indexed by uid. */
250
251 varray_type basic_block_for_insn;
252
253 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
254 /* ??? Should probably be using LABEL_NUSES instead. It would take a
255 bit of surgery to be able to use or co-opt the routines in jump. */
256
257 static rtx label_value_list;
258 static rtx tail_recursion_label_list;
259
260 /* Holds information for tracking conditional register life information. */
261 struct reg_cond_life_info
262 {
263 /* An EXPR_LIST of conditions under which a register is dead. */
264 rtx condition;
265
266 /* ??? Could store mask of bytes that are dead, so that we could finally
267 track lifetimes of multi-word registers accessed via subregs. */
268 };
269
270 /* For use in communicating between propagate_block and its subroutines.
271 Holds all information needed to compute life and def-use information. */
272
273 struct propagate_block_info
274 {
275 /* The basic block we're considering. */
276 basic_block bb;
277
278 /* Bit N is set if register N is conditionally or unconditionally live. */
279 regset reg_live;
280
281 /* Bit N is set if register N is set this insn. */
282 regset new_set;
283
284 /* Element N is the next insn that uses (hard or pseudo) register N
285 within the current basic block; or zero, if there is no such insn. */
286 rtx *reg_next_use;
287
288 /* Contains a list of all the MEMs we are tracking for dead store
289 elimination. */
290 rtx mem_set_list;
291
292 /* If non-null, record the set of registers set in the basic block. */
293 regset local_set;
294
295 #ifdef HAVE_conditional_execution
296 /* Indexed by register number, holds a reg_cond_life_info for each
297 register that is not unconditionally live or dead. */
298 splay_tree reg_cond_dead;
299
300 /* Bit N is set if register N is in an expression in reg_cond_dead. */
301 regset reg_cond_reg;
302 #endif
303
304 /* Non-zero if the value of CC0 is live. */
305 int cc0_live;
306
307 /* Flags controling the set of information propagate_block collects. */
308 int flags;
309 };
310
311 /* Forward declarations */
312 static int count_basic_blocks PARAMS ((rtx));
313 static void find_basic_blocks_1 PARAMS ((rtx));
314 static rtx find_label_refs PARAMS ((rtx, rtx));
315 static void clear_edges PARAMS ((void));
316 static void make_edges PARAMS ((rtx));
317 static void make_label_edge PARAMS ((sbitmap *, basic_block,
318 rtx, int));
319 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
320 basic_block, rtx, int));
321 static void mark_critical_edges PARAMS ((void));
322 static void move_stray_eh_region_notes PARAMS ((void));
323 static void record_active_eh_regions PARAMS ((rtx));
324
325 static void commit_one_edge_insertion PARAMS ((edge));
326
327 static void delete_unreachable_blocks PARAMS ((void));
328 static void delete_eh_regions PARAMS ((void));
329 static int can_delete_note_p PARAMS ((rtx));
330 static void expunge_block PARAMS ((basic_block));
331 static int can_delete_label_p PARAMS ((rtx));
332 static int tail_recursion_label_p PARAMS ((rtx));
333 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
334 basic_block));
335 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
336 basic_block));
337 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
338 static void try_merge_blocks PARAMS ((void));
339 static void tidy_fallthru_edges PARAMS ((void));
340 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
341 static void verify_wide_reg PARAMS ((int, rtx, rtx));
342 static void verify_local_live_at_start PARAMS ((regset, basic_block));
343 static int set_noop_p PARAMS ((rtx));
344 static int noop_move_p PARAMS ((rtx));
345 static void delete_noop_moves PARAMS ((rtx));
346 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
347 static void notice_stack_pointer_modification PARAMS ((rtx));
348 static void mark_reg PARAMS ((rtx, void *));
349 static void mark_regs_live_at_end PARAMS ((regset));
350 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
351 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
352 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
353 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
354 static int insn_dead_p PARAMS ((struct propagate_block_info *,
355 rtx, int, rtx));
356 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
357 rtx, rtx));
358 static void mark_set_regs PARAMS ((struct propagate_block_info *,
359 rtx, rtx));
360 static void mark_set_1 PARAMS ((struct propagate_block_info *,
361 enum rtx_code, rtx, rtx,
362 rtx, int));
363 #ifdef HAVE_conditional_execution
364 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
365 int, rtx));
366 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
367 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
368 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
369 int));
370 static rtx ior_reg_cond PARAMS ((rtx, rtx));
371 static rtx not_reg_cond PARAMS ((rtx));
372 static rtx nand_reg_cond PARAMS ((rtx, rtx));
373 #endif
374 #ifdef AUTO_INC_DEC
375 static void find_auto_inc PARAMS ((struct propagate_block_info *,
376 rtx, rtx));
377 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
378 rtx));
379 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
380 #endif
381 static void mark_used_reg PARAMS ((struct propagate_block_info *,
382 rtx, rtx, rtx));
383 static void mark_used_regs PARAMS ((struct propagate_block_info *,
384 rtx, rtx, rtx));
385 void dump_flow_info PARAMS ((FILE *));
386 void debug_flow_info PARAMS ((void));
387 static void dump_edge_info PARAMS ((FILE *, edge, int));
388
389 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
390 rtx));
391 static void remove_fake_successors PARAMS ((basic_block));
392 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
393 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
394 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
395 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
396 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
397 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
398 static int flow_depth_first_order_compute PARAMS ((int *));
399 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
400 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
401 static void flow_loops_tree_build PARAMS ((struct loops *));
402 static int flow_loop_level_compute PARAMS ((struct loop *, int));
403 static int flow_loops_level_compute PARAMS ((struct loops *));
404 \f
405 /* Find basic blocks of the current function.
406 F is the first insn of the function and NREGS the number of register
407 numbers in use. */
408
409 void
410 find_basic_blocks (f, nregs, file)
411 rtx f;
412 int nregs ATTRIBUTE_UNUSED;
413 FILE *file ATTRIBUTE_UNUSED;
414 {
415 int max_uid;
416
417 /* Flush out existing data. */
418 if (basic_block_info != NULL)
419 {
420 int i;
421
422 clear_edges ();
423
424 /* Clear bb->aux on all extant basic blocks. We'll use this as a
425 tag for reuse during create_basic_block, just in case some pass
426 copies around basic block notes improperly. */
427 for (i = 0; i < n_basic_blocks; ++i)
428 BASIC_BLOCK (i)->aux = NULL;
429
430 VARRAY_FREE (basic_block_info);
431 }
432
433 n_basic_blocks = count_basic_blocks (f);
434
435 /* Size the basic block table. The actual structures will be allocated
436 by find_basic_blocks_1, since we want to keep the structure pointers
437 stable across calls to find_basic_blocks. */
438 /* ??? This whole issue would be much simpler if we called find_basic_blocks
439 exactly once, and thereafter we don't have a single long chain of
440 instructions at all until close to the end of compilation when we
441 actually lay them out. */
442
443 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
444
445 find_basic_blocks_1 (f);
446
447 /* Record the block to which an insn belongs. */
448 /* ??? This should be done another way, by which (perhaps) a label is
449 tagged directly with the basic block that it starts. It is used for
450 more than that currently, but IMO that is the only valid use. */
451
452 max_uid = get_max_uid ();
453 #ifdef AUTO_INC_DEC
454 /* Leave space for insns life_analysis makes in some cases for auto-inc.
455 These cases are rare, so we don't need too much space. */
456 max_uid += max_uid / 10;
457 #endif
458
459 compute_bb_for_insn (max_uid);
460
461 /* Discover the edges of our cfg. */
462 record_active_eh_regions (f);
463 make_edges (label_value_list);
464
465 /* Do very simple cleanup now, for the benefit of code that runs between
466 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
467 tidy_fallthru_edges ();
468
469 mark_critical_edges ();
470
471 #ifdef ENABLE_CHECKING
472 verify_flow_info ();
473 #endif
474 }
475
476 /* Count the basic blocks of the function. */
477
478 static int
479 count_basic_blocks (f)
480 rtx f;
481 {
482 register rtx insn;
483 register RTX_CODE prev_code;
484 register int count = 0;
485 int eh_region = 0;
486 int call_had_abnormal_edge = 0;
487
488 prev_code = JUMP_INSN;
489 for (insn = f; insn; insn = NEXT_INSN (insn))
490 {
491 register RTX_CODE code = GET_CODE (insn);
492
493 if (code == CODE_LABEL
494 || (GET_RTX_CLASS (code) == 'i'
495 && (prev_code == JUMP_INSN
496 || prev_code == BARRIER
497 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
498 count++;
499
500 /* Record whether this call created an edge. */
501 if (code == CALL_INSN)
502 {
503 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
504 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
505
506 call_had_abnormal_edge = 0;
507
508 /* If there is an EH region or rethrow, we have an edge. */
509 if ((eh_region && region > 0)
510 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
511 call_had_abnormal_edge = 1;
512 else if (nonlocal_goto_handler_labels && region >= 0)
513 /* If there is a nonlocal goto label and the specified
514 region number isn't -1, we have an edge. (0 means
515 no throw, but might have a nonlocal goto). */
516 call_had_abnormal_edge = 1;
517 }
518
519 if (code != NOTE)
520 prev_code = code;
521 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
522 ++eh_region;
523 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
524 --eh_region;
525 }
526
527 /* The rest of the compiler works a bit smoother when we don't have to
528 check for the edge case of do-nothing functions with no basic blocks. */
529 if (count == 0)
530 {
531 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
532 count = 1;
533 }
534
535 return count;
536 }
537
538 /* Scan a list of insns for labels referred to other than by jumps.
539 This is used to scan the alternatives of a call placeholder. */
540 static rtx
541 find_label_refs (f, lvl)
542 rtx f;
543 rtx lvl;
544 {
545 rtx insn;
546
547 for (insn = f; insn; insn = NEXT_INSN (insn))
548 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
549 {
550 rtx note;
551
552 /* Make a list of all labels referred to other than by jumps
553 (which just don't have the REG_LABEL notes).
554
555 Make a special exception for labels followed by an ADDR*VEC,
556 as this would be a part of the tablejump setup code.
557
558 Make a special exception for the eh_return_stub_label, which
559 we know isn't part of any otherwise visible control flow. */
560
561 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
562 if (REG_NOTE_KIND (note) == REG_LABEL)
563 {
564 rtx lab = XEXP (note, 0), next;
565
566 if (lab == eh_return_stub_label)
567 ;
568 else if ((next = next_nonnote_insn (lab)) != NULL
569 && GET_CODE (next) == JUMP_INSN
570 && (GET_CODE (PATTERN (next)) == ADDR_VEC
571 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
572 ;
573 else if (GET_CODE (lab) == NOTE)
574 ;
575 else
576 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
577 }
578 }
579
580 return lvl;
581 }
582
583 /* Find all basic blocks of the function whose first insn is F.
584
585 Collect and return a list of labels whose addresses are taken. This
586 will be used in make_edges for use with computed gotos. */
587
588 static void
589 find_basic_blocks_1 (f)
590 rtx f;
591 {
592 register rtx insn, next;
593 int i = 0;
594 rtx bb_note = NULL_RTX;
595 rtx eh_list = NULL_RTX;
596 rtx lvl = NULL_RTX;
597 rtx trll = NULL_RTX;
598 rtx head = NULL_RTX;
599 rtx end = NULL_RTX;
600
601 /* We process the instructions in a slightly different way than we did
602 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
603 closed out the previous block, so that it gets attached at the proper
604 place. Since this form should be equivalent to the previous,
605 count_basic_blocks continues to use the old form as a check. */
606
607 for (insn = f; insn; insn = next)
608 {
609 enum rtx_code code = GET_CODE (insn);
610
611 next = NEXT_INSN (insn);
612
613 switch (code)
614 {
615 case NOTE:
616 {
617 int kind = NOTE_LINE_NUMBER (insn);
618
619 /* Keep a LIFO list of the currently active exception notes. */
620 if (kind == NOTE_INSN_EH_REGION_BEG)
621 eh_list = alloc_INSN_LIST (insn, eh_list);
622 else if (kind == NOTE_INSN_EH_REGION_END)
623 {
624 rtx t = eh_list;
625
626 eh_list = XEXP (eh_list, 1);
627 free_INSN_LIST_node (t);
628 }
629
630 /* Look for basic block notes with which to keep the
631 basic_block_info pointers stable. Unthread the note now;
632 we'll put it back at the right place in create_basic_block.
633 Or not at all if we've already found a note in this block. */
634 else if (kind == NOTE_INSN_BASIC_BLOCK)
635 {
636 if (bb_note == NULL_RTX)
637 bb_note = insn;
638 else
639 next = flow_delete_insn (insn);
640 }
641 break;
642 }
643
644 case CODE_LABEL:
645 /* A basic block starts at a label. If we've closed one off due
646 to a barrier or some such, no need to do it again. */
647 if (head != NULL_RTX)
648 {
649 /* While we now have edge lists with which other portions of
650 the compiler might determine a call ending a basic block
651 does not imply an abnormal edge, it will be a bit before
652 everything can be updated. So continue to emit a noop at
653 the end of such a block. */
654 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
655 {
656 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
657 end = emit_insn_after (nop, end);
658 }
659
660 create_basic_block (i++, head, end, bb_note);
661 bb_note = NULL_RTX;
662 }
663
664 head = end = insn;
665 break;
666
667 case JUMP_INSN:
668 /* A basic block ends at a jump. */
669 if (head == NULL_RTX)
670 head = insn;
671 else
672 {
673 /* ??? Make a special check for table jumps. The way this
674 happens is truly and amazingly gross. We are about to
675 create a basic block that contains just a code label and
676 an addr*vec jump insn. Worse, an addr_diff_vec creates
677 its own natural loop.
678
679 Prevent this bit of brain damage, pasting things together
680 correctly in make_edges.
681
682 The correct solution involves emitting the table directly
683 on the tablejump instruction as a note, or JUMP_LABEL. */
684
685 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
686 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
687 {
688 head = end = NULL;
689 n_basic_blocks--;
690 break;
691 }
692 }
693 end = insn;
694 goto new_bb_inclusive;
695
696 case BARRIER:
697 /* A basic block ends at a barrier. It may be that an unconditional
698 jump already closed the basic block -- no need to do it again. */
699 if (head == NULL_RTX)
700 break;
701
702 /* While we now have edge lists with which other portions of the
703 compiler might determine a call ending a basic block does not
704 imply an abnormal edge, it will be a bit before everything can
705 be updated. So continue to emit a noop at the end of such a
706 block. */
707 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
708 {
709 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
710 end = emit_insn_after (nop, end);
711 }
712 goto new_bb_exclusive;
713
714 case CALL_INSN:
715 {
716 /* Record whether this call created an edge. */
717 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
718 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
719 int call_has_abnormal_edge = 0;
720
721 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
722 {
723 /* Scan each of the alternatives for label refs. */
724 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
725 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
726 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
727 /* Record its tail recursion label, if any. */
728 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
729 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
730 }
731
732 /* If there is an EH region or rethrow, we have an edge. */
733 if ((eh_list && region > 0)
734 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
735 call_has_abnormal_edge = 1;
736 else if (nonlocal_goto_handler_labels && region >= 0)
737 /* If there is a nonlocal goto label and the specified
738 region number isn't -1, we have an edge. (0 means
739 no throw, but might have a nonlocal goto). */
740 call_has_abnormal_edge = 1;
741
742 /* A basic block ends at a call that can either throw or
743 do a non-local goto. */
744 if (call_has_abnormal_edge)
745 {
746 new_bb_inclusive:
747 if (head == NULL_RTX)
748 head = insn;
749 end = insn;
750
751 new_bb_exclusive:
752 create_basic_block (i++, head, end, bb_note);
753 head = end = NULL_RTX;
754 bb_note = NULL_RTX;
755 break;
756 }
757 }
758 /* FALLTHRU */
759
760 default:
761 if (GET_RTX_CLASS (code) == 'i')
762 {
763 if (head == NULL_RTX)
764 head = insn;
765 end = insn;
766 }
767 break;
768 }
769
770 if (GET_RTX_CLASS (code) == 'i')
771 {
772 rtx note;
773
774 /* Make a list of all labels referred to other than by jumps
775 (which just don't have the REG_LABEL notes).
776
777 Make a special exception for labels followed by an ADDR*VEC,
778 as this would be a part of the tablejump setup code.
779
780 Make a special exception for the eh_return_stub_label, which
781 we know isn't part of any otherwise visible control flow. */
782
783 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
784 if (REG_NOTE_KIND (note) == REG_LABEL)
785 {
786 rtx lab = XEXP (note, 0), next;
787
788 if (lab == eh_return_stub_label)
789 ;
790 else if ((next = next_nonnote_insn (lab)) != NULL
791 && GET_CODE (next) == JUMP_INSN
792 && (GET_CODE (PATTERN (next)) == ADDR_VEC
793 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
794 ;
795 else if (GET_CODE (lab) == NOTE)
796 ;
797 else
798 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
799 }
800 }
801 }
802
803 if (head != NULL_RTX)
804 create_basic_block (i++, head, end, bb_note);
805 else if (bb_note)
806 flow_delete_insn (bb_note);
807
808 if (i != n_basic_blocks)
809 abort ();
810
811 label_value_list = lvl;
812 tail_recursion_label_list = trll;
813 }
814
815 /* Tidy the CFG by deleting unreachable code and whatnot. */
816
817 void
818 cleanup_cfg (f)
819 rtx f;
820 {
821 delete_unreachable_blocks ();
822 move_stray_eh_region_notes ();
823 record_active_eh_regions (f);
824 try_merge_blocks ();
825 mark_critical_edges ();
826
827 /* Kill the data we won't maintain. */
828 free_EXPR_LIST_list (&label_value_list);
829 free_EXPR_LIST_list (&tail_recursion_label_list);
830 }
831
832 /* Create a new basic block consisting of the instructions between
833 HEAD and END inclusive. Reuses the note and basic block struct
834 in BB_NOTE, if any. */
835
836 void
837 create_basic_block (index, head, end, bb_note)
838 int index;
839 rtx head, end, bb_note;
840 {
841 basic_block bb;
842
843 if (bb_note
844 && ! RTX_INTEGRATED_P (bb_note)
845 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
846 && bb->aux == NULL)
847 {
848 /* If we found an existing note, thread it back onto the chain. */
849
850 rtx after;
851
852 if (GET_CODE (head) == CODE_LABEL)
853 after = head;
854 else
855 {
856 after = PREV_INSN (head);
857 head = bb_note;
858 }
859
860 if (after != bb_note && NEXT_INSN (after) != bb_note)
861 reorder_insns (bb_note, bb_note, after);
862 }
863 else
864 {
865 /* Otherwise we must create a note and a basic block structure.
866 Since we allow basic block structs in rtl, give the struct
867 the same lifetime by allocating it off the function obstack
868 rather than using malloc. */
869
870 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
871 memset (bb, 0, sizeof (*bb));
872
873 if (GET_CODE (head) == CODE_LABEL)
874 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
875 else
876 {
877 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
878 head = bb_note;
879 }
880 NOTE_BASIC_BLOCK (bb_note) = bb;
881 }
882
883 /* Always include the bb note in the block. */
884 if (NEXT_INSN (end) == bb_note)
885 end = bb_note;
886
887 bb->head = head;
888 bb->end = end;
889 bb->index = index;
890 BASIC_BLOCK (index) = bb;
891
892 /* Tag the block so that we know it has been used when considering
893 other basic block notes. */
894 bb->aux = bb;
895 }
896 \f
897 /* Records the basic block struct in BB_FOR_INSN, for every instruction
898 indexed by INSN_UID. MAX is the size of the array. */
899
900 void
901 compute_bb_for_insn (max)
902 int max;
903 {
904 int i;
905
906 if (basic_block_for_insn)
907 VARRAY_FREE (basic_block_for_insn);
908 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
909
910 for (i = 0; i < n_basic_blocks; ++i)
911 {
912 basic_block bb = BASIC_BLOCK (i);
913 rtx insn, end;
914
915 end = bb->end;
916 insn = bb->head;
917 while (1)
918 {
919 int uid = INSN_UID (insn);
920 if (uid < max)
921 VARRAY_BB (basic_block_for_insn, uid) = bb;
922 if (insn == end)
923 break;
924 insn = NEXT_INSN (insn);
925 }
926 }
927 }
928
929 /* Free the memory associated with the edge structures. */
930
931 static void
932 clear_edges ()
933 {
934 int i;
935 edge n, e;
936
937 for (i = 0; i < n_basic_blocks; ++i)
938 {
939 basic_block bb = BASIC_BLOCK (i);
940
941 for (e = bb->succ; e ; e = n)
942 {
943 n = e->succ_next;
944 free (e);
945 }
946
947 bb->succ = 0;
948 bb->pred = 0;
949 }
950
951 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
952 {
953 n = e->succ_next;
954 free (e);
955 }
956
957 ENTRY_BLOCK_PTR->succ = 0;
958 EXIT_BLOCK_PTR->pred = 0;
959
960 n_edges = 0;
961 }
962
963 /* Identify the edges between basic blocks.
964
965 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
966 that are otherwise unreachable may be reachable with a non-local goto.
967
968 BB_EH_END is an array indexed by basic block number in which we record
969 the list of exception regions active at the end of the basic block. */
970
971 static void
972 make_edges (label_value_list)
973 rtx label_value_list;
974 {
975 int i;
976 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
977 sbitmap *edge_cache = NULL;
978
979 /* Assume no computed jump; revise as we create edges. */
980 current_function_has_computed_jump = 0;
981
982 /* Heavy use of computed goto in machine-generated code can lead to
983 nearly fully-connected CFGs. In that case we spend a significant
984 amount of time searching the edge lists for duplicates. */
985 if (forced_labels || label_value_list)
986 {
987 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
988 sbitmap_vector_zero (edge_cache, n_basic_blocks);
989 }
990
991 /* By nature of the way these get numbered, block 0 is always the entry. */
992 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
993
994 for (i = 0; i < n_basic_blocks; ++i)
995 {
996 basic_block bb = BASIC_BLOCK (i);
997 rtx insn, x;
998 enum rtx_code code;
999 int force_fallthru = 0;
1000
1001 /* Examine the last instruction of the block, and discover the
1002 ways we can leave the block. */
1003
1004 insn = bb->end;
1005 code = GET_CODE (insn);
1006
1007 /* A branch. */
1008 if (code == JUMP_INSN)
1009 {
1010 rtx tmp;
1011
1012 /* ??? Recognize a tablejump and do the right thing. */
1013 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1014 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1015 && GET_CODE (tmp) == JUMP_INSN
1016 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1017 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1018 {
1019 rtvec vec;
1020 int j;
1021
1022 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1023 vec = XVEC (PATTERN (tmp), 0);
1024 else
1025 vec = XVEC (PATTERN (tmp), 1);
1026
1027 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1028 make_label_edge (edge_cache, bb,
1029 XEXP (RTVEC_ELT (vec, j), 0), 0);
1030
1031 /* Some targets (eg, ARM) emit a conditional jump that also
1032 contains the out-of-range target. Scan for these and
1033 add an edge if necessary. */
1034 if ((tmp = single_set (insn)) != NULL
1035 && SET_DEST (tmp) == pc_rtx
1036 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1037 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1038 make_label_edge (edge_cache, bb,
1039 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1040
1041 #ifdef CASE_DROPS_THROUGH
1042 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1043 us naturally detecting fallthru into the next block. */
1044 force_fallthru = 1;
1045 #endif
1046 }
1047
1048 /* If this is a computed jump, then mark it as reaching
1049 everything on the label_value_list and forced_labels list. */
1050 else if (computed_jump_p (insn))
1051 {
1052 current_function_has_computed_jump = 1;
1053
1054 for (x = label_value_list; x; x = XEXP (x, 1))
1055 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1056
1057 for (x = forced_labels; x; x = XEXP (x, 1))
1058 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1059 }
1060
1061 /* Returns create an exit out. */
1062 else if (returnjump_p (insn))
1063 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1064
1065 /* Otherwise, we have a plain conditional or unconditional jump. */
1066 else
1067 {
1068 if (! JUMP_LABEL (insn))
1069 abort ();
1070 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1071 }
1072 }
1073
1074 /* If this is a sibling call insn, then this is in effect a
1075 combined call and return, and so we need an edge to the
1076 exit block. No need to worry about EH edges, since we
1077 wouldn't have created the sibling call in the first place. */
1078
1079 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1080 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1081 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1082 else
1083
1084 /* If this is a CALL_INSN, then mark it as reaching the active EH
1085 handler for this CALL_INSN. If we're handling asynchronous
1086 exceptions then any insn can reach any of the active handlers.
1087
1088 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1089
1090 if (code == CALL_INSN || asynchronous_exceptions)
1091 {
1092 /* Add any appropriate EH edges. We do this unconditionally
1093 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1094 on the call, and this needn't be within an EH region. */
1095 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1096
1097 /* If we have asynchronous exceptions, do the same for *all*
1098 exception regions active in the block. */
1099 if (asynchronous_exceptions
1100 && bb->eh_beg != bb->eh_end)
1101 {
1102 if (bb->eh_beg >= 0)
1103 make_eh_edge (edge_cache, eh_nest_info, bb,
1104 NULL_RTX, bb->eh_beg);
1105
1106 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1107 if (GET_CODE (x) == NOTE
1108 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1109 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1110 {
1111 int region = NOTE_EH_HANDLER (x);
1112 make_eh_edge (edge_cache, eh_nest_info, bb,
1113 NULL_RTX, region);
1114 }
1115 }
1116
1117 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1118 {
1119 /* ??? This could be made smarter: in some cases it's possible
1120 to tell that certain calls will not do a nonlocal goto.
1121
1122 For example, if the nested functions that do the nonlocal
1123 gotos do not have their addresses taken, then only calls to
1124 those functions or to other nested functions that use them
1125 could possibly do nonlocal gotos. */
1126 /* We do know that a REG_EH_REGION note with a value less
1127 than 0 is guaranteed not to perform a non-local goto. */
1128 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1129 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1130 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1131 make_label_edge (edge_cache, bb, XEXP (x, 0),
1132 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1133 }
1134 }
1135
1136 /* We know something about the structure of the function __throw in
1137 libgcc2.c. It is the only function that ever contains eh_stub
1138 labels. It modifies its return address so that the last block
1139 returns to one of the eh_stub labels within it. So we have to
1140 make additional edges in the flow graph. */
1141 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1142 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1143
1144 /* Find out if we can drop through to the next block. */
1145 insn = next_nonnote_insn (insn);
1146 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1147 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1148 else if (i + 1 < n_basic_blocks)
1149 {
1150 rtx tmp = BLOCK_HEAD (i + 1);
1151 if (GET_CODE (tmp) == NOTE)
1152 tmp = next_nonnote_insn (tmp);
1153 if (force_fallthru || insn == tmp)
1154 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1155 }
1156 }
1157
1158 free_eh_nesting_info (eh_nest_info);
1159 if (edge_cache)
1160 sbitmap_vector_free (edge_cache);
1161 }
1162
1163 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1164 about the edge that is accumulated between calls. */
1165
1166 void
1167 make_edge (edge_cache, src, dst, flags)
1168 sbitmap *edge_cache;
1169 basic_block src, dst;
1170 int flags;
1171 {
1172 int use_edge_cache;
1173 edge e;
1174
1175 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1176 many edges to them, and we didn't allocate memory for it. */
1177 use_edge_cache = (edge_cache
1178 && src != ENTRY_BLOCK_PTR
1179 && dst != EXIT_BLOCK_PTR);
1180
1181 /* Make sure we don't add duplicate edges. */
1182 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1183 for (e = src->succ; e ; e = e->succ_next)
1184 if (e->dest == dst)
1185 {
1186 e->flags |= flags;
1187 return;
1188 }
1189
1190 e = (edge) xcalloc (1, sizeof (*e));
1191 n_edges++;
1192
1193 e->succ_next = src->succ;
1194 e->pred_next = dst->pred;
1195 e->src = src;
1196 e->dest = dst;
1197 e->flags = flags;
1198
1199 src->succ = e;
1200 dst->pred = e;
1201
1202 if (use_edge_cache)
1203 SET_BIT (edge_cache[src->index], dst->index);
1204 }
1205
1206 /* Create an edge from a basic block to a label. */
1207
1208 static void
1209 make_label_edge (edge_cache, src, label, flags)
1210 sbitmap *edge_cache;
1211 basic_block src;
1212 rtx label;
1213 int flags;
1214 {
1215 if (GET_CODE (label) != CODE_LABEL)
1216 abort ();
1217
1218 /* If the label was never emitted, this insn is junk, but avoid a
1219 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1220 as a result of a syntax error and a diagnostic has already been
1221 printed. */
1222
1223 if (INSN_UID (label) == 0)
1224 return;
1225
1226 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1227 }
1228
1229 /* Create the edges generated by INSN in REGION. */
1230
1231 static void
1232 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1233 sbitmap *edge_cache;
1234 eh_nesting_info *eh_nest_info;
1235 basic_block src;
1236 rtx insn;
1237 int region;
1238 {
1239 handler_info **handler_list;
1240 int num, is_call;
1241
1242 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1243 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1244 while (--num >= 0)
1245 {
1246 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1247 EDGE_ABNORMAL | EDGE_EH | is_call);
1248 }
1249 }
1250
1251 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1252 dangerous if we intend to move basic blocks around. Move such notes
1253 into the following block. */
1254
1255 static void
1256 move_stray_eh_region_notes ()
1257 {
1258 int i;
1259 basic_block b1, b2;
1260
1261 if (n_basic_blocks < 2)
1262 return;
1263
1264 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1265 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1266 {
1267 rtx insn, next, list = NULL_RTX;
1268
1269 b1 = BASIC_BLOCK (i);
1270 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1271 {
1272 next = NEXT_INSN (insn);
1273 if (GET_CODE (insn) == NOTE
1274 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1275 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1276 {
1277 /* Unlink from the insn chain. */
1278 NEXT_INSN (PREV_INSN (insn)) = next;
1279 PREV_INSN (next) = PREV_INSN (insn);
1280
1281 /* Queue it. */
1282 NEXT_INSN (insn) = list;
1283 list = insn;
1284 }
1285 }
1286
1287 if (list == NULL_RTX)
1288 continue;
1289
1290 /* Find where to insert these things. */
1291 insn = b2->head;
1292 if (GET_CODE (insn) == CODE_LABEL)
1293 insn = NEXT_INSN (insn);
1294
1295 while (list)
1296 {
1297 next = NEXT_INSN (list);
1298 add_insn_after (list, insn);
1299 list = next;
1300 }
1301 }
1302 }
1303
1304 /* Recompute eh_beg/eh_end for each basic block. */
1305
1306 static void
1307 record_active_eh_regions (f)
1308 rtx f;
1309 {
1310 rtx insn, eh_list = NULL_RTX;
1311 int i = 0;
1312 basic_block bb = BASIC_BLOCK (0);
1313
1314 for (insn = f; insn ; insn = NEXT_INSN (insn))
1315 {
1316 if (bb->head == insn)
1317 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1318
1319 if (GET_CODE (insn) == NOTE)
1320 {
1321 int kind = NOTE_LINE_NUMBER (insn);
1322 if (kind == NOTE_INSN_EH_REGION_BEG)
1323 eh_list = alloc_INSN_LIST (insn, eh_list);
1324 else if (kind == NOTE_INSN_EH_REGION_END)
1325 {
1326 rtx t = XEXP (eh_list, 1);
1327 free_INSN_LIST_node (eh_list);
1328 eh_list = t;
1329 }
1330 }
1331
1332 if (bb->end == insn)
1333 {
1334 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1335 i += 1;
1336 if (i == n_basic_blocks)
1337 break;
1338 bb = BASIC_BLOCK (i);
1339 }
1340 }
1341 }
1342
1343 /* Identify critical edges and set the bits appropriately. */
1344
1345 static void
1346 mark_critical_edges ()
1347 {
1348 int i, n = n_basic_blocks;
1349 basic_block bb;
1350
1351 /* We begin with the entry block. This is not terribly important now,
1352 but could be if a front end (Fortran) implemented alternate entry
1353 points. */
1354 bb = ENTRY_BLOCK_PTR;
1355 i = -1;
1356
1357 while (1)
1358 {
1359 edge e;
1360
1361 /* (1) Critical edges must have a source with multiple successors. */
1362 if (bb->succ && bb->succ->succ_next)
1363 {
1364 for (e = bb->succ; e ; e = e->succ_next)
1365 {
1366 /* (2) Critical edges must have a destination with multiple
1367 predecessors. Note that we know there is at least one
1368 predecessor -- the edge we followed to get here. */
1369 if (e->dest->pred->pred_next)
1370 e->flags |= EDGE_CRITICAL;
1371 else
1372 e->flags &= ~EDGE_CRITICAL;
1373 }
1374 }
1375 else
1376 {
1377 for (e = bb->succ; e ; e = e->succ_next)
1378 e->flags &= ~EDGE_CRITICAL;
1379 }
1380
1381 if (++i >= n)
1382 break;
1383 bb = BASIC_BLOCK (i);
1384 }
1385 }
1386 \f
1387 /* Split a (typically critical) edge. Return the new block.
1388 Abort on abnormal edges.
1389
1390 ??? The code generally expects to be called on critical edges.
1391 The case of a block ending in an unconditional jump to a
1392 block with multiple predecessors is not handled optimally. */
1393
1394 basic_block
1395 split_edge (edge_in)
1396 edge edge_in;
1397 {
1398 basic_block old_pred, bb, old_succ;
1399 edge edge_out;
1400 rtx bb_note;
1401 int i, j;
1402
1403 /* Abnormal edges cannot be split. */
1404 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1405 abort ();
1406
1407 old_pred = edge_in->src;
1408 old_succ = edge_in->dest;
1409
1410 /* Remove the existing edge from the destination's pred list. */
1411 {
1412 edge *pp;
1413 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1414 continue;
1415 *pp = edge_in->pred_next;
1416 edge_in->pred_next = NULL;
1417 }
1418
1419 /* Create the new structures. */
1420 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1421 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1422 n_edges++;
1423
1424 memset (bb, 0, sizeof (*bb));
1425
1426 /* ??? This info is likely going to be out of date very soon. */
1427 if (old_succ->global_live_at_start)
1428 {
1429 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1430 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1431 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1432 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1433 }
1434
1435 /* Wire them up. */
1436 bb->pred = edge_in;
1437 bb->succ = edge_out;
1438 bb->count = edge_in->count;
1439
1440 edge_in->dest = bb;
1441 edge_in->flags &= ~EDGE_CRITICAL;
1442
1443 edge_out->pred_next = old_succ->pred;
1444 edge_out->succ_next = NULL;
1445 edge_out->src = bb;
1446 edge_out->dest = old_succ;
1447 edge_out->flags = EDGE_FALLTHRU;
1448 edge_out->probability = REG_BR_PROB_BASE;
1449 edge_out->count = edge_in->count;
1450
1451 old_succ->pred = edge_out;
1452
1453 /* Tricky case -- if there existed a fallthru into the successor
1454 (and we're not it) we must add a new unconditional jump around
1455 the new block we're actually interested in.
1456
1457 Further, if that edge is critical, this means a second new basic
1458 block must be created to hold it. In order to simplify correct
1459 insn placement, do this before we touch the existing basic block
1460 ordering for the block we were really wanting. */
1461 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1462 {
1463 edge e;
1464 for (e = edge_out->pred_next; e ; e = e->pred_next)
1465 if (e->flags & EDGE_FALLTHRU)
1466 break;
1467
1468 if (e)
1469 {
1470 basic_block jump_block;
1471 rtx pos;
1472
1473 if ((e->flags & EDGE_CRITICAL) == 0
1474 && e->src != ENTRY_BLOCK_PTR)
1475 {
1476 /* Non critical -- we can simply add a jump to the end
1477 of the existing predecessor. */
1478 jump_block = e->src;
1479 }
1480 else
1481 {
1482 /* We need a new block to hold the jump. The simplest
1483 way to do the bulk of the work here is to recursively
1484 call ourselves. */
1485 jump_block = split_edge (e);
1486 e = jump_block->succ;
1487 }
1488
1489 /* Now add the jump insn ... */
1490 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1491 jump_block->end);
1492 jump_block->end = pos;
1493 if (basic_block_for_insn)
1494 set_block_for_insn (pos, jump_block);
1495 emit_barrier_after (pos);
1496
1497 /* ... let jump know that label is in use, ... */
1498 JUMP_LABEL (pos) = old_succ->head;
1499 ++LABEL_NUSES (old_succ->head);
1500
1501 /* ... and clear fallthru on the outgoing edge. */
1502 e->flags &= ~EDGE_FALLTHRU;
1503
1504 /* Continue splitting the interesting edge. */
1505 }
1506 }
1507
1508 /* Place the new block just in front of the successor. */
1509 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1510 if (old_succ == EXIT_BLOCK_PTR)
1511 j = n_basic_blocks - 1;
1512 else
1513 j = old_succ->index;
1514 for (i = n_basic_blocks - 1; i > j; --i)
1515 {
1516 basic_block tmp = BASIC_BLOCK (i - 1);
1517 BASIC_BLOCK (i) = tmp;
1518 tmp->index = i;
1519 }
1520 BASIC_BLOCK (i) = bb;
1521 bb->index = i;
1522
1523 /* Create the basic block note.
1524
1525 Where we place the note can have a noticable impact on the generated
1526 code. Consider this cfg:
1527
1528
1529 E
1530 |
1531 0
1532 / \
1533 +->1-->2--->E
1534 | |
1535 +--+
1536
1537 If we need to insert an insn on the edge from block 0 to block 1,
1538 we want to ensure the instructions we insert are outside of any
1539 loop notes that physically sit between block 0 and block 1. Otherwise
1540 we confuse the loop optimizer into thinking the loop is a phony. */
1541 if (old_succ != EXIT_BLOCK_PTR
1542 && PREV_INSN (old_succ->head)
1543 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1544 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1545 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1546 PREV_INSN (old_succ->head));
1547 else if (old_succ != EXIT_BLOCK_PTR)
1548 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1549 else
1550 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1551 NOTE_BASIC_BLOCK (bb_note) = bb;
1552 bb->head = bb->end = bb_note;
1553
1554 /* Not quite simple -- for non-fallthru edges, we must adjust the
1555 predecessor's jump instruction to target our new block. */
1556 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1557 {
1558 rtx tmp, insn = old_pred->end;
1559 rtx old_label = old_succ->head;
1560 rtx new_label = gen_label_rtx ();
1561
1562 if (GET_CODE (insn) != JUMP_INSN)
1563 abort ();
1564
1565 /* ??? Recognize a tablejump and adjust all matching cases. */
1566 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1567 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1568 && GET_CODE (tmp) == JUMP_INSN
1569 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1570 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1571 {
1572 rtvec vec;
1573 int j;
1574
1575 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1576 vec = XVEC (PATTERN (tmp), 0);
1577 else
1578 vec = XVEC (PATTERN (tmp), 1);
1579
1580 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1581 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1582 {
1583 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1584 --LABEL_NUSES (old_label);
1585 ++LABEL_NUSES (new_label);
1586 }
1587
1588 /* Handle casesi dispatch insns */
1589 if ((tmp = single_set (insn)) != NULL
1590 && SET_DEST (tmp) == pc_rtx
1591 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1592 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1593 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1594 {
1595 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1596 new_label);
1597 --LABEL_NUSES (old_label);
1598 ++LABEL_NUSES (new_label);
1599 }
1600 }
1601 else
1602 {
1603 /* This would have indicated an abnormal edge. */
1604 if (computed_jump_p (insn))
1605 abort ();
1606
1607 /* A return instruction can't be redirected. */
1608 if (returnjump_p (insn))
1609 abort ();
1610
1611 /* If the insn doesn't go where we think, we're confused. */
1612 if (JUMP_LABEL (insn) != old_label)
1613 abort ();
1614
1615 redirect_jump (insn, new_label, 0);
1616 }
1617
1618 emit_label_before (new_label, bb_note);
1619 bb->head = new_label;
1620 }
1621
1622 return bb;
1623 }
1624
1625 /* Queue instructions for insertion on an edge between two basic blocks.
1626 The new instructions and basic blocks (if any) will not appear in the
1627 CFG until commit_edge_insertions is called. */
1628
1629 void
1630 insert_insn_on_edge (pattern, e)
1631 rtx pattern;
1632 edge e;
1633 {
1634 /* We cannot insert instructions on an abnormal critical edge.
1635 It will be easier to find the culprit if we die now. */
1636 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1637 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1638 abort ();
1639
1640 if (e->insns == NULL_RTX)
1641 start_sequence ();
1642 else
1643 push_to_sequence (e->insns);
1644
1645 emit_insn (pattern);
1646
1647 e->insns = get_insns ();
1648 end_sequence();
1649 }
1650
1651 /* Update the CFG for the instructions queued on edge E. */
1652
1653 static void
1654 commit_one_edge_insertion (e)
1655 edge e;
1656 {
1657 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1658 basic_block bb;
1659
1660 /* Pull the insns off the edge now since the edge might go away. */
1661 insns = e->insns;
1662 e->insns = NULL_RTX;
1663
1664 /* Figure out where to put these things. If the destination has
1665 one predecessor, insert there. Except for the exit block. */
1666 if (e->dest->pred->pred_next == NULL
1667 && e->dest != EXIT_BLOCK_PTR)
1668 {
1669 bb = e->dest;
1670
1671 /* Get the location correct wrt a code label, and "nice" wrt
1672 a basic block note, and before everything else. */
1673 tmp = bb->head;
1674 if (GET_CODE (tmp) == CODE_LABEL)
1675 tmp = NEXT_INSN (tmp);
1676 if (GET_CODE (tmp) == NOTE
1677 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1678 tmp = NEXT_INSN (tmp);
1679 if (tmp == bb->head)
1680 before = tmp;
1681 else
1682 after = PREV_INSN (tmp);
1683 }
1684
1685 /* If the source has one successor and the edge is not abnormal,
1686 insert there. Except for the entry block. */
1687 else if ((e->flags & EDGE_ABNORMAL) == 0
1688 && e->src->succ->succ_next == NULL
1689 && e->src != ENTRY_BLOCK_PTR)
1690 {
1691 bb = e->src;
1692 /* It is possible to have a non-simple jump here. Consider a target
1693 where some forms of unconditional jumps clobber a register. This
1694 happens on the fr30 for example.
1695
1696 We know this block has a single successor, so we can just emit
1697 the queued insns before the jump. */
1698 if (GET_CODE (bb->end) == JUMP_INSN)
1699 {
1700 before = bb->end;
1701 }
1702 else
1703 {
1704 /* We'd better be fallthru, or we've lost track of what's what. */
1705 if ((e->flags & EDGE_FALLTHRU) == 0)
1706 abort ();
1707
1708 after = bb->end;
1709 }
1710 }
1711
1712 /* Otherwise we must split the edge. */
1713 else
1714 {
1715 bb = split_edge (e);
1716 after = bb->end;
1717 }
1718
1719 /* Now that we've found the spot, do the insertion. */
1720
1721 /* Set the new block number for these insns, if structure is allocated. */
1722 if (basic_block_for_insn)
1723 {
1724 rtx i;
1725 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1726 set_block_for_insn (i, bb);
1727 }
1728
1729 if (before)
1730 {
1731 emit_insns_before (insns, before);
1732 if (before == bb->head)
1733 bb->head = insns;
1734
1735 last = prev_nonnote_insn (before);
1736 }
1737 else
1738 {
1739 last = emit_insns_after (insns, after);
1740 if (after == bb->end)
1741 bb->end = last;
1742 }
1743
1744 if (returnjump_p (last))
1745 {
1746 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1747 This is not currently a problem because this only happens
1748 for the (single) epilogue, which already has a fallthru edge
1749 to EXIT. */
1750
1751 e = bb->succ;
1752 if (e->dest != EXIT_BLOCK_PTR
1753 || e->succ_next != NULL
1754 || (e->flags & EDGE_FALLTHRU) == 0)
1755 abort ();
1756 e->flags &= ~EDGE_FALLTHRU;
1757
1758 emit_barrier_after (last);
1759 bb->end = last;
1760
1761 if (before)
1762 flow_delete_insn (before);
1763 }
1764 else if (GET_CODE (last) == JUMP_INSN)
1765 abort ();
1766 }
1767
1768 /* Update the CFG for all queued instructions. */
1769
1770 void
1771 commit_edge_insertions ()
1772 {
1773 int i;
1774 basic_block bb;
1775
1776 #ifdef ENABLE_CHECKING
1777 verify_flow_info ();
1778 #endif
1779
1780 i = -1;
1781 bb = ENTRY_BLOCK_PTR;
1782 while (1)
1783 {
1784 edge e, next;
1785
1786 for (e = bb->succ; e ; e = next)
1787 {
1788 next = e->succ_next;
1789 if (e->insns)
1790 commit_one_edge_insertion (e);
1791 }
1792
1793 if (++i >= n_basic_blocks)
1794 break;
1795 bb = BASIC_BLOCK (i);
1796 }
1797 }
1798 \f
1799 /* Delete all unreachable basic blocks. */
1800
1801 static void
1802 delete_unreachable_blocks ()
1803 {
1804 basic_block *worklist, *tos;
1805 int deleted_handler;
1806 edge e;
1807 int i, n;
1808
1809 n = n_basic_blocks;
1810 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1811
1812 /* Use basic_block->aux as a marker. Clear them all. */
1813
1814 for (i = 0; i < n; ++i)
1815 BASIC_BLOCK (i)->aux = NULL;
1816
1817 /* Add our starting points to the worklist. Almost always there will
1818 be only one. It isn't inconcievable that we might one day directly
1819 support Fortran alternate entry points. */
1820
1821 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1822 {
1823 *tos++ = e->dest;
1824
1825 /* Mark the block with a handy non-null value. */
1826 e->dest->aux = e;
1827 }
1828
1829 /* Iterate: find everything reachable from what we've already seen. */
1830
1831 while (tos != worklist)
1832 {
1833 basic_block b = *--tos;
1834
1835 for (e = b->succ; e ; e = e->succ_next)
1836 if (!e->dest->aux)
1837 {
1838 *tos++ = e->dest;
1839 e->dest->aux = e;
1840 }
1841 }
1842
1843 /* Delete all unreachable basic blocks. Count down so that we don't
1844 interfere with the block renumbering that happens in flow_delete_block. */
1845
1846 deleted_handler = 0;
1847
1848 for (i = n - 1; i >= 0; --i)
1849 {
1850 basic_block b = BASIC_BLOCK (i);
1851
1852 if (b->aux != NULL)
1853 /* This block was found. Tidy up the mark. */
1854 b->aux = NULL;
1855 else
1856 deleted_handler |= flow_delete_block (b);
1857 }
1858
1859 tidy_fallthru_edges ();
1860
1861 /* If we deleted an exception handler, we may have EH region begin/end
1862 blocks to remove as well. */
1863 if (deleted_handler)
1864 delete_eh_regions ();
1865
1866 free (worklist);
1867 }
1868
1869 /* Find EH regions for which there is no longer a handler, and delete them. */
1870
1871 static void
1872 delete_eh_regions ()
1873 {
1874 rtx insn;
1875
1876 update_rethrow_references ();
1877
1878 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1879 if (GET_CODE (insn) == NOTE)
1880 {
1881 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1882 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1883 {
1884 int num = NOTE_EH_HANDLER (insn);
1885 /* A NULL handler indicates a region is no longer needed,
1886 as long as its rethrow label isn't used. */
1887 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1888 {
1889 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1890 NOTE_SOURCE_FILE (insn) = 0;
1891 }
1892 }
1893 }
1894 }
1895
1896 /* Return true if NOTE is not one of the ones that must be kept paired,
1897 so that we may simply delete them. */
1898
1899 static int
1900 can_delete_note_p (note)
1901 rtx note;
1902 {
1903 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1904 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1905 }
1906
1907 /* Unlink a chain of insns between START and FINISH, leaving notes
1908 that must be paired. */
1909
1910 void
1911 flow_delete_insn_chain (start, finish)
1912 rtx start, finish;
1913 {
1914 /* Unchain the insns one by one. It would be quicker to delete all
1915 of these with a single unchaining, rather than one at a time, but
1916 we need to keep the NOTE's. */
1917
1918 rtx next;
1919
1920 while (1)
1921 {
1922 next = NEXT_INSN (start);
1923 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1924 ;
1925 else if (GET_CODE (start) == CODE_LABEL
1926 && ! can_delete_label_p (start))
1927 {
1928 const char *name = LABEL_NAME (start);
1929 PUT_CODE (start, NOTE);
1930 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1931 NOTE_SOURCE_FILE (start) = name;
1932 }
1933 else
1934 next = flow_delete_insn (start);
1935
1936 if (start == finish)
1937 break;
1938 start = next;
1939 }
1940 }
1941
1942 /* Delete the insns in a (non-live) block. We physically delete every
1943 non-deleted-note insn, and update the flow graph appropriately.
1944
1945 Return nonzero if we deleted an exception handler. */
1946
1947 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1948 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1949
1950 int
1951 flow_delete_block (b)
1952 basic_block b;
1953 {
1954 int deleted_handler = 0;
1955 rtx insn, end, tmp;
1956
1957 /* If the head of this block is a CODE_LABEL, then it might be the
1958 label for an exception handler which can't be reached.
1959
1960 We need to remove the label from the exception_handler_label list
1961 and remove the associated NOTE_INSN_EH_REGION_BEG and
1962 NOTE_INSN_EH_REGION_END notes. */
1963
1964 insn = b->head;
1965
1966 never_reached_warning (insn);
1967
1968 if (GET_CODE (insn) == CODE_LABEL)
1969 {
1970 rtx x, *prev = &exception_handler_labels;
1971
1972 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1973 {
1974 if (XEXP (x, 0) == insn)
1975 {
1976 /* Found a match, splice this label out of the EH label list. */
1977 *prev = XEXP (x, 1);
1978 XEXP (x, 1) = NULL_RTX;
1979 XEXP (x, 0) = NULL_RTX;
1980
1981 /* Remove the handler from all regions */
1982 remove_handler (insn);
1983 deleted_handler = 1;
1984 break;
1985 }
1986 prev = &XEXP (x, 1);
1987 }
1988 }
1989
1990 /* Include any jump table following the basic block. */
1991 end = b->end;
1992 if (GET_CODE (end) == JUMP_INSN
1993 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1994 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1995 && GET_CODE (tmp) == JUMP_INSN
1996 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1997 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1998 end = tmp;
1999
2000 /* Include any barrier that may follow the basic block. */
2001 tmp = next_nonnote_insn (end);
2002 if (tmp && GET_CODE (tmp) == BARRIER)
2003 end = tmp;
2004
2005 /* Selectively delete the entire chain. */
2006 flow_delete_insn_chain (insn, end);
2007
2008 /* Remove the edges into and out of this block. Note that there may
2009 indeed be edges in, if we are removing an unreachable loop. */
2010 {
2011 edge e, next, *q;
2012
2013 for (e = b->pred; e ; e = next)
2014 {
2015 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2016 continue;
2017 *q = e->succ_next;
2018 next = e->pred_next;
2019 n_edges--;
2020 free (e);
2021 }
2022 for (e = b->succ; e ; e = next)
2023 {
2024 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2025 continue;
2026 *q = e->pred_next;
2027 next = e->succ_next;
2028 n_edges--;
2029 free (e);
2030 }
2031
2032 b->pred = NULL;
2033 b->succ = NULL;
2034 }
2035
2036 /* Remove the basic block from the array, and compact behind it. */
2037 expunge_block (b);
2038
2039 return deleted_handler;
2040 }
2041
2042 /* Remove block B from the basic block array and compact behind it. */
2043
2044 static void
2045 expunge_block (b)
2046 basic_block b;
2047 {
2048 int i, n = n_basic_blocks;
2049
2050 for (i = b->index; i + 1 < n; ++i)
2051 {
2052 basic_block x = BASIC_BLOCK (i + 1);
2053 BASIC_BLOCK (i) = x;
2054 x->index = i;
2055 }
2056
2057 basic_block_info->num_elements--;
2058 n_basic_blocks--;
2059 }
2060
2061 /* Delete INSN by patching it out. Return the next insn. */
2062
2063 rtx
2064 flow_delete_insn (insn)
2065 rtx insn;
2066 {
2067 rtx prev = PREV_INSN (insn);
2068 rtx next = NEXT_INSN (insn);
2069 rtx note;
2070
2071 PREV_INSN (insn) = NULL_RTX;
2072 NEXT_INSN (insn) = NULL_RTX;
2073 INSN_DELETED_P (insn) = 1;
2074
2075 if (prev)
2076 NEXT_INSN (prev) = next;
2077 if (next)
2078 PREV_INSN (next) = prev;
2079 else
2080 set_last_insn (prev);
2081
2082 if (GET_CODE (insn) == CODE_LABEL)
2083 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2084
2085 /* If deleting a jump, decrement the use count of the label. Deleting
2086 the label itself should happen in the normal course of block merging. */
2087 if (GET_CODE (insn) == JUMP_INSN
2088 && JUMP_LABEL (insn)
2089 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2090 LABEL_NUSES (JUMP_LABEL (insn))--;
2091
2092 /* Also if deleting an insn that references a label. */
2093 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2094 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2095 LABEL_NUSES (XEXP (note, 0))--;
2096
2097 return next;
2098 }
2099
2100 /* True if a given label can be deleted. */
2101
2102 static int
2103 can_delete_label_p (label)
2104 rtx label;
2105 {
2106 rtx x;
2107
2108 if (LABEL_PRESERVE_P (label))
2109 return 0;
2110
2111 for (x = forced_labels; x ; x = XEXP (x, 1))
2112 if (label == XEXP (x, 0))
2113 return 0;
2114 for (x = label_value_list; x ; x = XEXP (x, 1))
2115 if (label == XEXP (x, 0))
2116 return 0;
2117 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2118 if (label == XEXP (x, 0))
2119 return 0;
2120
2121 /* User declared labels must be preserved. */
2122 if (LABEL_NAME (label) != 0)
2123 return 0;
2124
2125 return 1;
2126 }
2127
2128 static int
2129 tail_recursion_label_p (label)
2130 rtx label;
2131 {
2132 rtx x;
2133
2134 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2135 if (label == XEXP (x, 0))
2136 return 1;
2137
2138 return 0;
2139 }
2140
2141 /* Blocks A and B are to be merged into a single block A. The insns
2142 are already contiguous, hence `nomove'. */
2143
2144 void
2145 merge_blocks_nomove (a, b)
2146 basic_block a, b;
2147 {
2148 edge e;
2149 rtx b_head, b_end, a_end;
2150 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2151 int b_empty = 0;
2152
2153 /* If there was a CODE_LABEL beginning B, delete it. */
2154 b_head = b->head;
2155 b_end = b->end;
2156 if (GET_CODE (b_head) == CODE_LABEL)
2157 {
2158 /* Detect basic blocks with nothing but a label. This can happen
2159 in particular at the end of a function. */
2160 if (b_head == b_end)
2161 b_empty = 1;
2162 del_first = del_last = b_head;
2163 b_head = NEXT_INSN (b_head);
2164 }
2165
2166 /* Delete the basic block note. */
2167 if (GET_CODE (b_head) == NOTE
2168 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2169 {
2170 if (b_head == b_end)
2171 b_empty = 1;
2172 if (! del_last)
2173 del_first = b_head;
2174 del_last = b_head;
2175 b_head = NEXT_INSN (b_head);
2176 }
2177
2178 /* If there was a jump out of A, delete it. */
2179 a_end = a->end;
2180 if (GET_CODE (a_end) == JUMP_INSN)
2181 {
2182 rtx prev;
2183
2184 prev = prev_nonnote_insn (a_end);
2185 if (!prev)
2186 prev = a->head;
2187
2188 del_first = a_end;
2189
2190 #ifdef HAVE_cc0
2191 /* If this was a conditional jump, we need to also delete
2192 the insn that set cc0. */
2193 if (prev && sets_cc0_p (prev))
2194 {
2195 rtx tmp = prev;
2196 prev = prev_nonnote_insn (prev);
2197 if (!prev)
2198 prev = a->head;
2199 del_first = tmp;
2200 }
2201 #endif
2202
2203 a_end = prev;
2204 }
2205 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2206 del_first = NEXT_INSN (a_end);
2207
2208 /* Delete everything marked above as well as crap that might be
2209 hanging out between the two blocks. */
2210 flow_delete_insn_chain (del_first, del_last);
2211
2212 /* Normally there should only be one successor of A and that is B, but
2213 partway though the merge of blocks for conditional_execution we'll
2214 be merging a TEST block with THEN and ELSE successors. Free the
2215 whole lot of them and hope the caller knows what they're doing. */
2216 while (a->succ)
2217 remove_edge (a->succ);
2218
2219 /* Adjust the edges out of B for the new owner. */
2220 for (e = b->succ; e ; e = e->succ_next)
2221 e->src = a;
2222 a->succ = b->succ;
2223
2224 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2225 b->pred = b->succ = NULL;
2226
2227 /* Reassociate the insns of B with A. */
2228 if (!b_empty)
2229 {
2230 if (basic_block_for_insn)
2231 {
2232 BLOCK_FOR_INSN (b_head) = a;
2233 while (b_head != b_end)
2234 {
2235 b_head = NEXT_INSN (b_head);
2236 BLOCK_FOR_INSN (b_head) = a;
2237 }
2238 }
2239 a_end = b_end;
2240 }
2241 a->end = a_end;
2242
2243 expunge_block (b);
2244 }
2245
2246 /* Blocks A and B are to be merged into a single block. A has no incoming
2247 fallthru edge, so it can be moved before B without adding or modifying
2248 any jumps (aside from the jump from A to B). */
2249
2250 static int
2251 merge_blocks_move_predecessor_nojumps (a, b)
2252 basic_block a, b;
2253 {
2254 rtx start, end, barrier;
2255 int index;
2256
2257 start = a->head;
2258 end = a->end;
2259
2260 barrier = next_nonnote_insn (end);
2261 if (GET_CODE (barrier) != BARRIER)
2262 abort ();
2263 flow_delete_insn (barrier);
2264
2265 /* Move block and loop notes out of the chain so that we do not
2266 disturb their order.
2267
2268 ??? A better solution would be to squeeze out all the non-nested notes
2269 and adjust the block trees appropriately. Even better would be to have
2270 a tighter connection between block trees and rtl so that this is not
2271 necessary. */
2272 start = squeeze_notes (start, end);
2273
2274 /* Scramble the insn chain. */
2275 if (end != PREV_INSN (b->head))
2276 reorder_insns (start, end, PREV_INSN (b->head));
2277
2278 if (rtl_dump_file)
2279 {
2280 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2281 a->index, b->index);
2282 }
2283
2284 /* Swap the records for the two blocks around. Although we are deleting B,
2285 A is now where B was and we want to compact the BB array from where
2286 A used to be. */
2287 BASIC_BLOCK(a->index) = b;
2288 BASIC_BLOCK(b->index) = a;
2289 index = a->index;
2290 a->index = b->index;
2291 b->index = index;
2292
2293 /* Now blocks A and B are contiguous. Merge them. */
2294 merge_blocks_nomove (a, b);
2295
2296 return 1;
2297 }
2298
2299 /* Blocks A and B are to be merged into a single block. B has no outgoing
2300 fallthru edge, so it can be moved after A without adding or modifying
2301 any jumps (aside from the jump from A to B). */
2302
2303 static int
2304 merge_blocks_move_successor_nojumps (a, b)
2305 basic_block a, b;
2306 {
2307 rtx start, end, barrier;
2308
2309 start = b->head;
2310 end = b->end;
2311 barrier = NEXT_INSN (end);
2312
2313 /* Recognize a jump table following block B. */
2314 if (GET_CODE (barrier) == CODE_LABEL
2315 && NEXT_INSN (barrier)
2316 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2317 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2318 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2319 {
2320 end = NEXT_INSN (barrier);
2321 barrier = NEXT_INSN (end);
2322 }
2323
2324 /* There had better have been a barrier there. Delete it. */
2325 if (GET_CODE (barrier) != BARRIER)
2326 abort ();
2327 flow_delete_insn (barrier);
2328
2329 /* Move block and loop notes out of the chain so that we do not
2330 disturb their order.
2331
2332 ??? A better solution would be to squeeze out all the non-nested notes
2333 and adjust the block trees appropriately. Even better would be to have
2334 a tighter connection between block trees and rtl so that this is not
2335 necessary. */
2336 start = squeeze_notes (start, end);
2337
2338 /* Scramble the insn chain. */
2339 reorder_insns (start, end, a->end);
2340
2341 /* Now blocks A and B are contiguous. Merge them. */
2342 merge_blocks_nomove (a, b);
2343
2344 if (rtl_dump_file)
2345 {
2346 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2347 b->index, a->index);
2348 }
2349
2350 return 1;
2351 }
2352
2353 /* Attempt to merge basic blocks that are potentially non-adjacent.
2354 Return true iff the attempt succeeded. */
2355
2356 static int
2357 merge_blocks (e, b, c)
2358 edge e;
2359 basic_block b, c;
2360 {
2361 /* If C has a tail recursion label, do not merge. There is no
2362 edge recorded from the call_placeholder back to this label, as
2363 that would make optimize_sibling_and_tail_recursive_calls more
2364 complex for no gain. */
2365 if (GET_CODE (c->head) == CODE_LABEL
2366 && tail_recursion_label_p (c->head))
2367 return 0;
2368
2369 /* If B has a fallthru edge to C, no need to move anything. */
2370 if (e->flags & EDGE_FALLTHRU)
2371 {
2372 merge_blocks_nomove (b, c);
2373
2374 if (rtl_dump_file)
2375 {
2376 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2377 b->index, c->index);
2378 }
2379
2380 return 1;
2381 }
2382 else
2383 {
2384 edge tmp_edge;
2385 basic_block d;
2386 int c_has_outgoing_fallthru;
2387 int b_has_incoming_fallthru;
2388
2389 /* We must make sure to not munge nesting of exception regions,
2390 lexical blocks, and loop notes.
2391
2392 The first is taken care of by requiring that the active eh
2393 region at the end of one block always matches the active eh
2394 region at the beginning of the next block.
2395
2396 The later two are taken care of by squeezing out all the notes. */
2397
2398 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2399 executed and we may want to treat blocks which have two out
2400 edges, one normal, one abnormal as only having one edge for
2401 block merging purposes. */
2402
2403 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2404 if (tmp_edge->flags & EDGE_FALLTHRU)
2405 break;
2406 c_has_outgoing_fallthru = (tmp_edge != NULL);
2407
2408 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2409 if (tmp_edge->flags & EDGE_FALLTHRU)
2410 break;
2411 b_has_incoming_fallthru = (tmp_edge != NULL);
2412
2413 /* If B does not have an incoming fallthru, and the exception regions
2414 match, then it can be moved immediately before C without introducing
2415 or modifying jumps.
2416
2417 C can not be the first block, so we do not have to worry about
2418 accessing a non-existent block. */
2419 d = BASIC_BLOCK (c->index - 1);
2420 if (! b_has_incoming_fallthru
2421 && d->eh_end == b->eh_beg
2422 && b->eh_end == c->eh_beg)
2423 return merge_blocks_move_predecessor_nojumps (b, c);
2424
2425 /* Otherwise, we're going to try to move C after B. Make sure the
2426 exception regions match.
2427
2428 If B is the last basic block, then we must not try to access the
2429 block structure for block B + 1. Luckily in that case we do not
2430 need to worry about matching exception regions. */
2431 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2432 if (b->eh_end == c->eh_beg
2433 && (d == NULL || c->eh_end == d->eh_beg))
2434 {
2435 /* If C does not have an outgoing fallthru, then it can be moved
2436 immediately after B without introducing or modifying jumps. */
2437 if (! c_has_outgoing_fallthru)
2438 return merge_blocks_move_successor_nojumps (b, c);
2439
2440 /* Otherwise, we'll need to insert an extra jump, and possibly
2441 a new block to contain it. */
2442 /* ??? Not implemented yet. */
2443 }
2444
2445 return 0;
2446 }
2447 }
2448
2449 /* Top level driver for merge_blocks. */
2450
2451 static void
2452 try_merge_blocks ()
2453 {
2454 int i;
2455
2456 /* Attempt to merge blocks as made possible by edge removal. If a block
2457 has only one successor, and the successor has only one predecessor,
2458 they may be combined. */
2459
2460 for (i = 0; i < n_basic_blocks; )
2461 {
2462 basic_block c, b = BASIC_BLOCK (i);
2463 edge s;
2464
2465 /* A loop because chains of blocks might be combineable. */
2466 while ((s = b->succ) != NULL
2467 && s->succ_next == NULL
2468 && (s->flags & EDGE_EH) == 0
2469 && (c = s->dest) != EXIT_BLOCK_PTR
2470 && c->pred->pred_next == NULL
2471 /* If the jump insn has side effects, we can't kill the edge. */
2472 && (GET_CODE (b->end) != JUMP_INSN
2473 || onlyjump_p (b->end))
2474 && merge_blocks (s, b, c))
2475 continue;
2476
2477 /* Don't get confused by the index shift caused by deleting blocks. */
2478 i = b->index + 1;
2479 }
2480 }
2481
2482 /* The given edge should potentially be a fallthru edge. If that is in
2483 fact true, delete the jump and barriers that are in the way. */
2484
2485 void
2486 tidy_fallthru_edge (e, b, c)
2487 edge e;
2488 basic_block b, c;
2489 {
2490 rtx q;
2491
2492 /* ??? In a late-running flow pass, other folks may have deleted basic
2493 blocks by nopping out blocks, leaving multiple BARRIERs between here
2494 and the target label. They ought to be chastized and fixed.
2495
2496 We can also wind up with a sequence of undeletable labels between
2497 one block and the next.
2498
2499 So search through a sequence of barriers, labels, and notes for
2500 the head of block C and assert that we really do fall through. */
2501
2502 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2503 return;
2504
2505 /* Remove what will soon cease being the jump insn from the source block.
2506 If block B consisted only of this single jump, turn it into a deleted
2507 note. */
2508 q = b->end;
2509 if (GET_CODE (q) == JUMP_INSN
2510 && onlyjump_p (q)
2511 && (any_uncondjump_p (q)
2512 || (b->succ == e && e->succ_next == NULL)))
2513 {
2514 #ifdef HAVE_cc0
2515 /* If this was a conditional jump, we need to also delete
2516 the insn that set cc0. */
2517 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2518 q = PREV_INSN (q);
2519 #endif
2520
2521 if (b->head == q)
2522 {
2523 PUT_CODE (q, NOTE);
2524 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2525 NOTE_SOURCE_FILE (q) = 0;
2526 }
2527 else
2528 b->end = q = PREV_INSN (q);
2529 }
2530
2531 /* Selectively unlink the sequence. */
2532 if (q != PREV_INSN (c->head))
2533 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2534
2535 e->flags |= EDGE_FALLTHRU;
2536 }
2537
2538 /* Fix up edges that now fall through, or rather should now fall through
2539 but previously required a jump around now deleted blocks. Simplify
2540 the search by only examining blocks numerically adjacent, since this
2541 is how find_basic_blocks created them. */
2542
2543 static void
2544 tidy_fallthru_edges ()
2545 {
2546 int i;
2547
2548 for (i = 1; i < n_basic_blocks; ++i)
2549 {
2550 basic_block b = BASIC_BLOCK (i - 1);
2551 basic_block c = BASIC_BLOCK (i);
2552 edge s;
2553
2554 /* We care about simple conditional or unconditional jumps with
2555 a single successor.
2556
2557 If we had a conditional branch to the next instruction when
2558 find_basic_blocks was called, then there will only be one
2559 out edge for the block which ended with the conditional
2560 branch (since we do not create duplicate edges).
2561
2562 Furthermore, the edge will be marked as a fallthru because we
2563 merge the flags for the duplicate edges. So we do not want to
2564 check that the edge is not a FALLTHRU edge. */
2565 if ((s = b->succ) != NULL
2566 && s->succ_next == NULL
2567 && s->dest == c
2568 /* If the jump insn has side effects, we can't tidy the edge. */
2569 && (GET_CODE (b->end) != JUMP_INSN
2570 || onlyjump_p (b->end)))
2571 tidy_fallthru_edge (s, b, c);
2572 }
2573 }
2574 \f
2575 /* Perform data flow analysis.
2576 F is the first insn of the function; FLAGS is a set of PROP_* flags
2577 to be used in accumulating flow info. */
2578
2579 void
2580 life_analysis (f, file, flags)
2581 rtx f;
2582 FILE *file;
2583 int flags;
2584 {
2585 #ifdef ELIMINABLE_REGS
2586 register int i;
2587 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2588 #endif
2589
2590 /* Record which registers will be eliminated. We use this in
2591 mark_used_regs. */
2592
2593 CLEAR_HARD_REG_SET (elim_reg_set);
2594
2595 #ifdef ELIMINABLE_REGS
2596 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2597 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2598 #else
2599 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2600 #endif
2601
2602 if (! optimize)
2603 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2604
2605 /* The post-reload life analysis have (on a global basis) the same
2606 registers live as was computed by reload itself. elimination
2607 Otherwise offsets and such may be incorrect.
2608
2609 Reload will make some registers as live even though they do not
2610 appear in the rtl.
2611
2612 We don't want to create new auto-incs after reload, since they
2613 are unlikely to be useful and can cause problems with shared
2614 stack slots. */
2615 if (reload_completed)
2616 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2617
2618 /* We want alias analysis information for local dead store elimination. */
2619 if (flags & PROP_SCAN_DEAD_CODE)
2620 init_alias_analysis ();
2621
2622 /* Always remove no-op moves. Do this before other processing so
2623 that we don't have to keep re-scanning them. */
2624 delete_noop_moves (f);
2625
2626 /* Some targets can emit simpler epilogues if they know that sp was
2627 not ever modified during the function. After reload, of course,
2628 we've already emitted the epilogue so there's no sense searching. */
2629 if (! reload_completed)
2630 notice_stack_pointer_modification (f);
2631
2632 /* Allocate and zero out data structures that will record the
2633 data from lifetime analysis. */
2634 allocate_reg_life_data ();
2635 allocate_bb_life_data ();
2636
2637 /* Find the set of registers live on function exit. */
2638 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2639
2640 /* "Update" life info from zero. It'd be nice to begin the
2641 relaxation with just the exit and noreturn blocks, but that set
2642 is not immediately handy. */
2643
2644 if (flags & PROP_REG_INFO)
2645 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2646 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2647
2648 /* Clean up. */
2649 if (flags & PROP_SCAN_DEAD_CODE)
2650 end_alias_analysis ();
2651
2652 if (file)
2653 dump_flow_info (file);
2654
2655 free_basic_block_vars (1);
2656 }
2657
2658 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2659 Search for REGNO. If found, abort if it is not wider than word_mode. */
2660
2661 static int
2662 verify_wide_reg_1 (px, pregno)
2663 rtx *px;
2664 void *pregno;
2665 {
2666 rtx x = *px;
2667 unsigned int regno = *(int *) pregno;
2668
2669 if (GET_CODE (x) == REG && REGNO (x) == regno)
2670 {
2671 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2672 abort ();
2673 return 1;
2674 }
2675 return 0;
2676 }
2677
2678 /* A subroutine of verify_local_live_at_start. Search through insns
2679 between HEAD and END looking for register REGNO. */
2680
2681 static void
2682 verify_wide_reg (regno, head, end)
2683 int regno;
2684 rtx head, end;
2685 {
2686 while (1)
2687 {
2688 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2689 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2690 return;
2691 if (head == end)
2692 break;
2693 head = NEXT_INSN (head);
2694 }
2695
2696 /* We didn't find the register at all. Something's way screwy. */
2697 abort ();
2698 }
2699
2700 /* A subroutine of update_life_info. Verify that there are no untoward
2701 changes in live_at_start during a local update. */
2702
2703 static void
2704 verify_local_live_at_start (new_live_at_start, bb)
2705 regset new_live_at_start;
2706 basic_block bb;
2707 {
2708 if (reload_completed)
2709 {
2710 /* After reload, there are no pseudos, nor subregs of multi-word
2711 registers. The regsets should exactly match. */
2712 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2713 abort ();
2714 }
2715 else
2716 {
2717 int i;
2718
2719 /* Find the set of changed registers. */
2720 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2721
2722 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2723 {
2724 /* No registers should die. */
2725 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2726 abort ();
2727 /* Verify that the now-live register is wider than word_mode. */
2728 verify_wide_reg (i, bb->head, bb->end);
2729 });
2730 }
2731 }
2732
2733 /* Updates life information starting with the basic blocks set in BLOCKS.
2734 If BLOCKS is null, consider it to be the universal set.
2735
2736 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2737 we are only expecting local modifications to basic blocks. If we find
2738 extra registers live at the beginning of a block, then we either killed
2739 useful data, or we have a broken split that wants data not provided.
2740 If we find registers removed from live_at_start, that means we have
2741 a broken peephole that is killing a register it shouldn't.
2742
2743 ??? This is not true in one situation -- when a pre-reload splitter
2744 generates subregs of a multi-word pseudo, current life analysis will
2745 lose the kill. So we _can_ have a pseudo go live. How irritating.
2746
2747 Including PROP_REG_INFO does not properly refresh regs_ever_live
2748 unless the caller resets it to zero. */
2749
2750 void
2751 update_life_info (blocks, extent, prop_flags)
2752 sbitmap blocks;
2753 enum update_life_extent extent;
2754 int prop_flags;
2755 {
2756 regset tmp;
2757 regset_head tmp_head;
2758 int i;
2759
2760 tmp = INITIALIZE_REG_SET (tmp_head);
2761
2762 /* For a global update, we go through the relaxation process again. */
2763 if (extent != UPDATE_LIFE_LOCAL)
2764 {
2765 calculate_global_regs_live (blocks, blocks,
2766 prop_flags & PROP_SCAN_DEAD_CODE);
2767
2768 /* If asked, remove notes from the blocks we'll update. */
2769 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2770 count_or_remove_death_notes (blocks, 1);
2771 }
2772
2773 if (blocks)
2774 {
2775 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2776 {
2777 basic_block bb = BASIC_BLOCK (i);
2778
2779 COPY_REG_SET (tmp, bb->global_live_at_end);
2780 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2781
2782 if (extent == UPDATE_LIFE_LOCAL)
2783 verify_local_live_at_start (tmp, bb);
2784 });
2785 }
2786 else
2787 {
2788 for (i = n_basic_blocks - 1; i >= 0; --i)
2789 {
2790 basic_block bb = BASIC_BLOCK (i);
2791
2792 COPY_REG_SET (tmp, bb->global_live_at_end);
2793 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2794
2795 if (extent == UPDATE_LIFE_LOCAL)
2796 verify_local_live_at_start (tmp, bb);
2797 }
2798 }
2799
2800 FREE_REG_SET (tmp);
2801
2802 if (prop_flags & PROP_REG_INFO)
2803 {
2804 /* The only pseudos that are live at the beginning of the function
2805 are those that were not set anywhere in the function. local-alloc
2806 doesn't know how to handle these correctly, so mark them as not
2807 local to any one basic block. */
2808 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2809 FIRST_PSEUDO_REGISTER, i,
2810 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2811
2812 /* We have a problem with any pseudoreg that lives across the setjmp.
2813 ANSI says that if a user variable does not change in value between
2814 the setjmp and the longjmp, then the longjmp preserves it. This
2815 includes longjmp from a place where the pseudo appears dead.
2816 (In principle, the value still exists if it is in scope.)
2817 If the pseudo goes in a hard reg, some other value may occupy
2818 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2819 Conclusion: such a pseudo must not go in a hard reg. */
2820 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2821 FIRST_PSEUDO_REGISTER, i,
2822 {
2823 if (regno_reg_rtx[i] != 0)
2824 {
2825 REG_LIVE_LENGTH (i) = -1;
2826 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2827 }
2828 });
2829 }
2830 }
2831
2832 /* Free the variables allocated by find_basic_blocks.
2833
2834 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2835
2836 void
2837 free_basic_block_vars (keep_head_end_p)
2838 int keep_head_end_p;
2839 {
2840 if (basic_block_for_insn)
2841 {
2842 VARRAY_FREE (basic_block_for_insn);
2843 basic_block_for_insn = NULL;
2844 }
2845
2846 if (! keep_head_end_p)
2847 {
2848 clear_edges ();
2849 VARRAY_FREE (basic_block_info);
2850 n_basic_blocks = 0;
2851
2852 ENTRY_BLOCK_PTR->aux = NULL;
2853 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2854 EXIT_BLOCK_PTR->aux = NULL;
2855 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2856 }
2857 }
2858
2859 /* Return nonzero if the destination of SET equals the source. */
2860 static int
2861 set_noop_p (set)
2862 rtx set;
2863 {
2864 rtx src = SET_SRC (set);
2865 rtx dst = SET_DEST (set);
2866
2867 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2868 {
2869 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2870 return 0;
2871 src = SUBREG_REG (src);
2872 dst = SUBREG_REG (dst);
2873 }
2874
2875 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2876 && REGNO (src) == REGNO (dst));
2877 }
2878
2879 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2880 value to itself. */
2881 static int
2882 noop_move_p (insn)
2883 rtx insn;
2884 {
2885 rtx pat = PATTERN (insn);
2886
2887 /* Insns carrying these notes are useful later on. */
2888 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2889 return 0;
2890
2891 if (GET_CODE (pat) == SET && set_noop_p (pat))
2892 return 1;
2893
2894 if (GET_CODE (pat) == PARALLEL)
2895 {
2896 int i;
2897 /* If nothing but SETs of registers to themselves,
2898 this insn can also be deleted. */
2899 for (i = 0; i < XVECLEN (pat, 0); i++)
2900 {
2901 rtx tem = XVECEXP (pat, 0, i);
2902
2903 if (GET_CODE (tem) == USE
2904 || GET_CODE (tem) == CLOBBER)
2905 continue;
2906
2907 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2908 return 0;
2909 }
2910
2911 return 1;
2912 }
2913 return 0;
2914 }
2915
2916 /* Delete any insns that copy a register to itself. */
2917
2918 static void
2919 delete_noop_moves (f)
2920 rtx f;
2921 {
2922 rtx insn;
2923 for (insn = f; insn; insn = NEXT_INSN (insn))
2924 {
2925 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2926 {
2927 PUT_CODE (insn, NOTE);
2928 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2929 NOTE_SOURCE_FILE (insn) = 0;
2930 }
2931 }
2932 }
2933
2934 /* Determine if the stack pointer is constant over the life of the function.
2935 Only useful before prologues have been emitted. */
2936
2937 static void
2938 notice_stack_pointer_modification_1 (x, pat, data)
2939 rtx x;
2940 rtx pat ATTRIBUTE_UNUSED;
2941 void *data ATTRIBUTE_UNUSED;
2942 {
2943 if (x == stack_pointer_rtx
2944 /* The stack pointer is only modified indirectly as the result
2945 of a push until later in flow. See the comments in rtl.texi
2946 regarding Embedded Side-Effects on Addresses. */
2947 || (GET_CODE (x) == MEM
2948 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2949 || GET_CODE (XEXP (x, 0)) == PRE_INC
2950 || GET_CODE (XEXP (x, 0)) == POST_DEC
2951 || GET_CODE (XEXP (x, 0)) == POST_INC)
2952 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2953 current_function_sp_is_unchanging = 0;
2954 }
2955
2956 static void
2957 notice_stack_pointer_modification (f)
2958 rtx f;
2959 {
2960 rtx insn;
2961
2962 /* Assume that the stack pointer is unchanging if alloca hasn't
2963 been used. */
2964 current_function_sp_is_unchanging = !current_function_calls_alloca;
2965 if (! current_function_sp_is_unchanging)
2966 return;
2967
2968 for (insn = f; insn; insn = NEXT_INSN (insn))
2969 {
2970 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2971 {
2972 /* Check if insn modifies the stack pointer. */
2973 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2974 NULL);
2975 if (! current_function_sp_is_unchanging)
2976 return;
2977 }
2978 }
2979 }
2980
2981 /* Mark a register in SET. Hard registers in large modes get all
2982 of their component registers set as well. */
2983 static void
2984 mark_reg (reg, xset)
2985 rtx reg;
2986 void *xset;
2987 {
2988 regset set = (regset) xset;
2989 int regno = REGNO (reg);
2990
2991 if (GET_MODE (reg) == BLKmode)
2992 abort ();
2993
2994 SET_REGNO_REG_SET (set, regno);
2995 if (regno < FIRST_PSEUDO_REGISTER)
2996 {
2997 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2998 while (--n > 0)
2999 SET_REGNO_REG_SET (set, regno + n);
3000 }
3001 }
3002
3003 /* Mark those regs which are needed at the end of the function as live
3004 at the end of the last basic block. */
3005 static void
3006 mark_regs_live_at_end (set)
3007 regset set;
3008 {
3009 int i;
3010
3011 /* If exiting needs the right stack value, consider the stack pointer
3012 live at the end of the function. */
3013 if ((HAVE_epilogue && reload_completed)
3014 || ! EXIT_IGNORE_STACK
3015 || (! FRAME_POINTER_REQUIRED
3016 && ! current_function_calls_alloca
3017 && flag_omit_frame_pointer)
3018 || current_function_sp_is_unchanging)
3019 {
3020 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3021 }
3022
3023 /* Mark the frame pointer if needed at the end of the function. If
3024 we end up eliminating it, it will be removed from the live list
3025 of each basic block by reload. */
3026
3027 if (! reload_completed || frame_pointer_needed)
3028 {
3029 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3030 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3031 /* If they are different, also mark the hard frame pointer as live */
3032 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3033 #endif
3034 }
3035
3036 #ifdef PIC_OFFSET_TABLE_REGNUM
3037 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3038 /* Many architectures have a GP register even without flag_pic.
3039 Assume the pic register is not in use, or will be handled by
3040 other means, if it is not fixed. */
3041 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3042 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3043 #endif
3044 #endif
3045
3046 /* Mark all global registers, and all registers used by the epilogue
3047 as being live at the end of the function since they may be
3048 referenced by our caller. */
3049 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3050 if (global_regs[i]
3051 #ifdef EPILOGUE_USES
3052 || EPILOGUE_USES (i)
3053 #endif
3054 )
3055 SET_REGNO_REG_SET (set, i);
3056
3057 /* Mark all call-saved registers that we actaully used. */
3058 if (HAVE_epilogue && reload_completed)
3059 {
3060 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3061 if (! call_used_regs[i] && regs_ever_live[i])
3062 SET_REGNO_REG_SET (set, i);
3063 }
3064
3065 /* Mark function return value. */
3066 diddle_return_value (mark_reg, set);
3067 }
3068
3069 /* Callback function for for_each_successor_phi. DATA is a regset.
3070 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3071 INSN, in the regset. */
3072
3073 static int
3074 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3075 rtx insn ATTRIBUTE_UNUSED;
3076 int dest_regno ATTRIBUTE_UNUSED;
3077 int src_regno;
3078 void *data;
3079 {
3080 regset live = (regset) data;
3081 SET_REGNO_REG_SET (live, src_regno);
3082 return 0;
3083 }
3084
3085 /* Propagate global life info around the graph of basic blocks. Begin
3086 considering blocks with their corresponding bit set in BLOCKS_IN.
3087 If BLOCKS_IN is null, consider it the universal set.
3088
3089 BLOCKS_OUT is set for every block that was changed. */
3090
3091 static void
3092 calculate_global_regs_live (blocks_in, blocks_out, flags)
3093 sbitmap blocks_in, blocks_out;
3094 int flags;
3095 {
3096 basic_block *queue, *qhead, *qtail, *qend;
3097 regset tmp, new_live_at_end;
3098 regset_head tmp_head;
3099 regset_head new_live_at_end_head;
3100 int i;
3101
3102 tmp = INITIALIZE_REG_SET (tmp_head);
3103 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3104
3105 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3106 because the `head == tail' style test for an empty queue doesn't
3107 work with a full queue. */
3108 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3109 qtail = queue;
3110 qhead = qend = queue + n_basic_blocks + 2;
3111
3112 /* Clear out the garbage that might be hanging out in bb->aux. */
3113 for (i = n_basic_blocks - 1; i >= 0; --i)
3114 BASIC_BLOCK (i)->aux = NULL;
3115
3116 /* Queue the blocks set in the initial mask. Do this in reverse block
3117 number order so that we are more likely for the first round to do
3118 useful work. We use AUX non-null to flag that the block is queued. */
3119 if (blocks_in)
3120 {
3121 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3122 {
3123 basic_block bb = BASIC_BLOCK (i);
3124 *--qhead = bb;
3125 bb->aux = bb;
3126 });
3127 }
3128 else
3129 {
3130 for (i = 0; i < n_basic_blocks; ++i)
3131 {
3132 basic_block bb = BASIC_BLOCK (i);
3133 *--qhead = bb;
3134 bb->aux = bb;
3135 }
3136 }
3137
3138 if (blocks_out)
3139 sbitmap_zero (blocks_out);
3140
3141 while (qhead != qtail)
3142 {
3143 int rescan, changed;
3144 basic_block bb;
3145 edge e;
3146
3147 bb = *qhead++;
3148 if (qhead == qend)
3149 qhead = queue;
3150 bb->aux = NULL;
3151
3152 /* Begin by propogating live_at_start from the successor blocks. */
3153 CLEAR_REG_SET (new_live_at_end);
3154 for (e = bb->succ; e ; e = e->succ_next)
3155 {
3156 basic_block sb = e->dest;
3157 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3158 }
3159
3160 /* Force the stack pointer to be live -- which might not already be
3161 the case for blocks within infinite loops. */
3162 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3163
3164 /* Regs used in phi nodes are not included in
3165 global_live_at_start, since they are live only along a
3166 particular edge. Set those regs that are live because of a
3167 phi node alternative corresponding to this particular block. */
3168 if (in_ssa_form)
3169 for_each_successor_phi (bb, &set_phi_alternative_reg,
3170 new_live_at_end);
3171
3172 if (bb == ENTRY_BLOCK_PTR)
3173 {
3174 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3175 continue;
3176 }
3177
3178 /* On our first pass through this block, we'll go ahead and continue.
3179 Recognize first pass by local_set NULL. On subsequent passes, we
3180 get to skip out early if live_at_end wouldn't have changed. */
3181
3182 if (bb->local_set == NULL)
3183 {
3184 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3185 rescan = 1;
3186 }
3187 else
3188 {
3189 /* If any bits were removed from live_at_end, we'll have to
3190 rescan the block. This wouldn't be necessary if we had
3191 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3192 local_live is really dependent on live_at_end. */
3193 CLEAR_REG_SET (tmp);
3194 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3195 new_live_at_end, BITMAP_AND_COMPL);
3196
3197 if (! rescan)
3198 {
3199 /* Find the set of changed bits. Take this opportunity
3200 to notice that this set is empty and early out. */
3201 CLEAR_REG_SET (tmp);
3202 changed = bitmap_operation (tmp, bb->global_live_at_end,
3203 new_live_at_end, BITMAP_XOR);
3204 if (! changed)
3205 continue;
3206
3207 /* If any of the changed bits overlap with local_set,
3208 we'll have to rescan the block. Detect overlap by
3209 the AND with ~local_set turning off bits. */
3210 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3211 BITMAP_AND_COMPL);
3212 }
3213 }
3214
3215 /* Let our caller know that BB changed enough to require its
3216 death notes updated. */
3217 if (blocks_out)
3218 SET_BIT (blocks_out, bb->index);
3219
3220 if (! rescan)
3221 {
3222 /* Add to live_at_start the set of all registers in
3223 new_live_at_end that aren't in the old live_at_end. */
3224
3225 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3226 BITMAP_AND_COMPL);
3227 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3228
3229 changed = bitmap_operation (bb->global_live_at_start,
3230 bb->global_live_at_start,
3231 tmp, BITMAP_IOR);
3232 if (! changed)
3233 continue;
3234 }
3235 else
3236 {
3237 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3238
3239 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3240 into live_at_start. */
3241 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3242
3243 /* If live_at start didn't change, no need to go farther. */
3244 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3245 continue;
3246
3247 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3248 }
3249
3250 /* Queue all predecessors of BB so that we may re-examine
3251 their live_at_end. */
3252 for (e = bb->pred; e ; e = e->pred_next)
3253 {
3254 basic_block pb = e->src;
3255 if (pb->aux == NULL)
3256 {
3257 *qtail++ = pb;
3258 if (qtail == qend)
3259 qtail = queue;
3260 pb->aux = pb;
3261 }
3262 }
3263 }
3264
3265 FREE_REG_SET (tmp);
3266 FREE_REG_SET (new_live_at_end);
3267
3268 if (blocks_out)
3269 {
3270 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3271 {
3272 basic_block bb = BASIC_BLOCK (i);
3273 FREE_REG_SET (bb->local_set);
3274 });
3275 }
3276 else
3277 {
3278 for (i = n_basic_blocks - 1; i >= 0; --i)
3279 {
3280 basic_block bb = BASIC_BLOCK (i);
3281 FREE_REG_SET (bb->local_set);
3282 }
3283 }
3284
3285 free (queue);
3286 }
3287 \f
3288 /* Subroutines of life analysis. */
3289
3290 /* Allocate the permanent data structures that represent the results
3291 of life analysis. Not static since used also for stupid life analysis. */
3292
3293 void
3294 allocate_bb_life_data ()
3295 {
3296 register int i;
3297
3298 for (i = 0; i < n_basic_blocks; i++)
3299 {
3300 basic_block bb = BASIC_BLOCK (i);
3301
3302 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3303 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3304 }
3305
3306 ENTRY_BLOCK_PTR->global_live_at_end
3307 = OBSTACK_ALLOC_REG_SET (function_obstack);
3308 EXIT_BLOCK_PTR->global_live_at_start
3309 = OBSTACK_ALLOC_REG_SET (function_obstack);
3310
3311 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3312 }
3313
3314 void
3315 allocate_reg_life_data ()
3316 {
3317 int i;
3318
3319 max_regno = max_reg_num ();
3320
3321 /* Recalculate the register space, in case it has grown. Old style
3322 vector oriented regsets would set regset_{size,bytes} here also. */
3323 allocate_reg_info (max_regno, FALSE, FALSE);
3324
3325 /* Reset all the data we'll collect in propagate_block and its
3326 subroutines. */
3327 for (i = 0; i < max_regno; i++)
3328 {
3329 REG_N_SETS (i) = 0;
3330 REG_N_REFS (i) = 0;
3331 REG_N_DEATHS (i) = 0;
3332 REG_N_CALLS_CROSSED (i) = 0;
3333 REG_LIVE_LENGTH (i) = 0;
3334 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3335 }
3336 }
3337
3338 /* Delete dead instructions for propagate_block. */
3339
3340 static void
3341 propagate_block_delete_insn (bb, insn)
3342 basic_block bb;
3343 rtx insn;
3344 {
3345 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3346
3347 /* If the insn referred to a label, and that label was attached to
3348 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3349 pretty much mandatory to delete it, because the ADDR_VEC may be
3350 referencing labels that no longer exist. */
3351
3352 if (inote)
3353 {
3354 rtx label = XEXP (inote, 0);
3355 rtx next;
3356
3357 if (LABEL_NUSES (label) == 1
3358 && (next = next_nonnote_insn (label)) != NULL
3359 && GET_CODE (next) == JUMP_INSN
3360 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3361 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3362 {
3363 rtx pat = PATTERN (next);
3364 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3365 int len = XVECLEN (pat, diff_vec_p);
3366 int i;
3367
3368 for (i = 0; i < len; i++)
3369 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3370
3371 flow_delete_insn (next);
3372 }
3373 }
3374
3375 if (bb->end == insn)
3376 bb->end = PREV_INSN (insn);
3377 flow_delete_insn (insn);
3378 }
3379
3380 /* Delete dead libcalls for propagate_block. Return the insn
3381 before the libcall. */
3382
3383 static rtx
3384 propagate_block_delete_libcall (bb, insn, note)
3385 basic_block bb;
3386 rtx insn, note;
3387 {
3388 rtx first = XEXP (note, 0);
3389 rtx before = PREV_INSN (first);
3390
3391 if (insn == bb->end)
3392 bb->end = before;
3393
3394 flow_delete_insn_chain (first, insn);
3395 return before;
3396 }
3397
3398 /* Update the life-status of regs for one insn. Return the previous insn. */
3399
3400 rtx
3401 propagate_one_insn (pbi, insn)
3402 struct propagate_block_info *pbi;
3403 rtx insn;
3404 {
3405 rtx prev = PREV_INSN (insn);
3406 int flags = pbi->flags;
3407 int insn_is_dead = 0;
3408 int libcall_is_dead = 0;
3409 rtx note;
3410 int i;
3411
3412 if (! INSN_P (insn))
3413 return prev;
3414
3415 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3416 if (flags & PROP_SCAN_DEAD_CODE)
3417 {
3418 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3419 REG_NOTES (insn));
3420 libcall_is_dead = (insn_is_dead && note != 0
3421 && libcall_dead_p (pbi, note, insn));
3422 }
3423
3424 /* We almost certainly don't want to delete prologue or epilogue
3425 instructions. Warn about probable compiler losage. */
3426 if (insn_is_dead
3427 && reload_completed
3428 && (((HAVE_epilogue || HAVE_prologue)
3429 && prologue_epilogue_contains (insn))
3430 || (HAVE_sibcall_epilogue
3431 && sibcall_epilogue_contains (insn))))
3432 {
3433 if (flags & PROP_KILL_DEAD_CODE)
3434 {
3435 warning ("ICE: would have deleted prologue/epilogue insn");
3436 if (!inhibit_warnings)
3437 debug_rtx (insn);
3438 }
3439 libcall_is_dead = insn_is_dead = 0;
3440 }
3441
3442 /* If an instruction consists of just dead store(s) on final pass,
3443 delete it. */
3444 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3445 {
3446 /* Record sets. Do this even for dead instructions, since they
3447 would have killed the values if they hadn't been deleted. */
3448 mark_set_regs (pbi, PATTERN (insn), insn);
3449
3450 /* CC0 is now known to be dead. Either this insn used it,
3451 in which case it doesn't anymore, or clobbered it,
3452 so the next insn can't use it. */
3453 pbi->cc0_live = 0;
3454
3455 if (libcall_is_dead)
3456 {
3457 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3458 insn = NEXT_INSN (prev);
3459 }
3460 else
3461 propagate_block_delete_insn (pbi->bb, insn);
3462
3463 return prev;
3464 }
3465
3466 /* See if this is an increment or decrement that can be merged into
3467 a following memory address. */
3468 #ifdef AUTO_INC_DEC
3469 {
3470 register rtx x = single_set (insn);
3471
3472 /* Does this instruction increment or decrement a register? */
3473 if ((flags & PROP_AUTOINC)
3474 && x != 0
3475 && GET_CODE (SET_DEST (x)) == REG
3476 && (GET_CODE (SET_SRC (x)) == PLUS
3477 || GET_CODE (SET_SRC (x)) == MINUS)
3478 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3479 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3480 /* Ok, look for a following memory ref we can combine with.
3481 If one is found, change the memory ref to a PRE_INC
3482 or PRE_DEC, cancel this insn, and return 1.
3483 Return 0 if nothing has been done. */
3484 && try_pre_increment_1 (pbi, insn))
3485 return prev;
3486 }
3487 #endif /* AUTO_INC_DEC */
3488
3489 CLEAR_REG_SET (pbi->new_set);
3490
3491 /* If this is not the final pass, and this insn is copying the value of
3492 a library call and it's dead, don't scan the insns that perform the
3493 library call, so that the call's arguments are not marked live. */
3494 if (libcall_is_dead)
3495 {
3496 /* Record the death of the dest reg. */
3497 mark_set_regs (pbi, PATTERN (insn), insn);
3498
3499 insn = XEXP (note, 0);
3500 return PREV_INSN (insn);
3501 }
3502 else if (GET_CODE (PATTERN (insn)) == SET
3503 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3504 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3505 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3506 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3507 /* We have an insn to pop a constant amount off the stack.
3508 (Such insns use PLUS regardless of the direction of the stack,
3509 and any insn to adjust the stack by a constant is always a pop.)
3510 These insns, if not dead stores, have no effect on life. */
3511 ;
3512 else
3513 {
3514 /* Any regs live at the time of a call instruction must not go
3515 in a register clobbered by calls. Find all regs now live and
3516 record this for them. */
3517
3518 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3519 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3520 { REG_N_CALLS_CROSSED (i)++; });
3521
3522 /* Record sets. Do this even for dead instructions, since they
3523 would have killed the values if they hadn't been deleted. */
3524 mark_set_regs (pbi, PATTERN (insn), insn);
3525
3526 if (GET_CODE (insn) == CALL_INSN)
3527 {
3528 register int i;
3529 rtx note, cond;
3530
3531 cond = NULL_RTX;
3532 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3533 cond = COND_EXEC_TEST (PATTERN (insn));
3534
3535 /* Non-constant calls clobber memory. */
3536 if (! CONST_CALL_P (insn))
3537 free_EXPR_LIST_list (&pbi->mem_set_list);
3538
3539 /* There may be extra registers to be clobbered. */
3540 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3541 note;
3542 note = XEXP (note, 1))
3543 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3544 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3545 cond, insn, pbi->flags);
3546
3547 /* Calls change all call-used and global registers. */
3548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3549 if (call_used_regs[i] && ! global_regs[i]
3550 && ! fixed_regs[i])
3551 {
3552 /* We do not want REG_UNUSED notes for these registers. */
3553 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3554 cond, insn,
3555 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3556 }
3557 }
3558
3559 /* If an insn doesn't use CC0, it becomes dead since we assume
3560 that every insn clobbers it. So show it dead here;
3561 mark_used_regs will set it live if it is referenced. */
3562 pbi->cc0_live = 0;
3563
3564 /* Record uses. */
3565 if (! insn_is_dead)
3566 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3567
3568 /* Sometimes we may have inserted something before INSN (such as a move)
3569 when we make an auto-inc. So ensure we will scan those insns. */
3570 #ifdef AUTO_INC_DEC
3571 prev = PREV_INSN (insn);
3572 #endif
3573
3574 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3575 {
3576 register int i;
3577 rtx note, cond;
3578
3579 cond = NULL_RTX;
3580 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3581 cond = COND_EXEC_TEST (PATTERN (insn));
3582
3583 /* Calls use their arguments. */
3584 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3585 note;
3586 note = XEXP (note, 1))
3587 if (GET_CODE (XEXP (note, 0)) == USE)
3588 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3589 cond, insn);
3590
3591 /* The stack ptr is used (honorarily) by a CALL insn. */
3592 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3593
3594 /* Calls may also reference any of the global registers,
3595 so they are made live. */
3596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3597 if (global_regs[i])
3598 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3599 cond, insn);
3600 }
3601 }
3602
3603 /* On final pass, update counts of how many insns in which each reg
3604 is live. */
3605 if (flags & PROP_REG_INFO)
3606 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3607 { REG_LIVE_LENGTH (i)++; });
3608
3609 return prev;
3610 }
3611
3612 /* Initialize a propagate_block_info struct for public consumption.
3613 Note that the structure itself is opaque to this file, but that
3614 the user can use the regsets provided here. */
3615
3616 struct propagate_block_info *
3617 init_propagate_block_info (bb, live, local_set, flags)
3618 basic_block bb;
3619 regset live;
3620 regset local_set;
3621 int flags;
3622 {
3623 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3624
3625 pbi->bb = bb;
3626 pbi->reg_live = live;
3627 pbi->mem_set_list = NULL_RTX;
3628 pbi->local_set = local_set;
3629 pbi->cc0_live = 0;
3630 pbi->flags = flags;
3631
3632 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3633 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3634 else
3635 pbi->reg_next_use = NULL;
3636
3637 pbi->new_set = BITMAP_XMALLOC ();
3638
3639 #ifdef HAVE_conditional_execution
3640 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3641 free_reg_cond_life_info);
3642 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3643
3644 /* If this block ends in a conditional branch, for each register live
3645 from one side of the branch and not the other, record the register
3646 as conditionally dead. */
3647 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3648 && GET_CODE (bb->end) == JUMP_INSN
3649 && any_condjump_p (bb->end))
3650 {
3651 regset_head diff_head;
3652 regset diff = INITIALIZE_REG_SET (diff_head);
3653 basic_block bb_true, bb_false;
3654 rtx cond_true, cond_false;
3655 int i;
3656
3657 /* Identify the successor blocks. */
3658 bb_true = bb->succ->dest;
3659 if (bb->succ->succ_next != NULL)
3660 {
3661 bb_false = bb->succ->succ_next->dest;
3662
3663 if (bb->succ->flags & EDGE_FALLTHRU)
3664 {
3665 basic_block t = bb_false;
3666 bb_false = bb_true;
3667 bb_true = t;
3668 }
3669 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3670 abort ();
3671 }
3672 else
3673 {
3674 /* This can happen with a conditional jump to the next insn. */
3675 if (JUMP_LABEL (bb->end) != bb_true->head)
3676 abort ();
3677
3678 /* Simplest way to do nothing. */
3679 bb_false = bb_true;
3680 }
3681
3682 /* Extract the condition from the branch. */
3683 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3684 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3685 GET_MODE (cond_true), XEXP (cond_true, 0),
3686 XEXP (cond_true, 1));
3687 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3688 {
3689 rtx t = cond_false;
3690 cond_false = cond_true;
3691 cond_true = t;
3692 }
3693
3694 /* Compute which register lead different lives in the successors. */
3695 if (bitmap_operation (diff, bb_true->global_live_at_start,
3696 bb_false->global_live_at_start, BITMAP_XOR))
3697 {
3698 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3699 abort ();
3700 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3701
3702 /* For each such register, mark it conditionally dead. */
3703 EXECUTE_IF_SET_IN_REG_SET
3704 (diff, 0, i,
3705 {
3706 struct reg_cond_life_info *rcli;
3707 rtx cond;
3708
3709 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3710
3711 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3712 cond = cond_false;
3713 else
3714 cond = cond_true;
3715 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3716
3717 splay_tree_insert (pbi->reg_cond_dead, i,
3718 (splay_tree_value) rcli);
3719 });
3720 }
3721
3722 FREE_REG_SET (diff);
3723 }
3724 #endif
3725
3726 /* If this block has no successors, any stores to the frame that aren't
3727 used later in the block are dead. So make a pass over the block
3728 recording any such that are made and show them dead at the end. We do
3729 a very conservative and simple job here. */
3730 if ((flags & PROP_SCAN_DEAD_CODE)
3731 && (bb->succ == NULL
3732 || (bb->succ->succ_next == NULL
3733 && bb->succ->dest == EXIT_BLOCK_PTR)))
3734 {
3735 rtx insn;
3736 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3737 if (GET_CODE (insn) == INSN
3738 && GET_CODE (PATTERN (insn)) == SET
3739 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3740 {
3741 rtx mem = SET_DEST (PATTERN (insn));
3742
3743 if (XEXP (mem, 0) == frame_pointer_rtx
3744 || (GET_CODE (XEXP (mem, 0)) == PLUS
3745 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3746 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3747 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3748 }
3749 }
3750
3751 return pbi;
3752 }
3753
3754 /* Release a propagate_block_info struct. */
3755
3756 void
3757 free_propagate_block_info (pbi)
3758 struct propagate_block_info *pbi;
3759 {
3760 free_EXPR_LIST_list (&pbi->mem_set_list);
3761
3762 BITMAP_XFREE (pbi->new_set);
3763
3764 #ifdef HAVE_conditional_execution
3765 splay_tree_delete (pbi->reg_cond_dead);
3766 BITMAP_XFREE (pbi->reg_cond_reg);
3767 #endif
3768
3769 if (pbi->reg_next_use)
3770 free (pbi->reg_next_use);
3771
3772 free (pbi);
3773 }
3774
3775 /* Compute the registers live at the beginning of a basic block BB from
3776 those live at the end.
3777
3778 When called, REG_LIVE contains those live at the end. On return, it
3779 contains those live at the beginning.
3780
3781 LOCAL_SET, if non-null, will be set with all registers killed by
3782 this basic block. */
3783
3784 void
3785 propagate_block (bb, live, local_set, flags)
3786 basic_block bb;
3787 regset live;
3788 regset local_set;
3789 int flags;
3790 {
3791 struct propagate_block_info *pbi;
3792 rtx insn, prev;
3793
3794 pbi = init_propagate_block_info (bb, live, local_set, flags);
3795
3796 if (flags & PROP_REG_INFO)
3797 {
3798 register int i;
3799
3800 /* Process the regs live at the end of the block.
3801 Mark them as not local to any one basic block. */
3802 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3803 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3804 }
3805
3806 /* Scan the block an insn at a time from end to beginning. */
3807
3808 for (insn = bb->end; ; insn = prev)
3809 {
3810 /* If this is a call to `setjmp' et al, warn if any
3811 non-volatile datum is live. */
3812 if ((flags & PROP_REG_INFO)
3813 && GET_CODE (insn) == NOTE
3814 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3815 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3816
3817 prev = propagate_one_insn (pbi, insn);
3818
3819 if (insn == bb->head)
3820 break;
3821 }
3822
3823 free_propagate_block_info (pbi);
3824 }
3825 \f
3826 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3827 (SET expressions whose destinations are registers dead after the insn).
3828 NEEDED is the regset that says which regs are alive after the insn.
3829
3830 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3831
3832 If X is the entire body of an insn, NOTES contains the reg notes
3833 pertaining to the insn. */
3834
3835 static int
3836 insn_dead_p (pbi, x, call_ok, notes)
3837 struct propagate_block_info *pbi;
3838 rtx x;
3839 int call_ok;
3840 rtx notes ATTRIBUTE_UNUSED;
3841 {
3842 enum rtx_code code = GET_CODE (x);
3843
3844 #ifdef AUTO_INC_DEC
3845 /* If flow is invoked after reload, we must take existing AUTO_INC
3846 expresions into account. */
3847 if (reload_completed)
3848 {
3849 for ( ; notes; notes = XEXP (notes, 1))
3850 {
3851 if (REG_NOTE_KIND (notes) == REG_INC)
3852 {
3853 int regno = REGNO (XEXP (notes, 0));
3854
3855 /* Don't delete insns to set global regs. */
3856 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3857 || REGNO_REG_SET_P (pbi->reg_live, regno))
3858 return 0;
3859 }
3860 }
3861 }
3862 #endif
3863
3864 /* If setting something that's a reg or part of one,
3865 see if that register's altered value will be live. */
3866
3867 if (code == SET)
3868 {
3869 rtx r = SET_DEST (x);
3870
3871 #ifdef HAVE_cc0
3872 if (GET_CODE (r) == CC0)
3873 return ! pbi->cc0_live;
3874 #endif
3875
3876 /* A SET that is a subroutine call cannot be dead. */
3877 if (GET_CODE (SET_SRC (x)) == CALL)
3878 {
3879 if (! call_ok)
3880 return 0;
3881 }
3882
3883 /* Don't eliminate loads from volatile memory or volatile asms. */
3884 else if (volatile_refs_p (SET_SRC (x)))
3885 return 0;
3886
3887 if (GET_CODE (r) == MEM)
3888 {
3889 rtx temp;
3890
3891 if (MEM_VOLATILE_P (r))
3892 return 0;
3893
3894 /* Walk the set of memory locations we are currently tracking
3895 and see if one is an identical match to this memory location.
3896 If so, this memory write is dead (remember, we're walking
3897 backwards from the end of the block to the start). */
3898 temp = pbi->mem_set_list;
3899 while (temp)
3900 {
3901 if (rtx_equal_p (XEXP (temp, 0), r))
3902 return 1;
3903 temp = XEXP (temp, 1);
3904 }
3905 }
3906 else
3907 {
3908 while (GET_CODE (r) == SUBREG
3909 || GET_CODE (r) == STRICT_LOW_PART
3910 || GET_CODE (r) == ZERO_EXTRACT)
3911 r = XEXP (r, 0);
3912
3913 if (GET_CODE (r) == REG)
3914 {
3915 int regno = REGNO (r);
3916
3917 /* Obvious. */
3918 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3919 return 0;
3920
3921 /* If this is a hard register, verify that subsequent
3922 words are not needed. */
3923 if (regno < FIRST_PSEUDO_REGISTER)
3924 {
3925 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3926
3927 while (--n > 0)
3928 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3929 return 0;
3930 }
3931
3932 /* Don't delete insns to set global regs. */
3933 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3934 return 0;
3935
3936 /* Make sure insns to set the stack pointer aren't deleted. */
3937 if (regno == STACK_POINTER_REGNUM)
3938 return 0;
3939
3940 /* Make sure insns to set the frame pointer aren't deleted. */
3941 if (regno == FRAME_POINTER_REGNUM
3942 && (! reload_completed || frame_pointer_needed))
3943 return 0;
3944 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3945 if (regno == HARD_FRAME_POINTER_REGNUM
3946 && (! reload_completed || frame_pointer_needed))
3947 return 0;
3948 #endif
3949
3950 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3951 /* Make sure insns to set arg pointer are never deleted
3952 (if the arg pointer isn't fixed, there will be a USE
3953 for it, so we can treat it normally). */
3954 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3955 return 0;
3956 #endif
3957
3958 #ifdef PIC_OFFSET_TABLE_REGNUM
3959 /* Before reload, do not allow sets of the pic register
3960 to be deleted. Reload can insert references to
3961 constant pool memory anywhere in the function, making
3962 the PIC register live where it wasn't before. */
3963 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3964 && ! reload_completed)
3965 return 0;
3966 #endif
3967
3968 /* Otherwise, the set is dead. */
3969 return 1;
3970 }
3971 }
3972 }
3973
3974 /* If performing several activities, insn is dead if each activity
3975 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3976 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3977 worth keeping. */
3978 else if (code == PARALLEL)
3979 {
3980 int i = XVECLEN (x, 0);
3981
3982 for (i--; i >= 0; i--)
3983 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3984 && GET_CODE (XVECEXP (x, 0, i)) != USE
3985 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3986 return 0;
3987
3988 return 1;
3989 }
3990
3991 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3992 is not necessarily true for hard registers. */
3993 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3994 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3995 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3996 return 1;
3997
3998 /* We do not check other CLOBBER or USE here. An insn consisting of just
3999 a CLOBBER or just a USE should not be deleted. */
4000 return 0;
4001 }
4002
4003 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4004 return 1 if the entire library call is dead.
4005 This is true if INSN copies a register (hard or pseudo)
4006 and if the hard return reg of the call insn is dead.
4007 (The caller should have tested the destination of the SET inside
4008 INSN already for death.)
4009
4010 If this insn doesn't just copy a register, then we don't
4011 have an ordinary libcall. In that case, cse could not have
4012 managed to substitute the source for the dest later on,
4013 so we can assume the libcall is dead.
4014
4015 PBI is the block info giving pseudoregs live before this insn.
4016 NOTE is the REG_RETVAL note of the insn. */
4017
4018 static int
4019 libcall_dead_p (pbi, note, insn)
4020 struct propagate_block_info *pbi;
4021 rtx note;
4022 rtx insn;
4023 {
4024 rtx x = single_set (insn);
4025
4026 if (x)
4027 {
4028 register rtx r = SET_SRC (x);
4029 if (GET_CODE (r) == REG)
4030 {
4031 rtx call = XEXP (note, 0);
4032 rtx call_pat;
4033 register int i;
4034
4035 /* Find the call insn. */
4036 while (call != insn && GET_CODE (call) != CALL_INSN)
4037 call = NEXT_INSN (call);
4038
4039 /* If there is none, do nothing special,
4040 since ordinary death handling can understand these insns. */
4041 if (call == insn)
4042 return 0;
4043
4044 /* See if the hard reg holding the value is dead.
4045 If this is a PARALLEL, find the call within it. */
4046 call_pat = PATTERN (call);
4047 if (GET_CODE (call_pat) == PARALLEL)
4048 {
4049 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4050 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4051 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4052 break;
4053
4054 /* This may be a library call that is returning a value
4055 via invisible pointer. Do nothing special, since
4056 ordinary death handling can understand these insns. */
4057 if (i < 0)
4058 return 0;
4059
4060 call_pat = XVECEXP (call_pat, 0, i);
4061 }
4062
4063 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4064 }
4065 }
4066 return 1;
4067 }
4068
4069 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4070 live at function entry. Don't count global register variables, variables
4071 in registers that can be used for function arg passing, or variables in
4072 fixed hard registers. */
4073
4074 int
4075 regno_uninitialized (regno)
4076 int regno;
4077 {
4078 if (n_basic_blocks == 0
4079 || (regno < FIRST_PSEUDO_REGISTER
4080 && (global_regs[regno]
4081 || fixed_regs[regno]
4082 || FUNCTION_ARG_REGNO_P (regno))))
4083 return 0;
4084
4085 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4086 }
4087
4088 /* 1 if register REGNO was alive at a place where `setjmp' was called
4089 and was set more than once or is an argument.
4090 Such regs may be clobbered by `longjmp'. */
4091
4092 int
4093 regno_clobbered_at_setjmp (regno)
4094 int regno;
4095 {
4096 if (n_basic_blocks == 0)
4097 return 0;
4098
4099 return ((REG_N_SETS (regno) > 1
4100 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4101 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4102 }
4103 \f
4104 /* INSN references memory, possibly using autoincrement addressing modes.
4105 Find any entries on the mem_set_list that need to be invalidated due
4106 to an address change. */
4107
4108 static void
4109 invalidate_mems_from_autoinc (pbi, insn)
4110 struct propagate_block_info *pbi;
4111 rtx insn;
4112 {
4113 rtx note = REG_NOTES (insn);
4114 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4115 {
4116 if (REG_NOTE_KIND (note) == REG_INC)
4117 {
4118 rtx temp = pbi->mem_set_list;
4119 rtx prev = NULL_RTX;
4120 rtx next;
4121
4122 while (temp)
4123 {
4124 next = XEXP (temp, 1);
4125 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4126 {
4127 /* Splice temp out of list. */
4128 if (prev)
4129 XEXP (prev, 1) = next;
4130 else
4131 pbi->mem_set_list = next;
4132 free_EXPR_LIST_node (temp);
4133 }
4134 else
4135 prev = temp;
4136 temp = next;
4137 }
4138 }
4139 }
4140 }
4141
4142 /* Process the registers that are set within X. Their bits are set to
4143 1 in the regset DEAD, because they are dead prior to this insn.
4144
4145 If INSN is nonzero, it is the insn being processed.
4146
4147 FLAGS is the set of operations to perform. */
4148
4149 static void
4150 mark_set_regs (pbi, x, insn)
4151 struct propagate_block_info *pbi;
4152 rtx x, insn;
4153 {
4154 rtx cond = NULL_RTX;
4155 rtx link;
4156 enum rtx_code code;
4157
4158 if (insn)
4159 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4160 {
4161 if (REG_NOTE_KIND (link) == REG_INC)
4162 mark_set_1 (pbi, SET, XEXP (link, 0),
4163 (GET_CODE (x) == COND_EXEC
4164 ? COND_EXEC_TEST (x) : NULL_RTX),
4165 insn, pbi->flags);
4166 }
4167 retry:
4168 switch (code = GET_CODE (x))
4169 {
4170 case SET:
4171 case CLOBBER:
4172 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4173 return;
4174
4175 case COND_EXEC:
4176 cond = COND_EXEC_TEST (x);
4177 x = COND_EXEC_CODE (x);
4178 goto retry;
4179
4180 case PARALLEL:
4181 {
4182 register int i;
4183 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4184 {
4185 rtx sub = XVECEXP (x, 0, i);
4186 switch (code = GET_CODE (sub))
4187 {
4188 case COND_EXEC:
4189 if (cond != NULL_RTX)
4190 abort ();
4191
4192 cond = COND_EXEC_TEST (sub);
4193 sub = COND_EXEC_CODE (sub);
4194 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4195 break;
4196 /* FALLTHRU */
4197
4198 case SET:
4199 case CLOBBER:
4200 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4201 break;
4202
4203 default:
4204 break;
4205 }
4206 }
4207 break;
4208 }
4209
4210 default:
4211 break;
4212 }
4213 }
4214
4215 /* Process a single SET rtx, X. */
4216
4217 static void
4218 mark_set_1 (pbi, code, reg, cond, insn, flags)
4219 struct propagate_block_info *pbi;
4220 enum rtx_code code;
4221 rtx reg, cond, insn;
4222 int flags;
4223 {
4224 int regno_first = -1, regno_last = -1;
4225 int not_dead = 0;
4226 int i;
4227
4228 /* Some targets place small structures in registers for
4229 return values of functions. We have to detect this
4230 case specially here to get correct flow information. */
4231 if (GET_CODE (reg) == PARALLEL
4232 && GET_MODE (reg) == BLKmode)
4233 {
4234 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4235 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4236 return;
4237 }
4238
4239 /* Modifying just one hardware register of a multi-reg value or just a
4240 byte field of a register does not mean the value from before this insn
4241 is now dead. Of course, if it was dead after it's unused now. */
4242
4243 switch (GET_CODE (reg))
4244 {
4245 case ZERO_EXTRACT:
4246 case SIGN_EXTRACT:
4247 case STRICT_LOW_PART:
4248 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4249 do
4250 reg = XEXP (reg, 0);
4251 while (GET_CODE (reg) == SUBREG
4252 || GET_CODE (reg) == ZERO_EXTRACT
4253 || GET_CODE (reg) == SIGN_EXTRACT
4254 || GET_CODE (reg) == STRICT_LOW_PART);
4255 if (GET_CODE (reg) == MEM)
4256 break;
4257 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4258 /* FALLTHRU */
4259
4260 case REG:
4261 regno_last = regno_first = REGNO (reg);
4262 if (regno_first < FIRST_PSEUDO_REGISTER)
4263 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4264 break;
4265
4266 case SUBREG:
4267 if (GET_CODE (SUBREG_REG (reg)) == REG)
4268 {
4269 enum machine_mode outer_mode = GET_MODE (reg);
4270 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4271
4272 /* Identify the range of registers affected. This is moderately
4273 tricky for hard registers. See alter_subreg. */
4274
4275 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4276 if (regno_first < FIRST_PSEUDO_REGISTER)
4277 {
4278 #ifdef ALTER_HARD_SUBREG
4279 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4280 inner_mode, regno_first);
4281 #else
4282 regno_first += SUBREG_WORD (reg);
4283 #endif
4284 regno_last = (regno_first
4285 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4286
4287 /* Since we've just adjusted the register number ranges, make
4288 sure REG matches. Otherwise some_was_live will be clear
4289 when it shouldn't have been, and we'll create incorrect
4290 REG_UNUSED notes. */
4291 reg = gen_rtx_REG (outer_mode, regno_first);
4292 }
4293 else
4294 {
4295 /* If the number of words in the subreg is less than the number
4296 of words in the full register, we have a well-defined partial
4297 set. Otherwise the high bits are undefined.
4298
4299 This is only really applicable to pseudos, since we just took
4300 care of multi-word hard registers. */
4301 if (((GET_MODE_SIZE (outer_mode)
4302 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4303 < ((GET_MODE_SIZE (inner_mode)
4304 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4305 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4306
4307 reg = SUBREG_REG (reg);
4308 }
4309 }
4310 else
4311 reg = SUBREG_REG (reg);
4312 break;
4313
4314 default:
4315 break;
4316 }
4317
4318 /* If this set is a MEM, then it kills any aliased writes.
4319 If this set is a REG, then it kills any MEMs which use the reg. */
4320 if (flags & PROP_SCAN_DEAD_CODE)
4321 {
4322 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4323 {
4324 rtx temp = pbi->mem_set_list;
4325 rtx prev = NULL_RTX;
4326 rtx next;
4327
4328 while (temp)
4329 {
4330 next = XEXP (temp, 1);
4331 if ((GET_CODE (reg) == MEM
4332 && output_dependence (XEXP (temp, 0), reg))
4333 || (GET_CODE (reg) == REG
4334 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4335 {
4336 /* Splice this entry out of the list. */
4337 if (prev)
4338 XEXP (prev, 1) = next;
4339 else
4340 pbi->mem_set_list = next;
4341 free_EXPR_LIST_node (temp);
4342 }
4343 else
4344 prev = temp;
4345 temp = next;
4346 }
4347 }
4348
4349 /* If the memory reference had embedded side effects (autoincrement
4350 address modes. Then we may need to kill some entries on the
4351 memory set list. */
4352 if (insn && GET_CODE (reg) == MEM)
4353 invalidate_mems_from_autoinc (pbi, insn);
4354
4355 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4356 /* ??? With more effort we could track conditional memory life. */
4357 && ! cond
4358 /* We do not know the size of a BLKmode store, so we do not track
4359 them for redundant store elimination. */
4360 && GET_MODE (reg) != BLKmode
4361 /* There are no REG_INC notes for SP, so we can't assume we'll see
4362 everything that invalidates it. To be safe, don't eliminate any
4363 stores though SP; none of them should be redundant anyway. */
4364 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4365 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4366 }
4367
4368 if (GET_CODE (reg) == REG
4369 && ! (regno_first == FRAME_POINTER_REGNUM
4370 && (! reload_completed || frame_pointer_needed))
4371 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4372 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4373 && (! reload_completed || frame_pointer_needed))
4374 #endif
4375 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4376 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4377 #endif
4378 )
4379 {
4380 int some_was_live = 0, some_was_dead = 0;
4381
4382 for (i = regno_first; i <= regno_last; ++i)
4383 {
4384 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4385 if (pbi->local_set)
4386 SET_REGNO_REG_SET (pbi->local_set, i);
4387 if (code != CLOBBER)
4388 SET_REGNO_REG_SET (pbi->new_set, i);
4389
4390 some_was_live |= needed_regno;
4391 some_was_dead |= ! needed_regno;
4392 }
4393
4394 #ifdef HAVE_conditional_execution
4395 /* Consider conditional death in deciding that the register needs
4396 a death note. */
4397 if (some_was_live && ! not_dead
4398 /* The stack pointer is never dead. Well, not strictly true,
4399 but it's very difficult to tell from here. Hopefully
4400 combine_stack_adjustments will fix up the most egregious
4401 errors. */
4402 && regno_first != STACK_POINTER_REGNUM)
4403 {
4404 for (i = regno_first; i <= regno_last; ++i)
4405 if (! mark_regno_cond_dead (pbi, i, cond))
4406 not_dead = 1;
4407 }
4408 #endif
4409
4410 /* Additional data to record if this is the final pass. */
4411 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4412 | PROP_DEATH_NOTES | PROP_AUTOINC))
4413 {
4414 register rtx y;
4415 register int blocknum = pbi->bb->index;
4416
4417 y = NULL_RTX;
4418 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4419 {
4420 y = pbi->reg_next_use[regno_first];
4421
4422 /* The next use is no longer next, since a store intervenes. */
4423 for (i = regno_first; i <= regno_last; ++i)
4424 pbi->reg_next_use[i] = 0;
4425 }
4426
4427 if (flags & PROP_REG_INFO)
4428 {
4429 for (i = regno_first; i <= regno_last; ++i)
4430 {
4431 /* Count (weighted) references, stores, etc. This counts a
4432 register twice if it is modified, but that is correct. */
4433 REG_N_SETS (i) += 1;
4434 REG_N_REFS (i) += (optimize_size ? 1
4435 : pbi->bb->loop_depth + 1);
4436
4437 /* The insns where a reg is live are normally counted
4438 elsewhere, but we want the count to include the insn
4439 where the reg is set, and the normal counting mechanism
4440 would not count it. */
4441 REG_LIVE_LENGTH (i) += 1;
4442 }
4443
4444 /* If this is a hard reg, record this function uses the reg. */
4445 if (regno_first < FIRST_PSEUDO_REGISTER)
4446 {
4447 for (i = regno_first; i <= regno_last; i++)
4448 regs_ever_live[i] = 1;
4449 }
4450 else
4451 {
4452 /* Keep track of which basic blocks each reg appears in. */
4453 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4454 REG_BASIC_BLOCK (regno_first) = blocknum;
4455 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4456 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4457 }
4458 }
4459
4460 if (! some_was_dead)
4461 {
4462 if (flags & PROP_LOG_LINKS)
4463 {
4464 /* Make a logical link from the next following insn
4465 that uses this register, back to this insn.
4466 The following insns have already been processed.
4467
4468 We don't build a LOG_LINK for hard registers containing
4469 in ASM_OPERANDs. If these registers get replaced,
4470 we might wind up changing the semantics of the insn,
4471 even if reload can make what appear to be valid
4472 assignments later. */
4473 if (y && (BLOCK_NUM (y) == blocknum)
4474 && (regno_first >= FIRST_PSEUDO_REGISTER
4475 || asm_noperands (PATTERN (y)) < 0))
4476 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4477 }
4478 }
4479 else if (not_dead)
4480 ;
4481 else if (! some_was_live)
4482 {
4483 if (flags & PROP_REG_INFO)
4484 REG_N_DEATHS (regno_first) += 1;
4485
4486 if (flags & PROP_DEATH_NOTES)
4487 {
4488 /* Note that dead stores have already been deleted
4489 when possible. If we get here, we have found a
4490 dead store that cannot be eliminated (because the
4491 same insn does something useful). Indicate this
4492 by marking the reg being set as dying here. */
4493 REG_NOTES (insn)
4494 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4495 }
4496 }
4497 else
4498 {
4499 if (flags & PROP_DEATH_NOTES)
4500 {
4501 /* This is a case where we have a multi-word hard register
4502 and some, but not all, of the words of the register are
4503 needed in subsequent insns. Write REG_UNUSED notes
4504 for those parts that were not needed. This case should
4505 be rare. */
4506
4507 for (i = regno_first; i <= regno_last; ++i)
4508 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4509 REG_NOTES (insn)
4510 = alloc_EXPR_LIST (REG_UNUSED,
4511 gen_rtx_REG (reg_raw_mode[i], i),
4512 REG_NOTES (insn));
4513 }
4514 }
4515 }
4516
4517 /* Mark the register as being dead. */
4518 if (some_was_live
4519 && ! not_dead
4520 /* The stack pointer is never dead. Well, not strictly true,
4521 but it's very difficult to tell from here. Hopefully
4522 combine_stack_adjustments will fix up the most egregious
4523 errors. */
4524 && regno_first != STACK_POINTER_REGNUM)
4525 {
4526 for (i = regno_first; i <= regno_last; ++i)
4527 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4528 }
4529 }
4530 else if (GET_CODE (reg) == REG)
4531 {
4532 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4533 pbi->reg_next_use[regno_first] = 0;
4534 }
4535
4536 /* If this is the last pass and this is a SCRATCH, show it will be dying
4537 here and count it. */
4538 else if (GET_CODE (reg) == SCRATCH)
4539 {
4540 if (flags & PROP_DEATH_NOTES)
4541 REG_NOTES (insn)
4542 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4543 }
4544 }
4545 \f
4546 #ifdef HAVE_conditional_execution
4547 /* Mark REGNO conditionally dead. Return true if the register is
4548 now unconditionally dead. */
4549
4550 static int
4551 mark_regno_cond_dead (pbi, regno, cond)
4552 struct propagate_block_info *pbi;
4553 int regno;
4554 rtx cond;
4555 {
4556 /* If this is a store to a predicate register, the value of the
4557 predicate is changing, we don't know that the predicate as seen
4558 before is the same as that seen after. Flush all dependent
4559 conditions from reg_cond_dead. This will make all such
4560 conditionally live registers unconditionally live. */
4561 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4562 flush_reg_cond_reg (pbi, regno);
4563
4564 /* If this is an unconditional store, remove any conditional
4565 life that may have existed. */
4566 if (cond == NULL_RTX)
4567 splay_tree_remove (pbi->reg_cond_dead, regno);
4568 else
4569 {
4570 splay_tree_node node;
4571 struct reg_cond_life_info *rcli;
4572 rtx ncond;
4573
4574 /* Otherwise this is a conditional set. Record that fact.
4575 It may have been conditionally used, or there may be a
4576 subsequent set with a complimentary condition. */
4577
4578 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4579 if (node == NULL)
4580 {
4581 /* The register was unconditionally live previously.
4582 Record the current condition as the condition under
4583 which it is dead. */
4584 rcli = (struct reg_cond_life_info *)
4585 xmalloc (sizeof (*rcli));
4586 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4587 splay_tree_insert (pbi->reg_cond_dead, regno,
4588 (splay_tree_value) rcli);
4589
4590 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4591 REGNO (XEXP (cond, 0)));
4592
4593 /* Not unconditionaly dead. */
4594 return 0;
4595 }
4596 else
4597 {
4598 /* The register was conditionally live previously.
4599 Add the new condition to the old. */
4600 rcli = (struct reg_cond_life_info *) node->value;
4601 ncond = rcli->condition;
4602 ncond = ior_reg_cond (ncond, cond);
4603
4604 /* If the register is now unconditionally dead,
4605 remove the entry in the splay_tree. */
4606 if (ncond == const1_rtx)
4607 splay_tree_remove (pbi->reg_cond_dead, regno);
4608 else
4609 {
4610 rcli->condition = ncond;
4611
4612 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4613 REGNO (XEXP (cond, 0)));
4614
4615 /* Not unconditionaly dead. */
4616 return 0;
4617 }
4618 }
4619 }
4620
4621 return 1;
4622 }
4623
4624 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4625
4626 static void
4627 free_reg_cond_life_info (value)
4628 splay_tree_value value;
4629 {
4630 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4631 free_EXPR_LIST_list (&rcli->condition);
4632 free (rcli);
4633 }
4634
4635 /* Helper function for flush_reg_cond_reg. */
4636
4637 static int
4638 flush_reg_cond_reg_1 (node, data)
4639 splay_tree_node node;
4640 void *data;
4641 {
4642 struct reg_cond_life_info *rcli;
4643 int *xdata = (int *) data;
4644 unsigned int regno = xdata[0];
4645 rtx c, *prev;
4646
4647 /* Don't need to search if last flushed value was farther on in
4648 the in-order traversal. */
4649 if (xdata[1] >= (int) node->key)
4650 return 0;
4651
4652 /* Splice out portions of the expression that refer to regno. */
4653 rcli = (struct reg_cond_life_info *) node->value;
4654 c = *(prev = &rcli->condition);
4655 while (c)
4656 {
4657 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4658 {
4659 rtx next = XEXP (c, 1);
4660 free_EXPR_LIST_node (c);
4661 c = *prev = next;
4662 }
4663 else
4664 c = *(prev = &XEXP (c, 1));
4665 }
4666
4667 /* If the entire condition is now NULL, signal the node to be removed. */
4668 if (! rcli->condition)
4669 {
4670 xdata[1] = node->key;
4671 return -1;
4672 }
4673 else
4674 return 0;
4675 }
4676
4677 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4678
4679 static void
4680 flush_reg_cond_reg (pbi, regno)
4681 struct propagate_block_info *pbi;
4682 int regno;
4683 {
4684 int pair[2];
4685
4686 pair[0] = regno;
4687 pair[1] = -1;
4688 while (splay_tree_foreach (pbi->reg_cond_dead,
4689 flush_reg_cond_reg_1, pair) == -1)
4690 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4691
4692 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4693 }
4694
4695 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4696 We actually use EXPR_LIST to chain the sub-expressions together
4697 instead of IOR because it's easier to manipulate and we have
4698 the lists.c functions to reuse nodes.
4699
4700 Return a new rtl expression as appropriate. */
4701
4702 static rtx
4703 ior_reg_cond (old, x)
4704 rtx old, x;
4705 {
4706 enum rtx_code x_code;
4707 rtx x_reg;
4708 rtx c;
4709
4710 /* We expect these conditions to be of the form (eq reg 0). */
4711 x_code = GET_CODE (x);
4712 if (GET_RTX_CLASS (x_code) != '<'
4713 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4714 || XEXP (x, 1) != const0_rtx)
4715 abort ();
4716
4717 /* Search the expression for an existing sub-expression of X_REG. */
4718 for (c = old; c ; c = XEXP (c, 1))
4719 {
4720 rtx y = XEXP (c, 0);
4721 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4722 {
4723 /* If we find X already present in OLD, we need do nothing. */
4724 if (GET_CODE (y) == x_code)
4725 return old;
4726
4727 /* If we find X being a compliment of a condition in OLD,
4728 then the entire condition is true. */
4729 if (GET_CODE (y) == reverse_condition (x_code))
4730 return const1_rtx;
4731 }
4732 }
4733
4734 /* Otherwise just add to the chain. */
4735 return alloc_EXPR_LIST (0, x, old);
4736 }
4737
4738 static rtx
4739 not_reg_cond (x)
4740 rtx x;
4741 {
4742 enum rtx_code x_code;
4743 rtx x_reg;
4744
4745 /* We expect these conditions to be of the form (eq reg 0). */
4746 x_code = GET_CODE (x);
4747 if (GET_RTX_CLASS (x_code) != '<'
4748 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4749 || XEXP (x, 1) != const0_rtx)
4750 abort ();
4751
4752 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4753 VOIDmode, x_reg, const0_rtx),
4754 NULL_RTX);
4755 }
4756
4757 static rtx
4758 nand_reg_cond (old, x)
4759 rtx old, x;
4760 {
4761 enum rtx_code x_code;
4762 rtx x_reg;
4763 rtx c, *prev;
4764
4765 /* We expect these conditions to be of the form (eq reg 0). */
4766 x_code = GET_CODE (x);
4767 if (GET_RTX_CLASS (x_code) != '<'
4768 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4769 || XEXP (x, 1) != const0_rtx)
4770 abort ();
4771
4772 /* Search the expression for an existing sub-expression of X_REG. */
4773
4774 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4775 {
4776 rtx y = XEXP (c, 0);
4777 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4778 {
4779 /* If we find X already present in OLD, then we need to
4780 splice it out. */
4781 if (GET_CODE (y) == x_code)
4782 {
4783 *prev = XEXP (c, 1);
4784 free_EXPR_LIST_node (c);
4785 return old ? old : const0_rtx;
4786 }
4787
4788 /* If we find X being a compliment of a condition in OLD,
4789 then we need do nothing. */
4790 if (GET_CODE (y) == reverse_condition (x_code))
4791 return old;
4792 }
4793 }
4794
4795 /* Otherwise, by implication, the register in question is now live for
4796 the inverse of the condition X. */
4797 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4798 VOIDmode, x_reg, const0_rtx),
4799 old);
4800 }
4801 #endif /* HAVE_conditional_execution */
4802 \f
4803 #ifdef AUTO_INC_DEC
4804
4805 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4806 reference. */
4807
4808 static void
4809 find_auto_inc (pbi, x, insn)
4810 struct propagate_block_info *pbi;
4811 rtx x;
4812 rtx insn;
4813 {
4814 rtx addr = XEXP (x, 0);
4815 HOST_WIDE_INT offset = 0;
4816 rtx set;
4817
4818 /* Here we detect use of an index register which might be good for
4819 postincrement, postdecrement, preincrement, or predecrement. */
4820
4821 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4822 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4823
4824 if (GET_CODE (addr) == REG)
4825 {
4826 register rtx y;
4827 register int size = GET_MODE_SIZE (GET_MODE (x));
4828 rtx use;
4829 rtx incr;
4830 int regno = REGNO (addr);
4831
4832 /* Is the next use an increment that might make auto-increment? */
4833 if ((incr = pbi->reg_next_use[regno]) != 0
4834 && (set = single_set (incr)) != 0
4835 && GET_CODE (set) == SET
4836 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4837 /* Can't add side effects to jumps; if reg is spilled and
4838 reloaded, there's no way to store back the altered value. */
4839 && GET_CODE (insn) != JUMP_INSN
4840 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4841 && XEXP (y, 0) == addr
4842 && GET_CODE (XEXP (y, 1)) == CONST_INT
4843 && ((HAVE_POST_INCREMENT
4844 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4845 || (HAVE_POST_DECREMENT
4846 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4847 || (HAVE_PRE_INCREMENT
4848 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4849 || (HAVE_PRE_DECREMENT
4850 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4851 /* Make sure this reg appears only once in this insn. */
4852 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4853 use != 0 && use != (rtx) 1))
4854 {
4855 rtx q = SET_DEST (set);
4856 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4857 ? (offset ? PRE_INC : POST_INC)
4858 : (offset ? PRE_DEC : POST_DEC));
4859
4860 if (dead_or_set_p (incr, addr)
4861 /* Mustn't autoinc an eliminable register. */
4862 && (regno >= FIRST_PSEUDO_REGISTER
4863 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4864 {
4865 /* This is the simple case. Try to make the auto-inc. If
4866 we can't, we are done. Otherwise, we will do any
4867 needed updates below. */
4868 if (! validate_change (insn, &XEXP (x, 0),
4869 gen_rtx_fmt_e (inc_code, Pmode, addr),
4870 0))
4871 return;
4872 }
4873 else if (GET_CODE (q) == REG
4874 /* PREV_INSN used here to check the semi-open interval
4875 [insn,incr). */
4876 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4877 /* We must also check for sets of q as q may be
4878 a call clobbered hard register and there may
4879 be a call between PREV_INSN (insn) and incr. */
4880 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4881 {
4882 /* We have *p followed sometime later by q = p+size.
4883 Both p and q must be live afterward,
4884 and q is not used between INSN and its assignment.
4885 Change it to q = p, ...*q..., q = q+size.
4886 Then fall into the usual case. */
4887 rtx insns, temp;
4888
4889 start_sequence ();
4890 emit_move_insn (q, addr);
4891 insns = get_insns ();
4892 end_sequence ();
4893
4894 if (basic_block_for_insn)
4895 for (temp = insns; temp; temp = NEXT_INSN (temp))
4896 set_block_for_insn (temp, pbi->bb);
4897
4898 /* If we can't make the auto-inc, or can't make the
4899 replacement into Y, exit. There's no point in making
4900 the change below if we can't do the auto-inc and doing
4901 so is not correct in the pre-inc case. */
4902
4903 validate_change (insn, &XEXP (x, 0),
4904 gen_rtx_fmt_e (inc_code, Pmode, q),
4905 1);
4906 validate_change (incr, &XEXP (y, 0), q, 1);
4907 if (! apply_change_group ())
4908 return;
4909
4910 /* We now know we'll be doing this change, so emit the
4911 new insn(s) and do the updates. */
4912 emit_insns_before (insns, insn);
4913
4914 if (pbi->bb->head == insn)
4915 pbi->bb->head = insns;
4916
4917 /* INCR will become a NOTE and INSN won't contain a
4918 use of ADDR. If a use of ADDR was just placed in
4919 the insn before INSN, make that the next use.
4920 Otherwise, invalidate it. */
4921 if (GET_CODE (PREV_INSN (insn)) == INSN
4922 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4923 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4924 pbi->reg_next_use[regno] = PREV_INSN (insn);
4925 else
4926 pbi->reg_next_use[regno] = 0;
4927
4928 addr = q;
4929 regno = REGNO (q);
4930
4931 /* REGNO is now used in INCR which is below INSN, but it
4932 previously wasn't live here. If we don't mark it as
4933 live, we'll put a REG_DEAD note for it on this insn,
4934 which is incorrect. */
4935 SET_REGNO_REG_SET (pbi->reg_live, regno);
4936
4937 /* If there are any calls between INSN and INCR, show
4938 that REGNO now crosses them. */
4939 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4940 if (GET_CODE (temp) == CALL_INSN)
4941 REG_N_CALLS_CROSSED (regno)++;
4942 }
4943 else
4944 return;
4945
4946 /* If we haven't returned, it means we were able to make the
4947 auto-inc, so update the status. First, record that this insn
4948 has an implicit side effect. */
4949
4950 REG_NOTES (insn)
4951 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4952
4953 /* Modify the old increment-insn to simply copy
4954 the already-incremented value of our register. */
4955 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4956 abort ();
4957
4958 /* If that makes it a no-op (copying the register into itself) delete
4959 it so it won't appear to be a "use" and a "set" of this
4960 register. */
4961 if (SET_DEST (set) == addr)
4962 {
4963 /* If the original source was dead, it's dead now. */
4964 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4965 if (note && XEXP (note, 0) != addr)
4966 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4967
4968 PUT_CODE (incr, NOTE);
4969 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4970 NOTE_SOURCE_FILE (incr) = 0;
4971 }
4972
4973 if (regno >= FIRST_PSEUDO_REGISTER)
4974 {
4975 /* Count an extra reference to the reg. When a reg is
4976 incremented, spilling it is worse, so we want to make
4977 that less likely. */
4978 REG_N_REFS (regno) += (optimize_size ? 1
4979 : pbi->bb->loop_depth + 1);
4980
4981 /* Count the increment as a setting of the register,
4982 even though it isn't a SET in rtl. */
4983 REG_N_SETS (regno)++;
4984 }
4985 }
4986 }
4987 }
4988 #endif /* AUTO_INC_DEC */
4989 \f
4990 static void
4991 mark_used_reg (pbi, reg, cond, insn)
4992 struct propagate_block_info *pbi;
4993 rtx reg;
4994 rtx cond ATTRIBUTE_UNUSED;
4995 rtx insn;
4996 {
4997 int regno = REGNO (reg);
4998 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4999 int some_was_dead = ! some_was_live;
5000 int some_not_set;
5001 int n;
5002
5003 /* A hard reg in a wide mode may really be multiple registers.
5004 If so, mark all of them just like the first. */
5005 if (regno < FIRST_PSEUDO_REGISTER)
5006 {
5007 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5008 while (--n > 0)
5009 {
5010 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5011 some_was_live |= needed_regno;
5012 some_was_dead |= ! needed_regno;
5013 }
5014 }
5015
5016 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5017 {
5018 /* Record where each reg is used, so when the reg is set we know
5019 the next insn that uses it. */
5020 pbi->reg_next_use[regno] = insn;
5021 }
5022
5023 if (pbi->flags & PROP_REG_INFO)
5024 {
5025 if (regno < FIRST_PSEUDO_REGISTER)
5026 {
5027 /* If this is a register we are going to try to eliminate,
5028 don't mark it live here. If we are successful in
5029 eliminating it, it need not be live unless it is used for
5030 pseudos, in which case it will have been set live when it
5031 was allocated to the pseudos. If the register will not
5032 be eliminated, reload will set it live at that point.
5033
5034 Otherwise, record that this function uses this register. */
5035 /* ??? The PPC backend tries to "eliminate" on the pic
5036 register to itself. This should be fixed. In the mean
5037 time, hack around it. */
5038
5039 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5040 && (regno == FRAME_POINTER_REGNUM
5041 || regno == ARG_POINTER_REGNUM)))
5042 {
5043 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5044 do
5045 regs_ever_live[regno + --n] = 1;
5046 while (n > 0);
5047 }
5048 }
5049 else
5050 {
5051 /* Keep track of which basic block each reg appears in. */
5052
5053 register int blocknum = pbi->bb->index;
5054 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5055 REG_BASIC_BLOCK (regno) = blocknum;
5056 else if (REG_BASIC_BLOCK (regno) != blocknum)
5057 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5058
5059 /* Count (weighted) number of uses of each reg. */
5060 REG_N_REFS (regno) += (optimize_size ? 1
5061 : pbi->bb->loop_depth + 1);
5062 }
5063 }
5064
5065 /* Find out if any of the register was set this insn. */
5066 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5067 if (regno < FIRST_PSEUDO_REGISTER)
5068 {
5069 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5070 while (--n > 0)
5071 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5072 }
5073
5074 /* Record and count the insns in which a reg dies. If it is used in
5075 this insn and was dead below the insn then it dies in this insn.
5076 If it was set in this insn, we do not make a REG_DEAD note;
5077 likewise if we already made such a note. */
5078 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5079 && some_was_dead
5080 && some_not_set)
5081 {
5082 /* Check for the case where the register dying partially
5083 overlaps the register set by this insn. */
5084 if (regno < FIRST_PSEUDO_REGISTER
5085 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5086 {
5087 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5088 while (--n >= 0)
5089 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5090 }
5091
5092 /* If none of the words in X is needed, make a REG_DEAD note.
5093 Otherwise, we must make partial REG_DEAD notes. */
5094 if (! some_was_live)
5095 {
5096 if ((pbi->flags & PROP_DEATH_NOTES)
5097 && ! find_regno_note (insn, REG_DEAD, regno))
5098 REG_NOTES (insn)
5099 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5100
5101 if (pbi->flags & PROP_REG_INFO)
5102 REG_N_DEATHS (regno)++;
5103 }
5104 else
5105 {
5106 /* Don't make a REG_DEAD note for a part of a register
5107 that is set in the insn. */
5108
5109 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5110 for (; n >= regno; n--)
5111 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5112 && ! dead_or_set_regno_p (insn, n))
5113 REG_NOTES (insn)
5114 = alloc_EXPR_LIST (REG_DEAD,
5115 gen_rtx_REG (reg_raw_mode[n], n),
5116 REG_NOTES (insn));
5117 }
5118 }
5119
5120 SET_REGNO_REG_SET (pbi->reg_live, regno);
5121 if (regno < FIRST_PSEUDO_REGISTER)
5122 {
5123 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5124 while (--n > 0)
5125 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5126 }
5127
5128 #ifdef HAVE_conditional_execution
5129 /* If this is a conditional use, record that fact. If it is later
5130 conditionally set, we'll know to kill the register. */
5131 if (cond != NULL_RTX)
5132 {
5133 splay_tree_node node;
5134 struct reg_cond_life_info *rcli;
5135 rtx ncond;
5136
5137 if (some_was_live)
5138 {
5139 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5140 if (node == NULL)
5141 {
5142 /* The register was unconditionally live previously.
5143 No need to do anything. */
5144 }
5145 else
5146 {
5147 /* The register was conditionally live previously.
5148 Subtract the new life cond from the old death cond. */
5149 rcli = (struct reg_cond_life_info *) node->value;
5150 ncond = rcli->condition;
5151 ncond = nand_reg_cond (ncond, cond);
5152
5153 /* If the register is now unconditionally live, remove the
5154 entry in the splay_tree. */
5155 if (ncond == const0_rtx)
5156 {
5157 rcli->condition = NULL_RTX;
5158 splay_tree_remove (pbi->reg_cond_dead, regno);
5159 }
5160 else
5161 rcli->condition = ncond;
5162 }
5163 }
5164 else
5165 {
5166 /* The register was not previously live at all. Record
5167 the condition under which it is still dead. */
5168 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5169 rcli->condition = not_reg_cond (cond);
5170 splay_tree_insert (pbi->reg_cond_dead, regno,
5171 (splay_tree_value) rcli);
5172 }
5173 }
5174 else if (some_was_live)
5175 {
5176 splay_tree_node node;
5177 struct reg_cond_life_info *rcli;
5178
5179 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5180 if (node != NULL)
5181 {
5182 /* The register was conditionally live previously, but is now
5183 unconditionally so. Remove it from the conditionally dead
5184 list, so that a conditional set won't cause us to think
5185 it dead. */
5186 rcli = (struct reg_cond_life_info *) node->value;
5187 rcli->condition = NULL_RTX;
5188 splay_tree_remove (pbi->reg_cond_dead, regno);
5189 }
5190 }
5191
5192 #endif
5193 }
5194
5195 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5196 This is done assuming the registers needed from X are those that
5197 have 1-bits in PBI->REG_LIVE.
5198
5199 INSN is the containing instruction. If INSN is dead, this function
5200 is not called. */
5201
5202 static void
5203 mark_used_regs (pbi, x, cond, insn)
5204 struct propagate_block_info *pbi;
5205 rtx x, cond, insn;
5206 {
5207 register RTX_CODE code;
5208 register int regno;
5209 int flags = pbi->flags;
5210
5211 retry:
5212 code = GET_CODE (x);
5213 switch (code)
5214 {
5215 case LABEL_REF:
5216 case SYMBOL_REF:
5217 case CONST_INT:
5218 case CONST:
5219 case CONST_DOUBLE:
5220 case PC:
5221 case ADDR_VEC:
5222 case ADDR_DIFF_VEC:
5223 return;
5224
5225 #ifdef HAVE_cc0
5226 case CC0:
5227 pbi->cc0_live = 1;
5228 return;
5229 #endif
5230
5231 case CLOBBER:
5232 /* If we are clobbering a MEM, mark any registers inside the address
5233 as being used. */
5234 if (GET_CODE (XEXP (x, 0)) == MEM)
5235 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5236 return;
5237
5238 case MEM:
5239 /* Don't bother watching stores to mems if this is not the
5240 final pass. We'll not be deleting dead stores this round. */
5241 if (flags & PROP_SCAN_DEAD_CODE)
5242 {
5243 /* Invalidate the data for the last MEM stored, but only if MEM is
5244 something that can be stored into. */
5245 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5246 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5247 ; /* needn't clear the memory set list */
5248 else
5249 {
5250 rtx temp = pbi->mem_set_list;
5251 rtx prev = NULL_RTX;
5252 rtx next;
5253
5254 while (temp)
5255 {
5256 next = XEXP (temp, 1);
5257 if (anti_dependence (XEXP (temp, 0), x))
5258 {
5259 /* Splice temp out of the list. */
5260 if (prev)
5261 XEXP (prev, 1) = next;
5262 else
5263 pbi->mem_set_list = next;
5264 free_EXPR_LIST_node (temp);
5265 }
5266 else
5267 prev = temp;
5268 temp = next;
5269 }
5270 }
5271
5272 /* If the memory reference had embedded side effects (autoincrement
5273 address modes. Then we may need to kill some entries on the
5274 memory set list. */
5275 if (insn)
5276 invalidate_mems_from_autoinc (pbi, insn);
5277 }
5278
5279 #ifdef AUTO_INC_DEC
5280 if (flags & PROP_AUTOINC)
5281 find_auto_inc (pbi, x, insn);
5282 #endif
5283 break;
5284
5285 case SUBREG:
5286 #ifdef CLASS_CANNOT_CHANGE_MODE
5287 if (GET_CODE (SUBREG_REG (x)) == REG
5288 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5289 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5290 GET_MODE (SUBREG_REG (x))))
5291 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5292 #endif
5293
5294 /* While we're here, optimize this case. */
5295 x = SUBREG_REG (x);
5296 if (GET_CODE (x) != REG)
5297 goto retry;
5298 /* FALLTHRU */
5299
5300 case REG:
5301 /* See a register other than being set => mark it as needed. */
5302 mark_used_reg (pbi, x, cond, insn);
5303 return;
5304
5305 case SET:
5306 {
5307 register rtx testreg = SET_DEST (x);
5308 int mark_dest = 0;
5309
5310 /* If storing into MEM, don't show it as being used. But do
5311 show the address as being used. */
5312 if (GET_CODE (testreg) == MEM)
5313 {
5314 #ifdef AUTO_INC_DEC
5315 if (flags & PROP_AUTOINC)
5316 find_auto_inc (pbi, testreg, insn);
5317 #endif
5318 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5319 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5320 return;
5321 }
5322
5323 /* Storing in STRICT_LOW_PART is like storing in a reg
5324 in that this SET might be dead, so ignore it in TESTREG.
5325 but in some other ways it is like using the reg.
5326
5327 Storing in a SUBREG or a bit field is like storing the entire
5328 register in that if the register's value is not used
5329 then this SET is not needed. */
5330 while (GET_CODE (testreg) == STRICT_LOW_PART
5331 || GET_CODE (testreg) == ZERO_EXTRACT
5332 || GET_CODE (testreg) == SIGN_EXTRACT
5333 || GET_CODE (testreg) == SUBREG)
5334 {
5335 #ifdef CLASS_CANNOT_CHANGE_MODE
5336 if (GET_CODE (testreg) == SUBREG
5337 && GET_CODE (SUBREG_REG (testreg)) == REG
5338 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5339 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5340 GET_MODE (testreg)))
5341 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5342 #endif
5343
5344 /* Modifying a single register in an alternate mode
5345 does not use any of the old value. But these other
5346 ways of storing in a register do use the old value. */
5347 if (GET_CODE (testreg) == SUBREG
5348 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5349 ;
5350 else
5351 mark_dest = 1;
5352
5353 testreg = XEXP (testreg, 0);
5354 }
5355
5356 /* If this is a store into a register, recursively scan the
5357 value being stored. */
5358
5359 if ((GET_CODE (testreg) == PARALLEL
5360 && GET_MODE (testreg) == BLKmode)
5361 || (GET_CODE (testreg) == REG
5362 && (regno = REGNO (testreg),
5363 ! (regno == FRAME_POINTER_REGNUM
5364 && (! reload_completed || frame_pointer_needed)))
5365 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5366 && ! (regno == HARD_FRAME_POINTER_REGNUM
5367 && (! reload_completed || frame_pointer_needed))
5368 #endif
5369 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5370 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5371 #endif
5372 ))
5373 {
5374 if (mark_dest)
5375 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5376 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5377 return;
5378 }
5379 }
5380 break;
5381
5382 case ASM_OPERANDS:
5383 case UNSPEC_VOLATILE:
5384 case TRAP_IF:
5385 case ASM_INPUT:
5386 {
5387 /* Traditional and volatile asm instructions must be considered to use
5388 and clobber all hard registers, all pseudo-registers and all of
5389 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5390
5391 Consider for instance a volatile asm that changes the fpu rounding
5392 mode. An insn should not be moved across this even if it only uses
5393 pseudo-regs because it might give an incorrectly rounded result.
5394
5395 ?!? Unfortunately, marking all hard registers as live causes massive
5396 problems for the register allocator and marking all pseudos as live
5397 creates mountains of uninitialized variable warnings.
5398
5399 So for now, just clear the memory set list and mark any regs
5400 we can find in ASM_OPERANDS as used. */
5401 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5402 free_EXPR_LIST_list (&pbi->mem_set_list);
5403
5404 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5405 We can not just fall through here since then we would be confused
5406 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5407 traditional asms unlike their normal usage. */
5408 if (code == ASM_OPERANDS)
5409 {
5410 int j;
5411
5412 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5413 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5414 }
5415 break;
5416 }
5417
5418 case COND_EXEC:
5419 if (cond != NULL_RTX)
5420 abort ();
5421
5422 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5423
5424 cond = COND_EXEC_TEST (x);
5425 x = COND_EXEC_CODE (x);
5426 goto retry;
5427
5428 case PHI:
5429 /* We _do_not_ want to scan operands of phi nodes. Operands of
5430 a phi function are evaluated only when control reaches this
5431 block along a particular edge. Therefore, regs that appear
5432 as arguments to phi should not be added to the global live at
5433 start. */
5434 return;
5435
5436 default:
5437 break;
5438 }
5439
5440 /* Recursively scan the operands of this expression. */
5441
5442 {
5443 register const char *fmt = GET_RTX_FORMAT (code);
5444 register int i;
5445
5446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5447 {
5448 if (fmt[i] == 'e')
5449 {
5450 /* Tail recursive case: save a function call level. */
5451 if (i == 0)
5452 {
5453 x = XEXP (x, 0);
5454 goto retry;
5455 }
5456 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5457 }
5458 else if (fmt[i] == 'E')
5459 {
5460 register int j;
5461 for (j = 0; j < XVECLEN (x, i); j++)
5462 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5463 }
5464 }
5465 }
5466 }
5467 \f
5468 #ifdef AUTO_INC_DEC
5469
5470 static int
5471 try_pre_increment_1 (pbi, insn)
5472 struct propagate_block_info *pbi;
5473 rtx insn;
5474 {
5475 /* Find the next use of this reg. If in same basic block,
5476 make it do pre-increment or pre-decrement if appropriate. */
5477 rtx x = single_set (insn);
5478 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5479 * INTVAL (XEXP (SET_SRC (x), 1)));
5480 int regno = REGNO (SET_DEST (x));
5481 rtx y = pbi->reg_next_use[regno];
5482 if (y != 0
5483 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5484 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5485 mode would be better. */
5486 && ! dead_or_set_p (y, SET_DEST (x))
5487 && try_pre_increment (y, SET_DEST (x), amount))
5488 {
5489 /* We have found a suitable auto-increment
5490 and already changed insn Y to do it.
5491 So flush this increment-instruction. */
5492 PUT_CODE (insn, NOTE);
5493 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5494 NOTE_SOURCE_FILE (insn) = 0;
5495 /* Count a reference to this reg for the increment
5496 insn we are deleting. When a reg is incremented.
5497 spilling it is worse, so we want to make that
5498 less likely. */
5499 if (regno >= FIRST_PSEUDO_REGISTER)
5500 {
5501 REG_N_REFS (regno) += (optimize_size ? 1
5502 : pbi->bb->loop_depth + 1);
5503 REG_N_SETS (regno)++;
5504 }
5505 return 1;
5506 }
5507 return 0;
5508 }
5509
5510 /* Try to change INSN so that it does pre-increment or pre-decrement
5511 addressing on register REG in order to add AMOUNT to REG.
5512 AMOUNT is negative for pre-decrement.
5513 Returns 1 if the change could be made.
5514 This checks all about the validity of the result of modifying INSN. */
5515
5516 static int
5517 try_pre_increment (insn, reg, amount)
5518 rtx insn, reg;
5519 HOST_WIDE_INT amount;
5520 {
5521 register rtx use;
5522
5523 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5524 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5525 int pre_ok = 0;
5526 /* Nonzero if we can try to make a post-increment or post-decrement.
5527 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5528 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5529 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5530 int post_ok = 0;
5531
5532 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5533 int do_post = 0;
5534
5535 /* From the sign of increment, see which possibilities are conceivable
5536 on this target machine. */
5537 if (HAVE_PRE_INCREMENT && amount > 0)
5538 pre_ok = 1;
5539 if (HAVE_POST_INCREMENT && amount > 0)
5540 post_ok = 1;
5541
5542 if (HAVE_PRE_DECREMENT && amount < 0)
5543 pre_ok = 1;
5544 if (HAVE_POST_DECREMENT && amount < 0)
5545 post_ok = 1;
5546
5547 if (! (pre_ok || post_ok))
5548 return 0;
5549
5550 /* It is not safe to add a side effect to a jump insn
5551 because if the incremented register is spilled and must be reloaded
5552 there would be no way to store the incremented value back in memory. */
5553
5554 if (GET_CODE (insn) == JUMP_INSN)
5555 return 0;
5556
5557 use = 0;
5558 if (pre_ok)
5559 use = find_use_as_address (PATTERN (insn), reg, 0);
5560 if (post_ok && (use == 0 || use == (rtx) 1))
5561 {
5562 use = find_use_as_address (PATTERN (insn), reg, -amount);
5563 do_post = 1;
5564 }
5565
5566 if (use == 0 || use == (rtx) 1)
5567 return 0;
5568
5569 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5570 return 0;
5571
5572 /* See if this combination of instruction and addressing mode exists. */
5573 if (! validate_change (insn, &XEXP (use, 0),
5574 gen_rtx_fmt_e (amount > 0
5575 ? (do_post ? POST_INC : PRE_INC)
5576 : (do_post ? POST_DEC : PRE_DEC),
5577 Pmode, reg), 0))
5578 return 0;
5579
5580 /* Record that this insn now has an implicit side effect on X. */
5581 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5582 return 1;
5583 }
5584
5585 #endif /* AUTO_INC_DEC */
5586 \f
5587 /* Find the place in the rtx X where REG is used as a memory address.
5588 Return the MEM rtx that so uses it.
5589 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5590 (plus REG (const_int PLUSCONST)).
5591
5592 If such an address does not appear, return 0.
5593 If REG appears more than once, or is used other than in such an address,
5594 return (rtx)1. */
5595
5596 rtx
5597 find_use_as_address (x, reg, plusconst)
5598 register rtx x;
5599 rtx reg;
5600 HOST_WIDE_INT plusconst;
5601 {
5602 enum rtx_code code = GET_CODE (x);
5603 const char *fmt = GET_RTX_FORMAT (code);
5604 register int i;
5605 register rtx value = 0;
5606 register rtx tem;
5607
5608 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5609 return x;
5610
5611 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5612 && XEXP (XEXP (x, 0), 0) == reg
5613 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5614 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5615 return x;
5616
5617 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5618 {
5619 /* If REG occurs inside a MEM used in a bit-field reference,
5620 that is unacceptable. */
5621 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5622 return (rtx) (HOST_WIDE_INT) 1;
5623 }
5624
5625 if (x == reg)
5626 return (rtx) (HOST_WIDE_INT) 1;
5627
5628 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5629 {
5630 if (fmt[i] == 'e')
5631 {
5632 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5633 if (value == 0)
5634 value = tem;
5635 else if (tem != 0)
5636 return (rtx) (HOST_WIDE_INT) 1;
5637 }
5638 else if (fmt[i] == 'E')
5639 {
5640 register int j;
5641 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5642 {
5643 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5644 if (value == 0)
5645 value = tem;
5646 else if (tem != 0)
5647 return (rtx) (HOST_WIDE_INT) 1;
5648 }
5649 }
5650 }
5651
5652 return value;
5653 }
5654 \f
5655 /* Write information about registers and basic blocks into FILE.
5656 This is part of making a debugging dump. */
5657
5658 void
5659 dump_regset (r, outf)
5660 regset r;
5661 FILE *outf;
5662 {
5663 int i;
5664 if (r == NULL)
5665 {
5666 fputs (" (nil)", outf);
5667 return;
5668 }
5669
5670 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5671 {
5672 fprintf (outf, " %d", i);
5673 if (i < FIRST_PSEUDO_REGISTER)
5674 fprintf (outf, " [%s]",
5675 reg_names[i]);
5676 });
5677 }
5678
5679 void
5680 debug_regset (r)
5681 regset r;
5682 {
5683 dump_regset (r, stderr);
5684 putc ('\n', stderr);
5685 }
5686
5687 void
5688 dump_flow_info (file)
5689 FILE *file;
5690 {
5691 register int i;
5692 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5693
5694 fprintf (file, "%d registers.\n", max_regno);
5695 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5696 if (REG_N_REFS (i))
5697 {
5698 enum reg_class class, altclass;
5699 fprintf (file, "\nRegister %d used %d times across %d insns",
5700 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5701 if (REG_BASIC_BLOCK (i) >= 0)
5702 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5703 if (REG_N_SETS (i))
5704 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5705 (REG_N_SETS (i) == 1) ? "" : "s");
5706 if (REG_USERVAR_P (regno_reg_rtx[i]))
5707 fprintf (file, "; user var");
5708 if (REG_N_DEATHS (i) != 1)
5709 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5710 if (REG_N_CALLS_CROSSED (i) == 1)
5711 fprintf (file, "; crosses 1 call");
5712 else if (REG_N_CALLS_CROSSED (i))
5713 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5714 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5715 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5716 class = reg_preferred_class (i);
5717 altclass = reg_alternate_class (i);
5718 if (class != GENERAL_REGS || altclass != ALL_REGS)
5719 {
5720 if (altclass == ALL_REGS || class == ALL_REGS)
5721 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5722 else if (altclass == NO_REGS)
5723 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5724 else
5725 fprintf (file, "; pref %s, else %s",
5726 reg_class_names[(int) class],
5727 reg_class_names[(int) altclass]);
5728 }
5729 if (REGNO_POINTER_FLAG (i))
5730 fprintf (file, "; pointer");
5731 fprintf (file, ".\n");
5732 }
5733
5734 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5735 for (i = 0; i < n_basic_blocks; i++)
5736 {
5737 register basic_block bb = BASIC_BLOCK (i);
5738 register edge e;
5739
5740 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5741 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5742
5743 fprintf (file, "Predecessors: ");
5744 for (e = bb->pred; e ; e = e->pred_next)
5745 dump_edge_info (file, e, 0);
5746
5747 fprintf (file, "\nSuccessors: ");
5748 for (e = bb->succ; e ; e = e->succ_next)
5749 dump_edge_info (file, e, 1);
5750
5751 fprintf (file, "\nRegisters live at start:");
5752 dump_regset (bb->global_live_at_start, file);
5753
5754 fprintf (file, "\nRegisters live at end:");
5755 dump_regset (bb->global_live_at_end, file);
5756
5757 putc('\n', file);
5758 }
5759
5760 putc('\n', file);
5761 }
5762
5763 void
5764 debug_flow_info ()
5765 {
5766 dump_flow_info (stderr);
5767 }
5768
5769 static void
5770 dump_edge_info (file, e, do_succ)
5771 FILE *file;
5772 edge e;
5773 int do_succ;
5774 {
5775 basic_block side = (do_succ ? e->dest : e->src);
5776
5777 if (side == ENTRY_BLOCK_PTR)
5778 fputs (" ENTRY", file);
5779 else if (side == EXIT_BLOCK_PTR)
5780 fputs (" EXIT", file);
5781 else
5782 fprintf (file, " %d", side->index);
5783
5784 if (e->count)
5785 fprintf (file, " count:%d", e->count);
5786
5787 if (e->flags)
5788 {
5789 static const char * const bitnames[] = {
5790 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5791 };
5792 int comma = 0;
5793 int i, flags = e->flags;
5794
5795 fputc (' ', file);
5796 fputc ('(', file);
5797 for (i = 0; flags; i++)
5798 if (flags & (1 << i))
5799 {
5800 flags &= ~(1 << i);
5801
5802 if (comma)
5803 fputc (',', file);
5804 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5805 fputs (bitnames[i], file);
5806 else
5807 fprintf (file, "%d", i);
5808 comma = 1;
5809 }
5810 fputc (')', file);
5811 }
5812 }
5813
5814 \f
5815 /* Print out one basic block with live information at start and end. */
5816 void
5817 dump_bb (bb, outf)
5818 basic_block bb;
5819 FILE *outf;
5820 {
5821 rtx insn;
5822 rtx last;
5823 edge e;
5824
5825 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5826 bb->index, bb->loop_depth, bb->count);
5827 if (bb->eh_beg != -1 || bb->eh_end != -1)
5828 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5829 putc ('\n', outf);
5830
5831 fputs (";; Predecessors: ", outf);
5832 for (e = bb->pred; e ; e = e->pred_next)
5833 dump_edge_info (outf, e, 0);
5834 putc ('\n', outf);
5835
5836 fputs (";; Registers live at start:", outf);
5837 dump_regset (bb->global_live_at_start, outf);
5838 putc ('\n', outf);
5839
5840 for (insn = bb->head, last = NEXT_INSN (bb->end);
5841 insn != last;
5842 insn = NEXT_INSN (insn))
5843 print_rtl_single (outf, insn);
5844
5845 fputs (";; Registers live at end:", outf);
5846 dump_regset (bb->global_live_at_end, outf);
5847 putc ('\n', outf);
5848
5849 fputs (";; Successors: ", outf);
5850 for (e = bb->succ; e; e = e->succ_next)
5851 dump_edge_info (outf, e, 1);
5852 putc ('\n', outf);
5853 }
5854
5855 void
5856 debug_bb (bb)
5857 basic_block bb;
5858 {
5859 dump_bb (bb, stderr);
5860 }
5861
5862 void
5863 debug_bb_n (n)
5864 int n;
5865 {
5866 dump_bb (BASIC_BLOCK(n), stderr);
5867 }
5868
5869 /* Like print_rtl, but also print out live information for the start of each
5870 basic block. */
5871
5872 void
5873 print_rtl_with_bb (outf, rtx_first)
5874 FILE *outf;
5875 rtx rtx_first;
5876 {
5877 register rtx tmp_rtx;
5878
5879 if (rtx_first == 0)
5880 fprintf (outf, "(nil)\n");
5881 else
5882 {
5883 int i;
5884 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5885 int max_uid = get_max_uid ();
5886 basic_block *start = (basic_block *)
5887 xcalloc (max_uid, sizeof (basic_block));
5888 basic_block *end = (basic_block *)
5889 xcalloc (max_uid, sizeof (basic_block));
5890 enum bb_state *in_bb_p = (enum bb_state *)
5891 xcalloc (max_uid, sizeof (enum bb_state));
5892
5893 for (i = n_basic_blocks - 1; i >= 0; i--)
5894 {
5895 basic_block bb = BASIC_BLOCK (i);
5896 rtx x;
5897
5898 start[INSN_UID (bb->head)] = bb;
5899 end[INSN_UID (bb->end)] = bb;
5900 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5901 {
5902 enum bb_state state = IN_MULTIPLE_BB;
5903 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5904 state = IN_ONE_BB;
5905 in_bb_p[INSN_UID(x)] = state;
5906
5907 if (x == bb->end)
5908 break;
5909 }
5910 }
5911
5912 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5913 {
5914 int did_output;
5915 basic_block bb;
5916
5917 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5918 {
5919 fprintf (outf, ";; Start of basic block %d, registers live:",
5920 bb->index);
5921 dump_regset (bb->global_live_at_start, outf);
5922 putc ('\n', outf);
5923 }
5924
5925 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5926 && GET_CODE (tmp_rtx) != NOTE
5927 && GET_CODE (tmp_rtx) != BARRIER)
5928 fprintf (outf, ";; Insn is not within a basic block\n");
5929 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5930 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5931
5932 did_output = print_rtl_single (outf, tmp_rtx);
5933
5934 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5935 {
5936 fprintf (outf, ";; End of basic block %d, registers live:\n",
5937 bb->index);
5938 dump_regset (bb->global_live_at_end, outf);
5939 putc ('\n', outf);
5940 }
5941
5942 if (did_output)
5943 putc ('\n', outf);
5944 }
5945
5946 free (start);
5947 free (end);
5948 free (in_bb_p);
5949 }
5950
5951 if (current_function_epilogue_delay_list != 0)
5952 {
5953 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5954 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5955 tmp_rtx = XEXP (tmp_rtx, 1))
5956 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5957 }
5958 }
5959
5960 /* Compute dominator relationships using new flow graph structures. */
5961 void
5962 compute_flow_dominators (dominators, post_dominators)
5963 sbitmap *dominators;
5964 sbitmap *post_dominators;
5965 {
5966 int bb;
5967 sbitmap *temp_bitmap;
5968 edge e;
5969 basic_block *worklist, *workend, *qin, *qout;
5970 int qlen;
5971
5972 /* Allocate a worklist array/queue. Entries are only added to the
5973 list if they were not already on the list. So the size is
5974 bounded by the number of basic blocks. */
5975 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5976 workend = &worklist[n_basic_blocks];
5977
5978 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5979 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5980
5981 if (dominators)
5982 {
5983 /* The optimistic setting of dominators requires us to put every
5984 block on the work list initially. */
5985 qin = qout = worklist;
5986 for (bb = 0; bb < n_basic_blocks; bb++)
5987 {
5988 *qin++ = BASIC_BLOCK (bb);
5989 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5990 }
5991 qlen = n_basic_blocks;
5992 qin = worklist;
5993
5994 /* We want a maximal solution, so initially assume everything dominates
5995 everything else. */
5996 sbitmap_vector_ones (dominators, n_basic_blocks);
5997
5998 /* Mark successors of the entry block so we can identify them below. */
5999 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6000 e->dest->aux = ENTRY_BLOCK_PTR;
6001
6002 /* Iterate until the worklist is empty. */
6003 while (qlen)
6004 {
6005 /* Take the first entry off the worklist. */
6006 basic_block b = *qout++;
6007 if (qout >= workend)
6008 qout = worklist;
6009 qlen--;
6010
6011 bb = b->index;
6012
6013 /* Compute the intersection of the dominators of all the
6014 predecessor blocks.
6015
6016 If one of the predecessor blocks is the ENTRY block, then the
6017 intersection of the dominators of the predecessor blocks is
6018 defined as the null set. We can identify such blocks by the
6019 special value in the AUX field in the block structure. */
6020 if (b->aux == ENTRY_BLOCK_PTR)
6021 {
6022 /* Do not clear the aux field for blocks which are
6023 successors of the ENTRY block. That way we we never
6024 add them to the worklist again.
6025
6026 The intersect of dominators of the preds of this block is
6027 defined as the null set. */
6028 sbitmap_zero (temp_bitmap[bb]);
6029 }
6030 else
6031 {
6032 /* Clear the aux field of this block so it can be added to
6033 the worklist again if necessary. */
6034 b->aux = NULL;
6035 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6036 }
6037
6038 /* Make sure each block always dominates itself. */
6039 SET_BIT (temp_bitmap[bb], bb);
6040
6041 /* If the out state of this block changed, then we need to
6042 add the successors of this block to the worklist if they
6043 are not already on the worklist. */
6044 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6045 {
6046 for (e = b->succ; e; e = e->succ_next)
6047 {
6048 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
6049 {
6050 *qin++ = e->dest;
6051 if (qin >= workend)
6052 qin = worklist;
6053 qlen++;
6054
6055 e->dest->aux = e;
6056 }
6057 }
6058 }
6059 }
6060 }
6061
6062 if (post_dominators)
6063 {
6064 /* The optimistic setting of dominators requires us to put every
6065 block on the work list initially. */
6066 qin = qout = worklist;
6067 for (bb = 0; bb < n_basic_blocks; bb++)
6068 {
6069 *qin++ = BASIC_BLOCK (bb);
6070 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6071 }
6072 qlen = n_basic_blocks;
6073 qin = worklist;
6074
6075 /* We want a maximal solution, so initially assume everything post
6076 dominates everything else. */
6077 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6078
6079 /* Mark predecessors of the exit block so we can identify them below. */
6080 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6081 e->src->aux = EXIT_BLOCK_PTR;
6082
6083 /* Iterate until the worklist is empty. */
6084 while (qlen)
6085 {
6086 /* Take the first entry off the worklist. */
6087 basic_block b = *qout++;
6088 if (qout >= workend)
6089 qout = worklist;
6090 qlen--;
6091
6092 bb = b->index;
6093
6094 /* Compute the intersection of the post dominators of all the
6095 successor blocks.
6096
6097 If one of the successor blocks is the EXIT block, then the
6098 intersection of the dominators of the successor blocks is
6099 defined as the null set. We can identify such blocks by the
6100 special value in the AUX field in the block structure. */
6101 if (b->aux == EXIT_BLOCK_PTR)
6102 {
6103 /* Do not clear the aux field for blocks which are
6104 predecessors of the EXIT block. That way we we never
6105 add them to the worklist again.
6106
6107 The intersect of dominators of the succs of this block is
6108 defined as the null set. */
6109 sbitmap_zero (temp_bitmap[bb]);
6110 }
6111 else
6112 {
6113 /* Clear the aux field of this block so it can be added to
6114 the worklist again if necessary. */
6115 b->aux = NULL;
6116 sbitmap_intersection_of_succs (temp_bitmap[bb],
6117 post_dominators, bb);
6118 }
6119
6120 /* Make sure each block always post dominates itself. */
6121 SET_BIT (temp_bitmap[bb], bb);
6122
6123 /* If the out state of this block changed, then we need to
6124 add the successors of this block to the worklist if they
6125 are not already on the worklist. */
6126 if (sbitmap_a_and_b (post_dominators[bb],
6127 post_dominators[bb],
6128 temp_bitmap[bb]))
6129 {
6130 for (e = b->pred; e; e = e->pred_next)
6131 {
6132 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6133 {
6134 *qin++ = e->src;
6135 if (qin >= workend)
6136 qin = worklist;
6137 qlen++;
6138
6139 e->src->aux = e;
6140 }
6141 }
6142 }
6143 }
6144 }
6145
6146 free (worklist);
6147 free (temp_bitmap);
6148 }
6149
6150 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
6151
6152 void
6153 compute_immediate_dominators (idom, dominators)
6154 int *idom;
6155 sbitmap *dominators;
6156 {
6157 sbitmap *tmp;
6158 int b;
6159
6160 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6161
6162 /* Begin with tmp(n) = dom(n) - { n }. */
6163 for (b = n_basic_blocks; --b >= 0; )
6164 {
6165 sbitmap_copy (tmp[b], dominators[b]);
6166 RESET_BIT (tmp[b], b);
6167 }
6168
6169 /* Subtract out all of our dominator's dominators. */
6170 for (b = n_basic_blocks; --b >= 0; )
6171 {
6172 sbitmap tmp_b = tmp[b];
6173 int s;
6174
6175 for (s = n_basic_blocks; --s >= 0; )
6176 if (TEST_BIT (tmp_b, s))
6177 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6178 }
6179
6180 /* Find the one bit set in the bitmap and put it in the output array. */
6181 for (b = n_basic_blocks; --b >= 0; )
6182 {
6183 int t;
6184 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6185 }
6186
6187 sbitmap_vector_free (tmp);
6188 }
6189
6190 /* Recompute register set/reference counts immediately prior to register
6191 allocation.
6192
6193 This avoids problems with set/reference counts changing to/from values
6194 which have special meanings to the register allocators.
6195
6196 Additionally, the reference counts are the primary component used by the
6197 register allocators to prioritize pseudos for allocation to hard regs.
6198 More accurate reference counts generally lead to better register allocation.
6199
6200 F is the first insn to be scanned.
6201
6202 LOOP_STEP denotes how much loop_depth should be incremented per
6203 loop nesting level in order to increase the ref count more for
6204 references in a loop.
6205
6206 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6207 possibly other information which is used by the register allocators. */
6208
6209 void
6210 recompute_reg_usage (f, loop_step)
6211 rtx f ATTRIBUTE_UNUSED;
6212 int loop_step ATTRIBUTE_UNUSED;
6213 {
6214 allocate_reg_life_data ();
6215 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6216 }
6217
6218 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6219 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6220 of the number of registers that died. */
6221
6222 int
6223 count_or_remove_death_notes (blocks, kill)
6224 sbitmap blocks;
6225 int kill;
6226 {
6227 int i, count = 0;
6228
6229 for (i = n_basic_blocks - 1; i >= 0; --i)
6230 {
6231 basic_block bb;
6232 rtx insn;
6233
6234 if (blocks && ! TEST_BIT (blocks, i))
6235 continue;
6236
6237 bb = BASIC_BLOCK (i);
6238
6239 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6240 {
6241 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6242 {
6243 rtx *pprev = &REG_NOTES (insn);
6244 rtx link = *pprev;
6245
6246 while (link)
6247 {
6248 switch (REG_NOTE_KIND (link))
6249 {
6250 case REG_DEAD:
6251 if (GET_CODE (XEXP (link, 0)) == REG)
6252 {
6253 rtx reg = XEXP (link, 0);
6254 int n;
6255
6256 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6257 n = 1;
6258 else
6259 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6260 count += n;
6261 }
6262 /* FALLTHRU */
6263
6264 case REG_UNUSED:
6265 if (kill)
6266 {
6267 rtx next = XEXP (link, 1);
6268 free_EXPR_LIST_node (link);
6269 *pprev = link = next;
6270 break;
6271 }
6272 /* FALLTHRU */
6273
6274 default:
6275 pprev = &XEXP (link, 1);
6276 link = *pprev;
6277 break;
6278 }
6279 }
6280 }
6281
6282 if (insn == bb->end)
6283 break;
6284 }
6285 }
6286
6287 return count;
6288 }
6289
6290 /* Record INSN's block as BB. */
6291
6292 void
6293 set_block_for_insn (insn, bb)
6294 rtx insn;
6295 basic_block bb;
6296 {
6297 size_t uid = INSN_UID (insn);
6298 if (uid >= basic_block_for_insn->num_elements)
6299 {
6300 int new_size;
6301
6302 /* Add one-eighth the size so we don't keep calling xrealloc. */
6303 new_size = uid + (uid + 7) / 8;
6304
6305 VARRAY_GROW (basic_block_for_insn, new_size);
6306 }
6307 VARRAY_BB (basic_block_for_insn, uid) = bb;
6308 }
6309
6310 /* Record INSN's block number as BB. */
6311 /* ??? This has got to go. */
6312
6313 void
6314 set_block_num (insn, bb)
6315 rtx insn;
6316 int bb;
6317 {
6318 set_block_for_insn (insn, BASIC_BLOCK (bb));
6319 }
6320 \f
6321 /* Verify the CFG consistency. This function check some CFG invariants and
6322 aborts when something is wrong. Hope that this function will help to
6323 convert many optimization passes to preserve CFG consistent.
6324
6325 Currently it does following checks:
6326
6327 - test head/end pointers
6328 - overlapping of basic blocks
6329 - edge list corectness
6330 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6331 - tails of basic blocks (ensure that boundary is necesary)
6332 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6333 and NOTE_INSN_BASIC_BLOCK
6334 - check that all insns are in the basic blocks
6335 (except the switch handling code, barriers and notes)
6336 - check that all returns are followed by barriers
6337
6338 In future it can be extended check a lot of other stuff as well
6339 (reachability of basic blocks, life information, etc. etc.). */
6340
6341 void
6342 verify_flow_info ()
6343 {
6344 const int max_uid = get_max_uid ();
6345 const rtx rtx_first = get_insns ();
6346 rtx last_head = get_last_insn ();
6347 basic_block *bb_info;
6348 rtx x;
6349 int i, last_bb_num_seen, num_bb_notes, err = 0;
6350
6351 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6352
6353 for (i = n_basic_blocks - 1; i >= 0; i--)
6354 {
6355 basic_block bb = BASIC_BLOCK (i);
6356 rtx head = bb->head;
6357 rtx end = bb->end;
6358
6359 /* Verify the end of the basic block is in the INSN chain. */
6360 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6361 if (x == end)
6362 break;
6363 if (!x)
6364 {
6365 error ("End insn %d for block %d not found in the insn stream.",
6366 INSN_UID (end), bb->index);
6367 err = 1;
6368 }
6369
6370 /* Work backwards from the end to the head of the basic block
6371 to verify the head is in the RTL chain. */
6372 for ( ; x != NULL_RTX; x = PREV_INSN (x))
6373 {
6374 /* While walking over the insn chain, verify insns appear
6375 in only one basic block and initialize the BB_INFO array
6376 used by other passes. */
6377 if (bb_info[INSN_UID (x)] != NULL)
6378 {
6379 error ("Insn %d is in multiple basic blocks (%d and %d)",
6380 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6381 err = 1;
6382 }
6383 bb_info[INSN_UID (x)] = bb;
6384
6385 if (x == head)
6386 break;
6387 }
6388 if (!x)
6389 {
6390 error ("Head insn %d for block %d not found in the insn stream.",
6391 INSN_UID (head), bb->index);
6392 err = 1;
6393 }
6394
6395 last_head = x;
6396 }
6397
6398 /* Now check the basic blocks (boundaries etc.) */
6399 for (i = n_basic_blocks - 1; i >= 0; i--)
6400 {
6401 basic_block bb = BASIC_BLOCK (i);
6402 /* Check corectness of edge lists */
6403 edge e;
6404
6405 e = bb->succ;
6406 while (e)
6407 {
6408 if (e->src != bb)
6409 {
6410 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6411 bb->index);
6412 fprintf (stderr, "Predecessor: ");
6413 dump_edge_info (stderr, e, 0);
6414 fprintf (stderr, "\nSuccessor: ");
6415 dump_edge_info (stderr, e, 1);
6416 fflush (stderr);
6417 err = 1;
6418 }
6419 if (e->dest != EXIT_BLOCK_PTR)
6420 {
6421 edge e2 = e->dest->pred;
6422 while (e2 && e2 != e)
6423 e2 = e2->pred_next;
6424 if (!e2)
6425 {
6426 error ("Basic block %i edge lists are corrupted", bb->index);
6427 err = 1;
6428 }
6429 }
6430 e = e->succ_next;
6431 }
6432
6433 e = bb->pred;
6434 while (e)
6435 {
6436 if (e->dest != bb)
6437 {
6438 error ("Basic block %d pred edge is corrupted", bb->index);
6439 fputs ("Predecessor: ", stderr);
6440 dump_edge_info (stderr, e, 0);
6441 fputs ("\nSuccessor: ", stderr);
6442 dump_edge_info (stderr, e, 1);
6443 fputc ('\n', stderr);
6444 err = 1;
6445 }
6446 if (e->src != ENTRY_BLOCK_PTR)
6447 {
6448 edge e2 = e->src->succ;
6449 while (e2 && e2 != e)
6450 e2 = e2->succ_next;
6451 if (!e2)
6452 {
6453 error ("Basic block %i edge lists are corrupted", bb->index);
6454 err = 1;
6455 }
6456 }
6457 e = e->pred_next;
6458 }
6459
6460 /* OK pointers are correct. Now check the header of basic
6461 block. It ought to contain optional CODE_LABEL followed
6462 by NOTE_BASIC_BLOCK. */
6463 x = bb->head;
6464 if (GET_CODE (x) == CODE_LABEL)
6465 {
6466 if (bb->end == x)
6467 {
6468 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6469 bb->index);
6470 err = 1;
6471 }
6472 x = NEXT_INSN (x);
6473 }
6474 if (GET_CODE (x) != NOTE
6475 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6476 || NOTE_BASIC_BLOCK (x) != bb)
6477 {
6478 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6479 bb->index);
6480 err = 1;
6481 }
6482
6483 if (bb->end == x)
6484 {
6485 /* Do checks for empty blocks here */
6486 }
6487 else
6488 {
6489 x = NEXT_INSN (x);
6490 while (x)
6491 {
6492 if (GET_CODE (x) == NOTE
6493 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6494 {
6495 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6496 INSN_UID (x), bb->index);
6497 err = 1;
6498 }
6499
6500 if (x == bb->end)
6501 break;
6502
6503 if (GET_CODE (x) == JUMP_INSN
6504 || GET_CODE (x) == CODE_LABEL
6505 || GET_CODE (x) == BARRIER)
6506 {
6507 error ("In basic block %d:", bb->index);
6508 fatal_insn ("Flow control insn inside a basic block", x);
6509 }
6510
6511 x = NEXT_INSN (x);
6512 }
6513 }
6514 }
6515
6516 last_bb_num_seen = -1;
6517 num_bb_notes = 0;
6518 x = rtx_first;
6519 while (x)
6520 {
6521 if (GET_CODE (x) == NOTE
6522 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6523 {
6524 basic_block bb = NOTE_BASIC_BLOCK (x);
6525 num_bb_notes++;
6526 if (bb->index != last_bb_num_seen + 1)
6527 fatal ("Basic blocks not numbered consecutively");
6528 last_bb_num_seen = bb->index;
6529 }
6530
6531 if (!bb_info[INSN_UID (x)])
6532 {
6533 switch (GET_CODE (x))
6534 {
6535 case BARRIER:
6536 case NOTE:
6537 break;
6538
6539 case CODE_LABEL:
6540 /* An addr_vec is placed outside any block block. */
6541 if (NEXT_INSN (x)
6542 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6543 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6544 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6545 {
6546 x = NEXT_INSN (x);
6547 }
6548
6549 /* But in any case, non-deletable labels can appear anywhere. */
6550 break;
6551
6552 default:
6553 fatal_insn ("Insn outside basic block", x);
6554 }
6555 }
6556
6557 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6558 && GET_CODE (x) == JUMP_INSN
6559 && returnjump_p (x) && ! condjump_p (x)
6560 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6561 fatal_insn ("Return not followed by barrier", x);
6562
6563 x = NEXT_INSN (x);
6564 }
6565
6566 if (num_bb_notes != n_basic_blocks)
6567 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6568 num_bb_notes, n_basic_blocks);
6569
6570 if (err)
6571 abort ();
6572
6573 /* Clean up. */
6574 free (bb_info);
6575 }
6576 \f
6577 /* Functions to access an edge list with a vector representation.
6578 Enough data is kept such that given an index number, the
6579 pred and succ that edge represents can be determined, or
6580 given a pred and a succ, its index number can be returned.
6581 This allows algorithms which consume a lot of memory to
6582 represent the normally full matrix of edge (pred,succ) with a
6583 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6584 wasted space in the client code due to sparse flow graphs. */
6585
6586 /* This functions initializes the edge list. Basically the entire
6587 flowgraph is processed, and all edges are assigned a number,
6588 and the data structure is filled in. */
6589 struct edge_list *
6590 create_edge_list ()
6591 {
6592 struct edge_list *elist;
6593 edge e;
6594 int num_edges;
6595 int x;
6596 int block_count;
6597
6598 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6599
6600 num_edges = 0;
6601
6602 /* Determine the number of edges in the flow graph by counting successor
6603 edges on each basic block. */
6604 for (x = 0; x < n_basic_blocks; x++)
6605 {
6606 basic_block bb = BASIC_BLOCK (x);
6607
6608 for (e = bb->succ; e; e = e->succ_next)
6609 num_edges++;
6610 }
6611 /* Don't forget successors of the entry block. */
6612 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6613 num_edges++;
6614
6615 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6616 elist->num_blocks = block_count;
6617 elist->num_edges = num_edges;
6618 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6619
6620 num_edges = 0;
6621
6622 /* Follow successors of the entry block, and register these edges. */
6623 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6624 {
6625 elist->index_to_edge[num_edges] = e;
6626 num_edges++;
6627 }
6628
6629 for (x = 0; x < n_basic_blocks; x++)
6630 {
6631 basic_block bb = BASIC_BLOCK (x);
6632
6633 /* Follow all successors of blocks, and register these edges. */
6634 for (e = bb->succ; e; e = e->succ_next)
6635 {
6636 elist->index_to_edge[num_edges] = e;
6637 num_edges++;
6638 }
6639 }
6640 return elist;
6641 }
6642
6643 /* This function free's memory associated with an edge list. */
6644 void
6645 free_edge_list (elist)
6646 struct edge_list *elist;
6647 {
6648 if (elist)
6649 {
6650 free (elist->index_to_edge);
6651 free (elist);
6652 }
6653 }
6654
6655 /* This function provides debug output showing an edge list. */
6656 void
6657 print_edge_list (f, elist)
6658 FILE *f;
6659 struct edge_list *elist;
6660 {
6661 int x;
6662 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6663 elist->num_blocks - 2, elist->num_edges);
6664
6665 for (x = 0; x < elist->num_edges; x++)
6666 {
6667 fprintf (f, " %-4d - edge(", x);
6668 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6669 fprintf (f,"entry,");
6670 else
6671 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6672
6673 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6674 fprintf (f,"exit)\n");
6675 else
6676 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6677 }
6678 }
6679
6680 /* This function provides an internal consistency check of an edge list,
6681 verifying that all edges are present, and that there are no
6682 extra edges. */
6683 void
6684 verify_edge_list (f, elist)
6685 FILE *f;
6686 struct edge_list *elist;
6687 {
6688 int x, pred, succ, index;
6689 edge e;
6690
6691 for (x = 0; x < n_basic_blocks; x++)
6692 {
6693 basic_block bb = BASIC_BLOCK (x);
6694
6695 for (e = bb->succ; e; e = e->succ_next)
6696 {
6697 pred = e->src->index;
6698 succ = e->dest->index;
6699 index = EDGE_INDEX (elist, e->src, e->dest);
6700 if (index == EDGE_INDEX_NO_EDGE)
6701 {
6702 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6703 continue;
6704 }
6705 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6706 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6707 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6708 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6709 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6710 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6711 }
6712 }
6713 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6714 {
6715 pred = e->src->index;
6716 succ = e->dest->index;
6717 index = EDGE_INDEX (elist, e->src, e->dest);
6718 if (index == EDGE_INDEX_NO_EDGE)
6719 {
6720 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6721 continue;
6722 }
6723 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6724 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6725 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6726 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6727 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6728 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6729 }
6730 /* We've verified that all the edges are in the list, no lets make sure
6731 there are no spurious edges in the list. */
6732
6733 for (pred = 0 ; pred < n_basic_blocks; pred++)
6734 for (succ = 0 ; succ < n_basic_blocks; succ++)
6735 {
6736 basic_block p = BASIC_BLOCK (pred);
6737 basic_block s = BASIC_BLOCK (succ);
6738
6739 int found_edge = 0;
6740
6741 for (e = p->succ; e; e = e->succ_next)
6742 if (e->dest == s)
6743 {
6744 found_edge = 1;
6745 break;
6746 }
6747 for (e = s->pred; e; e = e->pred_next)
6748 if (e->src == p)
6749 {
6750 found_edge = 1;
6751 break;
6752 }
6753 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6754 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6755 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6756 pred, succ);
6757 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6758 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6759 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6760 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6761 BASIC_BLOCK (succ)));
6762 }
6763 for (succ = 0 ; succ < n_basic_blocks; succ++)
6764 {
6765 basic_block p = ENTRY_BLOCK_PTR;
6766 basic_block s = BASIC_BLOCK (succ);
6767
6768 int found_edge = 0;
6769
6770 for (e = p->succ; e; e = e->succ_next)
6771 if (e->dest == s)
6772 {
6773 found_edge = 1;
6774 break;
6775 }
6776 for (e = s->pred; e; e = e->pred_next)
6777 if (e->src == p)
6778 {
6779 found_edge = 1;
6780 break;
6781 }
6782 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6783 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6784 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6785 succ);
6786 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6787 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6788 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6789 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6790 BASIC_BLOCK (succ)));
6791 }
6792 for (pred = 0 ; pred < n_basic_blocks; pred++)
6793 {
6794 basic_block p = BASIC_BLOCK (pred);
6795 basic_block s = EXIT_BLOCK_PTR;
6796
6797 int found_edge = 0;
6798
6799 for (e = p->succ; e; e = e->succ_next)
6800 if (e->dest == s)
6801 {
6802 found_edge = 1;
6803 break;
6804 }
6805 for (e = s->pred; e; e = e->pred_next)
6806 if (e->src == p)
6807 {
6808 found_edge = 1;
6809 break;
6810 }
6811 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6812 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6813 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6814 pred);
6815 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6816 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6817 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6818 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6819 EXIT_BLOCK_PTR));
6820 }
6821 }
6822
6823 /* This routine will determine what, if any, edge there is between
6824 a specified predecessor and successor. */
6825
6826 int
6827 find_edge_index (edge_list, pred, succ)
6828 struct edge_list *edge_list;
6829 basic_block pred, succ;
6830 {
6831 int x;
6832 for (x = 0; x < NUM_EDGES (edge_list); x++)
6833 {
6834 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6835 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6836 return x;
6837 }
6838 return (EDGE_INDEX_NO_EDGE);
6839 }
6840
6841 /* This function will remove an edge from the flow graph. */
6842 void
6843 remove_edge (e)
6844 edge e;
6845 {
6846 edge last_pred = NULL;
6847 edge last_succ = NULL;
6848 edge tmp;
6849 basic_block src, dest;
6850 src = e->src;
6851 dest = e->dest;
6852 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6853 last_succ = tmp;
6854
6855 if (!tmp)
6856 abort ();
6857 if (last_succ)
6858 last_succ->succ_next = e->succ_next;
6859 else
6860 src->succ = e->succ_next;
6861
6862 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6863 last_pred = tmp;
6864
6865 if (!tmp)
6866 abort ();
6867 if (last_pred)
6868 last_pred->pred_next = e->pred_next;
6869 else
6870 dest->pred = e->pred_next;
6871
6872 n_edges--;
6873 free (e);
6874 }
6875
6876 /* This routine will remove any fake successor edges for a basic block.
6877 When the edge is removed, it is also removed from whatever predecessor
6878 list it is in. */
6879 static void
6880 remove_fake_successors (bb)
6881 basic_block bb;
6882 {
6883 edge e;
6884 for (e = bb->succ; e ; )
6885 {
6886 edge tmp = e;
6887 e = e->succ_next;
6888 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6889 remove_edge (tmp);
6890 }
6891 }
6892
6893 /* This routine will remove all fake edges from the flow graph. If
6894 we remove all fake successors, it will automatically remove all
6895 fake predecessors. */
6896 void
6897 remove_fake_edges ()
6898 {
6899 int x;
6900
6901 for (x = 0; x < n_basic_blocks; x++)
6902 remove_fake_successors (BASIC_BLOCK (x));
6903
6904 /* We've handled all successors except the entry block's. */
6905 remove_fake_successors (ENTRY_BLOCK_PTR);
6906 }
6907
6908 /* This function will add a fake edge between any block which has no
6909 successors, and the exit block. Some data flow equations require these
6910 edges to exist. */
6911 void
6912 add_noreturn_fake_exit_edges ()
6913 {
6914 int x;
6915
6916 for (x = 0; x < n_basic_blocks; x++)
6917 if (BASIC_BLOCK (x)->succ == NULL)
6918 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6919 }
6920
6921 /* Redirect an edge's successor from one block to another. */
6922
6923 void
6924 redirect_edge_succ (e, new_succ)
6925 edge e;
6926 basic_block new_succ;
6927 {
6928 edge *pe;
6929
6930 /* Disconnect the edge from the old successor block. */
6931 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
6932 continue;
6933 *pe = (*pe)->pred_next;
6934
6935 /* Reconnect the edge to the new successor block. */
6936 e->pred_next = new_succ->pred;
6937 new_succ->pred = e;
6938 e->dest = new_succ;
6939 }
6940
6941 /* Redirect an edge's predecessor from one block to another. */
6942
6943 void
6944 redirect_edge_pred (e, new_pred)
6945 edge e;
6946 basic_block new_pred;
6947 {
6948 edge *pe;
6949
6950 /* Disconnect the edge from the old predecessor block. */
6951 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
6952 continue;
6953 *pe = (*pe)->succ_next;
6954
6955 /* Reconnect the edge to the new predecessor block. */
6956 e->succ_next = new_pred->succ;
6957 new_pred->succ = e;
6958 e->src = new_pred;
6959 }
6960 \f
6961 /* Dump the list of basic blocks in the bitmap NODES. */
6962 static void
6963 flow_nodes_print (str, nodes, file)
6964 const char *str;
6965 const sbitmap nodes;
6966 FILE *file;
6967 {
6968 int node;
6969
6970 fprintf (file, "%s { ", str);
6971 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6972 fputs ("}\n", file);
6973 }
6974
6975
6976 /* Dump the list of exiting edges in the array EDGES. */
6977 static void
6978 flow_exits_print (str, edges, num_edges, file)
6979 const char *str;
6980 const edge *edges;
6981 int num_edges;
6982 FILE *file;
6983 {
6984 int i;
6985
6986 fprintf (file, "%s { ", str);
6987 for (i = 0; i < num_edges; i++)
6988 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6989 fputs ("}\n", file);
6990 }
6991
6992
6993 /* Dump loop related CFG information. */
6994 static void
6995 flow_loops_cfg_dump (loops, file)
6996 const struct loops *loops;
6997 FILE *file;
6998 {
6999 int i;
7000
7001 if (! loops->num || ! file || ! loops->cfg.dom)
7002 return;
7003
7004 for (i = 0; i < n_basic_blocks; i++)
7005 {
7006 edge succ;
7007
7008 fprintf (file, ";; %d succs { ", i);
7009 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7010 fprintf (file, "%d ", succ->dest->index);
7011 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7012 }
7013
7014
7015 /* Dump the DFS node order. */
7016 if (loops->cfg.dfs_order)
7017 {
7018 fputs (";; DFS order: ", file);
7019 for (i = 0; i < n_basic_blocks; i++)
7020 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7021 fputs ("\n", file);
7022 }
7023 }
7024
7025
7026 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7027 static int
7028 flow_loop_nested_p (outer, loop)
7029 struct loop *outer;
7030 struct loop *loop;
7031 {
7032 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7033 }
7034
7035
7036 /* Dump the loop information specified by LOOPS to the stream FILE. */
7037 void
7038 flow_loops_dump (loops, file, verbose)
7039 const struct loops *loops;
7040 FILE *file;
7041 int verbose;
7042 {
7043 int i;
7044 int num_loops;
7045
7046 num_loops = loops->num;
7047 if (! num_loops || ! file)
7048 return;
7049
7050 fprintf (file, ";; %d loops found, %d levels\n",
7051 num_loops, loops->levels);
7052
7053 for (i = 0; i < num_loops; i++)
7054 {
7055 struct loop *loop = &loops->array[i];
7056
7057 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7058 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7059 loop->header->index, loop->latch->index,
7060 loop->pre_header ? loop->pre_header->index : -1,
7061 loop->depth, loop->level,
7062 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7063 fprintf (file, ";; %d", loop->num_nodes);
7064 flow_nodes_print (" nodes", loop->nodes, file);
7065 fprintf (file, ";; %d", loop->num_exits);
7066 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7067
7068 if (loop->shared)
7069 {
7070 int j;
7071
7072 for (j = 0; j < i; j++)
7073 {
7074 struct loop *oloop = &loops->array[j];
7075
7076 if (loop->header == oloop->header)
7077 {
7078 int disjoint;
7079 int smaller;
7080
7081 smaller = loop->num_nodes < oloop->num_nodes;
7082
7083 /* If the union of LOOP and OLOOP is different than
7084 the larger of LOOP and OLOOP then LOOP and OLOOP
7085 must be disjoint. */
7086 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7087 smaller ? oloop : loop);
7088 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
7089 loop->header->index, i, j,
7090 disjoint ? "disjoint" : "nested");
7091 }
7092 }
7093 }
7094
7095 if (verbose)
7096 {
7097 /* Print diagnostics to compare our concept of a loop with
7098 what the loop notes say. */
7099 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7100 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7101 != NOTE_INSN_LOOP_BEG)
7102 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7103 INSN_UID (PREV_INSN (loop->first->head)));
7104 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7105 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7106 != NOTE_INSN_LOOP_END)
7107 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7108 INSN_UID (NEXT_INSN (loop->last->end)));
7109 }
7110 }
7111
7112 if (verbose)
7113 flow_loops_cfg_dump (loops, file);
7114 }
7115
7116
7117 /* Free all the memory allocated for LOOPS. */
7118 void
7119 flow_loops_free (loops)
7120 struct loops *loops;
7121 {
7122 if (loops->array)
7123 {
7124 int i;
7125
7126 if (! loops->num)
7127 abort ();
7128
7129 /* Free the loop descriptors. */
7130 for (i = 0; i < loops->num; i++)
7131 {
7132 struct loop *loop = &loops->array[i];
7133
7134 if (loop->nodes)
7135 sbitmap_free (loop->nodes);
7136 if (loop->exits)
7137 free (loop->exits);
7138 }
7139 free (loops->array);
7140 loops->array = NULL;
7141
7142 if (loops->cfg.dom)
7143 sbitmap_vector_free (loops->cfg.dom);
7144 if (loops->cfg.dfs_order)
7145 free (loops->cfg.dfs_order);
7146
7147 sbitmap_free (loops->shared_headers);
7148 }
7149 }
7150
7151
7152 /* Find the exits from the loop using the bitmap of loop nodes NODES
7153 and store in EXITS array. Return the number of exits from the
7154 loop. */
7155 static int
7156 flow_loop_exits_find (nodes, exits)
7157 const sbitmap nodes;
7158 edge **exits;
7159 {
7160 edge e;
7161 int node;
7162 int num_exits;
7163
7164 *exits = NULL;
7165
7166 /* Check all nodes within the loop to see if there are any
7167 successors not in the loop. Note that a node may have multiple
7168 exiting edges. */
7169 num_exits = 0;
7170 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7171 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7172 {
7173 basic_block dest = e->dest;
7174
7175 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7176 num_exits++;
7177 }
7178 });
7179
7180 if (! num_exits)
7181 return 0;
7182
7183 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7184
7185 /* Store all exiting edges into an array. */
7186 num_exits = 0;
7187 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7188 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7189 {
7190 basic_block dest = e->dest;
7191
7192 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7193 (*exits)[num_exits++] = e;
7194 }
7195 });
7196
7197 return num_exits;
7198 }
7199
7200
7201 /* Find the nodes contained within the loop with header HEADER and
7202 latch LATCH and store in NODES. Return the number of nodes within
7203 the loop. */
7204 static int
7205 flow_loop_nodes_find (header, latch, nodes)
7206 basic_block header;
7207 basic_block latch;
7208 sbitmap nodes;
7209 {
7210 basic_block *stack;
7211 int sp;
7212 int num_nodes = 0;
7213
7214 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7215 sp = 0;
7216
7217 /* Start with only the loop header in the set of loop nodes. */
7218 sbitmap_zero (nodes);
7219 SET_BIT (nodes, header->index);
7220 num_nodes++;
7221 header->loop_depth++;
7222
7223 /* Push the loop latch on to the stack. */
7224 if (! TEST_BIT (nodes, latch->index))
7225 {
7226 SET_BIT (nodes, latch->index);
7227 latch->loop_depth++;
7228 num_nodes++;
7229 stack[sp++] = latch;
7230 }
7231
7232 while (sp)
7233 {
7234 basic_block node;
7235 edge e;
7236
7237 node = stack[--sp];
7238 for (e = node->pred; e; e = e->pred_next)
7239 {
7240 basic_block ancestor = e->src;
7241
7242 /* If each ancestor not marked as part of loop, add to set of
7243 loop nodes and push on to stack. */
7244 if (ancestor != ENTRY_BLOCK_PTR
7245 && ! TEST_BIT (nodes, ancestor->index))
7246 {
7247 SET_BIT (nodes, ancestor->index);
7248 ancestor->loop_depth++;
7249 num_nodes++;
7250 stack[sp++] = ancestor;
7251 }
7252 }
7253 }
7254 free (stack);
7255 return num_nodes;
7256 }
7257
7258
7259 /* Compute the depth first search order and store in the array
7260 DFS_ORDER, marking the nodes visited in VISITED. Returns the
7261 number of nodes visited. A depth first search tries to get as far
7262 away from the starting point as quickly as possible. */
7263 static int
7264 flow_depth_first_order_compute (dfs_order)
7265 int *dfs_order;
7266 {
7267 edge *stack;
7268 int sp;
7269 int dfsnum = 0;
7270 sbitmap visited;
7271
7272 /* Allocate stack for back-tracking up CFG. */
7273 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7274 sp = 0;
7275
7276 /* Allocate bitmap to track nodes that have been visited. */
7277 visited = sbitmap_alloc (n_basic_blocks);
7278
7279 /* None of the nodes in the CFG have been visited yet. */
7280 sbitmap_zero (visited);
7281
7282 /* Push the first edge on to the stack. */
7283 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7284
7285 while (sp)
7286 {
7287 edge e;
7288 basic_block src;
7289 basic_block dest;
7290
7291 /* Look at the edge on the top of the stack. */
7292 e = stack[sp - 1];
7293 src = e->src;
7294 dest = e->dest;
7295
7296 /* Check if the edge destination has been visited yet. */
7297 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7298 {
7299 /* Mark that we have visited the destination. */
7300 SET_BIT (visited, dest->index);
7301
7302 if (dest->succ)
7303 {
7304 /* Since the DEST node has been visited for the first
7305 time, check its successors. */
7306 stack[sp++] = dest->succ;
7307 }
7308 else
7309 {
7310 /* There are no successors for the DEST node so assign
7311 its DFS number. */
7312 dfs_order[n_basic_blocks - ++dfsnum] = dest->index;
7313 }
7314 }
7315 else
7316 {
7317 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7318 {
7319 /* There are no more successors for the SRC node
7320 so assign its DFS number. */
7321 dfs_order[n_basic_blocks - ++dfsnum] = src->index;
7322 }
7323
7324 if (e->succ_next)
7325 stack[sp - 1] = e->succ_next;
7326 else
7327 sp--;
7328 }
7329 }
7330
7331 free (stack);
7332 sbitmap_free (visited);
7333
7334 /* The number of nodes visited should not be greater than
7335 n_basic_blocks. */
7336 if (dfsnum > n_basic_blocks)
7337 abort ();
7338
7339 /* There are some nodes left in the CFG that are unreachable. */
7340 if (dfsnum < n_basic_blocks)
7341 abort ();
7342 return dfsnum;
7343 }
7344
7345
7346 /* Return the block for the pre-header of the loop with header
7347 HEADER where DOM specifies the dominator information. Return NULL if
7348 there is no pre-header. */
7349 static basic_block
7350 flow_loop_pre_header_find (header, dom)
7351 basic_block header;
7352 const sbitmap *dom;
7353 {
7354 basic_block pre_header;
7355 edge e;
7356
7357 /* If block p is a predecessor of the header and is the only block
7358 that the header does not dominate, then it is the pre-header. */
7359 pre_header = NULL;
7360 for (e = header->pred; e; e = e->pred_next)
7361 {
7362 basic_block node = e->src;
7363
7364 if (node != ENTRY_BLOCK_PTR
7365 && ! TEST_BIT (dom[node->index], header->index))
7366 {
7367 if (pre_header == NULL)
7368 pre_header = node;
7369 else
7370 {
7371 /* There are multiple edges into the header from outside
7372 the loop so there is no pre-header block. */
7373 pre_header = NULL;
7374 break;
7375 }
7376 }
7377 }
7378 return pre_header;
7379 }
7380
7381
7382 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7383 previously added. The insertion algorithm assumes that the loops
7384 are added in the order found by a depth first search of the CFG. */
7385 static void
7386 flow_loop_tree_node_add (prevloop, loop)
7387 struct loop *prevloop;
7388 struct loop *loop;
7389 {
7390
7391 if (flow_loop_nested_p (prevloop, loop))
7392 {
7393 prevloop->inner = loop;
7394 loop->outer = prevloop;
7395 return;
7396 }
7397
7398 while (prevloop->outer)
7399 {
7400 if (flow_loop_nested_p (prevloop->outer, loop))
7401 {
7402 prevloop->next = loop;
7403 loop->outer = prevloop->outer;
7404 return;
7405 }
7406 prevloop = prevloop->outer;
7407 }
7408
7409 prevloop->next = loop;
7410 loop->outer = NULL;
7411 }
7412
7413
7414 /* Build the loop hierarchy tree for LOOPS. */
7415 static void
7416 flow_loops_tree_build (loops)
7417 struct loops *loops;
7418 {
7419 int i;
7420 int num_loops;
7421
7422 num_loops = loops->num;
7423 if (! num_loops)
7424 return;
7425
7426 /* Root the loop hierarchy tree with the first loop found.
7427 Since we used a depth first search this should be the
7428 outermost loop. */
7429 loops->tree = &loops->array[0];
7430 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7431
7432 /* Add the remaining loops to the tree. */
7433 for (i = 1; i < num_loops; i++)
7434 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7435 }
7436
7437
7438 /* Helper function to compute loop nesting depth and enclosed loop level
7439 for the natural loop specified by LOOP at the loop depth DEPTH.
7440 Returns the loop level. */
7441 static int
7442 flow_loop_level_compute (loop, depth)
7443 struct loop *loop;
7444 int depth;
7445 {
7446 struct loop *inner;
7447 int level = 1;
7448
7449 if (! loop)
7450 return 0;
7451
7452 /* Traverse loop tree assigning depth and computing level as the
7453 maximum level of all the inner loops of this loop. The loop
7454 level is equivalent to the height of the loop in the loop tree
7455 and corresponds to the number of enclosed loop levels (including
7456 itself). */
7457 for (inner = loop->inner; inner; inner = inner->next)
7458 {
7459 int ilevel;
7460
7461 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7462
7463 if (ilevel > level)
7464 level = ilevel;
7465 }
7466 loop->level = level;
7467 loop->depth = depth;
7468 return level;
7469 }
7470
7471
7472 /* Compute the loop nesting depth and enclosed loop level for the loop
7473 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7474 level. */
7475
7476 static int
7477 flow_loops_level_compute (loops)
7478 struct loops *loops;
7479 {
7480 struct loop *loop;
7481 int level;
7482 int levels = 0;
7483
7484 /* Traverse all the outer level loops. */
7485 for (loop = loops->tree; loop; loop = loop->next)
7486 {
7487 level = flow_loop_level_compute (loop, 1);
7488 if (level > levels)
7489 levels = level;
7490 }
7491 return levels;
7492 }
7493
7494
7495 /* Find all the natural loops in the function and save in LOOPS structure
7496 and recalculate loop_depth information in basic block structures.
7497 Return the number of natural loops found. */
7498
7499 int
7500 flow_loops_find (loops)
7501 struct loops *loops;
7502 {
7503 int i;
7504 int b;
7505 int num_loops;
7506 edge e;
7507 sbitmap headers;
7508 sbitmap *dom;
7509 int *dfs_order;
7510
7511 loops->num = 0;
7512 loops->array = NULL;
7513 loops->tree = NULL;
7514 dfs_order = NULL;
7515
7516 /* Taking care of this degenerate case makes the rest of
7517 this code simpler. */
7518 if (n_basic_blocks == 0)
7519 return 0;
7520
7521 /* Compute the dominators. */
7522 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7523 compute_flow_dominators (dom, NULL);
7524
7525 /* Count the number of loop edges (back edges). This should be the
7526 same as the number of natural loops. Also clear the loop_depth
7527 and as we work from inner->outer in a loop nest we call
7528 find_loop_nodes_find which will increment loop_depth for nodes
7529 within the current loop, which happens to enclose inner loops. */
7530
7531 num_loops = 0;
7532 for (b = 0; b < n_basic_blocks; b++)
7533 {
7534 BASIC_BLOCK (b)->loop_depth = 0;
7535 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7536 {
7537 basic_block latch = e->src;
7538
7539 /* Look for back edges where a predecessor is dominated
7540 by this block. A natural loop has a single entry
7541 node (header) that dominates all the nodes in the
7542 loop. It also has single back edge to the header
7543 from a latch node. Note that multiple natural loops
7544 may share the same header. */
7545 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7546 num_loops++;
7547 }
7548 }
7549
7550 if (num_loops)
7551 {
7552 /* Compute depth first search order of the CFG so that outer
7553 natural loops will be found before inner natural loops. */
7554 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7555 flow_depth_first_order_compute (dfs_order);
7556
7557 /* Allocate loop structures. */
7558 loops->array
7559 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7560
7561 headers = sbitmap_alloc (n_basic_blocks);
7562 sbitmap_zero (headers);
7563
7564 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7565 sbitmap_zero (loops->shared_headers);
7566
7567 /* Find and record information about all the natural loops
7568 in the CFG. */
7569 num_loops = 0;
7570 for (b = 0; b < n_basic_blocks; b++)
7571 {
7572 basic_block header;
7573
7574 /* Search the nodes of the CFG in DFS order that we can find
7575 outer loops first. */
7576 header = BASIC_BLOCK (dfs_order[b]);
7577
7578 /* Look for all the possible latch blocks for this header. */
7579 for (e = header->pred; e; e = e->pred_next)
7580 {
7581 basic_block latch = e->src;
7582
7583 /* Look for back edges where a predecessor is dominated
7584 by this block. A natural loop has a single entry
7585 node (header) that dominates all the nodes in the
7586 loop. It also has single back edge to the header
7587 from a latch node. Note that multiple natural loops
7588 may share the same header. */
7589 if (latch != ENTRY_BLOCK_PTR
7590 && TEST_BIT (dom[latch->index], header->index))
7591 {
7592 struct loop *loop;
7593
7594 loop = loops->array + num_loops;
7595
7596 loop->header = header;
7597 loop->latch = latch;
7598
7599 /* Keep track of blocks that are loop headers so
7600 that we can tell which loops should be merged. */
7601 if (TEST_BIT (headers, header->index))
7602 SET_BIT (loops->shared_headers, header->index);
7603 SET_BIT (headers, header->index);
7604
7605 /* Find nodes contained within the loop. */
7606 loop->nodes = sbitmap_alloc (n_basic_blocks);
7607 loop->num_nodes
7608 = flow_loop_nodes_find (header, latch, loop->nodes);
7609
7610 /* Compute first and last blocks within the loop.
7611 These are often the same as the loop header and
7612 loop latch respectively, but this is not always
7613 the case. */
7614 loop->first
7615 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7616 loop->last
7617 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7618
7619 /* Find edges which exit the loop. Note that a node
7620 may have several exit edges. */
7621 loop->num_exits
7622 = flow_loop_exits_find (loop->nodes, &loop->exits);
7623
7624 /* Look to see if the loop has a pre-header node. */
7625 loop->pre_header
7626 = flow_loop_pre_header_find (header, dom);
7627
7628 num_loops++;
7629 }
7630 }
7631 }
7632
7633 /* Natural loops with shared headers may either be disjoint or
7634 nested. Disjoint loops with shared headers cannot be inner
7635 loops and should be merged. For now just mark loops that share
7636 headers. */
7637 for (i = 0; i < num_loops; i++)
7638 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7639 loops->array[i].shared = 1;
7640
7641 sbitmap_free (headers);
7642 }
7643
7644 loops->num = num_loops;
7645
7646 /* Save CFG derived information to avoid recomputing it. */
7647 loops->cfg.dom = dom;
7648 loops->cfg.dfs_order = dfs_order;
7649
7650 /* Build the loop hierarchy tree. */
7651 flow_loops_tree_build (loops);
7652
7653 /* Assign the loop nesting depth and enclosed loop level for each
7654 loop. */
7655 loops->levels = flow_loops_level_compute (loops);
7656
7657 return num_loops;
7658 }
7659
7660
7661 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7662
7663 int
7664 flow_loop_outside_edge_p (loop, e)
7665 const struct loop *loop;
7666 edge e;
7667 {
7668 if (e->dest != loop->header)
7669 abort ();
7670 return (e->src == ENTRY_BLOCK_PTR)
7671 || ! TEST_BIT (loop->nodes, e->src->index);
7672 }
7673
7674
7675 /* Clear LOG_LINKS fields of insns in a chain. */
7676
7677 void
7678 clear_log_links (insns)
7679 rtx insns;
7680 {
7681 rtx i;
7682 for (i = insns; i; i = NEXT_INSN (i))
7683 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7684 LOG_LINKS (i) = 0;
7685 }
7686
7687 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7688 correspond to the hard registers, if any, set in that map. This
7689 could be done far more efficiently by having all sorts of special-cases
7690 with moving single words, but probably isn't worth the trouble. */
7691
7692 void
7693 reg_set_to_hard_reg_set (to, from)
7694 HARD_REG_SET *to;
7695 bitmap from;
7696 {
7697 int i;
7698
7699 EXECUTE_IF_SET_IN_BITMAP
7700 (from, 0, i,
7701 {
7702 if (i >= FIRST_PSEUDO_REGISTER)
7703 return;
7704 SET_HARD_REG_BIT (*to, i);
7705 });
7706 }
7707