flow.c (commit_one_edge_insertion): Be prepared for a return insn to be inserted...
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
26
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
30
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
34
35 ** find_basic_blocks **
36
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
41
42 find_basic_blocks also finds any unreachable loops and deletes them.
43
44 ** life_analysis **
45
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
49
50 ** live-register info **
51
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
59
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
66
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
76 REG_DEAD notes.
77
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
84
 85	  To determine which registers are live after any insn, one can
 86	  start from the beginning of the basic block and scan insns, noting
 87	  which registers each insn sets and which die there; a sketch follows this comment.
88
89 ** Other actions of life_analysis **
90
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
93
94 life_analysis deletes insns whose only effect is to store a value
95 that is never used.
96
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
102
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
105
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
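
/* A minimal, self-contained sketch (not part of the original file) of
   the live-register scan described above: starting from the registers
   live at the head of a block, each insn's REG_DEAD notes kill
   registers, and each set makes a register live.  The toy_insn type
   and its fields are hypothetical stand-ins for real insns and their
   REG_NOTES; REG_UNUSED handling is omitted for brevity.  */

#include <stdio.h>
#include <string.h>

#define NREGS 8

struct toy_insn
{
  unsigned char sets[NREGS];	/* Registers this insn stores into.  */
  unsigned char dead[NREGS];	/* Registers with a REG_DEAD note here.  */
};

/* Scan INSNS forward; after processing insn I, LIVE holds the registers
   live after that insn.  REG_DEAD is applied before the set, so an insn
   that both uses and sets a register leaves it live.  */

static void
scan_block (insns, n_insns, live)
     const struct toy_insn *insns;
     int n_insns;
     unsigned char *live;
{
  int i, r;

  for (i = 0; i < n_insns; ++i)
    {
      for (r = 0; r < NREGS; ++r)
	{
	  if (insns[i].dead[r])
	    live[r] = 0;
	  if (insns[i].sets[r])
	    live[r] = 1;
	}

      printf ("after insn %d:", i);
      for (r = 0; r < NREGS; ++r)
	if (live[r])
	  printf (" r%d", r);
      printf ("\n");
    }
}

int
main ()
{
  struct toy_insn insns[3];
  unsigned char live[NREGS];

  memset (insns, 0, sizeof insns);
  memset (live, 0, sizeof live);

  insns[0].sets[0] = 1;		/* r0 = ...  */
  insns[1].sets[1] = 1;		/* r1 = f (r0), and ...  */
  insns[1].dead[0] = 1;		/* ... r0 dies here.  */
  insns[2].dead[1] = 1;		/* Use r1; r1 dies here.  */

  scan_block (insns, 3, live);
  return 0;
}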
112
113 /* TODO:
114
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
118 - log links creation
119 - pre/post modify transformation
120 */
121 \f
122 #include "config.h"
123 #include "system.h"
124 #include "tree.h"
125 #include "rtl.h"
126 #include "tm_p.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "hard-reg-set.h"
131 #include "flags.h"
132 #include "output.h"
133 #include "function.h"
134 #include "except.h"
135 #include "toplev.h"
136 #include "recog.h"
137 #include "insn-flags.h"
138 #include "expr.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146
147 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
148 the stack pointer does not matter. The value is tested only in
149 functions that have frame pointers.
150 No definition is equivalent to always zero. */
151 #ifndef EXIT_IGNORE_STACK
152 #define EXIT_IGNORE_STACK 0
153 #endif
154
155 #ifndef HAVE_epilogue
156 #define HAVE_epilogue 0
157 #endif
158 #ifndef HAVE_prologue
159 #define HAVE_prologue 0
160 #endif
161 #ifndef HAVE_sibcall_epilogue
162 #define HAVE_sibcall_epilogue 0
163 #endif
164
165 /* The contents of the current function definition are allocated
166 in this obstack, and all are freed at the end of the function.
167 For top-level functions, this is temporary_obstack.
168 Separate obstacks are made for nested functions. */
169
170 extern struct obstack *function_obstack;
171
172 /* Number of basic blocks in the current function. */
173
174 int n_basic_blocks;
175
176 /* Number of edges in the current function. */
177
178 int n_edges;
179
180 /* The basic block array. */
181
182 varray_type basic_block_info;
183
184 /* The special entry and exit blocks. */
185
186 struct basic_block_def entry_exit_blocks[2]
187 = {{NULL, /* head */
188 NULL, /* end */
189 NULL, /* pred */
190 NULL, /* succ */
191 NULL, /* local_set */
192 NULL, /* global_live_at_start */
193 NULL, /* global_live_at_end */
194 NULL, /* aux */
195 ENTRY_BLOCK, /* index */
196 0, /* loop_depth */
197 -1, -1 /* eh_beg, eh_end */
198 },
199 {
200 NULL, /* head */
201 NULL, /* end */
202 NULL, /* pred */
203 NULL, /* succ */
204 NULL, /* local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 EXIT_BLOCK, /* index */
209 0, /* loop_depth */
210 -1, -1 /* eh_beg, eh_end */
211 }
212 };
213
214 /* Nonzero if the second flow pass has completed. */
215 int flow2_completed;
216
217 /* Maximum register number used in this function, plus one. */
218
219 int max_regno;
220
 221	/* Indexed by n, giving various register information.  */
222
223 varray_type reg_n_info;
224
225 /* Size of a regset for the current function,
226 in (1) bytes and (2) elements. */
227
228 int regset_bytes;
229 int regset_size;
230
231 /* Regset of regs live when calls to `setjmp'-like functions happen. */
232 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233
234 regset regs_live_at_setjmp;
235
236 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
237 that have to go in the same hard reg.
238 The first two regs in the list are a pair, and the next two
239 are another pair, etc. */
240 rtx regs_may_share;
241
242 /* Set of registers that may be eliminable. These are handled specially
243 in updating regs_ever_live. */
244
245 static HARD_REG_SET elim_reg_set;
246
247 /* The basic block structure for every insn, indexed by uid. */
248
249 varray_type basic_block_for_insn;
250
251 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
252 /* ??? Should probably be using LABEL_NUSES instead. It would take a
253 bit of surgery to be able to use or co-opt the routines in jump. */
254
255 static rtx label_value_list;
256 static rtx tail_recursion_label_list;
257
258 /* Holds information for tracking conditional register life information. */
259 struct reg_cond_life_info
260 {
261 /* An EXPR_LIST of conditions under which a register is dead. */
262 rtx condition;
263
264 /* ??? Could store mask of bytes that are dead, so that we could finally
265 track lifetimes of multi-word registers accessed via subregs. */
266 };
267
268 /* For use in communicating between propagate_block and its subroutines.
269 Holds all information needed to compute life and def-use information. */
270
271 struct propagate_block_info
272 {
273 /* The basic block we're considering. */
274 basic_block bb;
275
276 /* Bit N is set if register N is conditionally or unconditionally live. */
277 regset reg_live;
278
279 /* Bit N is set if register N is set this insn. */
280 regset new_set;
281
282 /* Element N is the next insn that uses (hard or pseudo) register N
283 within the current basic block; or zero, if there is no such insn. */
284 rtx *reg_next_use;
285
286 /* Contains a list of all the MEMs we are tracking for dead store
287 elimination. */
288 rtx mem_set_list;
289
290 /* If non-null, record the set of registers set in the basic block. */
291 regset local_set;
292
293 #ifdef HAVE_conditional_execution
294 /* Indexed by register number, holds a reg_cond_life_info for each
295 register that is not unconditionally live or dead. */
296 splay_tree reg_cond_dead;
297
298 /* Bit N is set if register N is in an expression in reg_cond_dead. */
299 regset reg_cond_reg;
300 #endif
301
302 /* Non-zero if the value of CC0 is live. */
303 int cc0_live;
304
 305	  /* Flags controlling the set of information propagate_block collects.  */
306 int flags;
307 };
308
309 /* Forward declarations */
310 static int count_basic_blocks PARAMS ((rtx));
311 static void find_basic_blocks_1 PARAMS ((rtx));
312 static void clear_edges PARAMS ((void));
313 static void make_edges PARAMS ((rtx));
314 static void make_label_edge PARAMS ((sbitmap *, basic_block,
315 rtx, int));
316 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
317 basic_block, rtx, int));
318 static void mark_critical_edges PARAMS ((void));
319 static void move_stray_eh_region_notes PARAMS ((void));
320 static void record_active_eh_regions PARAMS ((rtx));
321
322 static void commit_one_edge_insertion PARAMS ((edge));
323
324 static void delete_unreachable_blocks PARAMS ((void));
325 static void delete_eh_regions PARAMS ((void));
326 static int can_delete_note_p PARAMS ((rtx));
327 static void expunge_block PARAMS ((basic_block));
328 static int can_delete_label_p PARAMS ((rtx));
329 static int tail_recursion_label_p PARAMS ((rtx));
330 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
331 basic_block));
332 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
333 basic_block));
 334	static int merge_blocks			PARAMS ((edge, basic_block, basic_block));
335 static void try_merge_blocks PARAMS ((void));
336 static void tidy_fallthru_edges PARAMS ((void));
337 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
338 static void verify_wide_reg PARAMS ((int, rtx, rtx));
339 static void verify_local_live_at_start PARAMS ((regset, basic_block));
340 static int set_noop_p PARAMS ((rtx));
341 static int noop_move_p PARAMS ((rtx));
342 static void delete_noop_moves PARAMS ((rtx));
343 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
344 static void notice_stack_pointer_modification PARAMS ((rtx));
345 static void mark_reg PARAMS ((rtx, void *));
346 static void mark_regs_live_at_end PARAMS ((regset));
347 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
348 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
349 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
350 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
351 static int insn_dead_p PARAMS ((struct propagate_block_info *,
352 rtx, int, rtx));
353 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
354 rtx, rtx, rtx));
355 static void mark_set_regs PARAMS ((struct propagate_block_info *,
356 rtx, rtx));
357 static void mark_set_1 PARAMS ((struct propagate_block_info *,
358 enum rtx_code, rtx, rtx,
359 rtx, int));
360 #ifdef HAVE_conditional_execution
361 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
362 int, rtx));
363 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
364 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
365 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
366 int));
367 static rtx ior_reg_cond PARAMS ((rtx, rtx));
368 static rtx not_reg_cond PARAMS ((rtx));
369 static rtx nand_reg_cond PARAMS ((rtx, rtx));
370 #endif
371 #ifdef AUTO_INC_DEC
372 static void find_auto_inc PARAMS ((struct propagate_block_info *,
373 rtx, rtx));
374 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
375 rtx));
376 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
377 #endif
378 static void mark_used_reg PARAMS ((struct propagate_block_info *,
379 rtx, rtx, rtx));
380 static void mark_used_regs PARAMS ((struct propagate_block_info *,
381 rtx, rtx, rtx));
382 void dump_flow_info PARAMS ((FILE *));
383 void debug_flow_info PARAMS ((void));
384 static void dump_edge_info PARAMS ((FILE *, edge, int));
385
386 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
387 rtx));
388 static void remove_fake_successors PARAMS ((basic_block));
389 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
390 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
391 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
392 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
393 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
394 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
395 static int flow_depth_first_order_compute PARAMS ((int *));
396 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
397 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
398 static void flow_loops_tree_build PARAMS ((struct loops *));
399 static int flow_loop_level_compute PARAMS ((struct loop *, int));
400 static int flow_loops_level_compute PARAMS ((struct loops *));
401 \f
402 /* Find basic blocks of the current function.
403 F is the first insn of the function and NREGS the number of register
404 numbers in use. */
405
406 void
407 find_basic_blocks (f, nregs, file)
408 rtx f;
409 int nregs ATTRIBUTE_UNUSED;
410 FILE *file ATTRIBUTE_UNUSED;
411 {
412 int max_uid;
413
414 /* Flush out existing data. */
415 if (basic_block_info != NULL)
416 {
417 int i;
418
419 clear_edges ();
420
421 /* Clear bb->aux on all extant basic blocks. We'll use this as a
422 tag for reuse during create_basic_block, just in case some pass
423 copies around basic block notes improperly. */
424 for (i = 0; i < n_basic_blocks; ++i)
425 BASIC_BLOCK (i)->aux = NULL;
426
427 VARRAY_FREE (basic_block_info);
428 }
429
430 n_basic_blocks = count_basic_blocks (f);
431
432 /* Size the basic block table. The actual structures will be allocated
433 by find_basic_blocks_1, since we want to keep the structure pointers
434 stable across calls to find_basic_blocks. */
435 /* ??? This whole issue would be much simpler if we called find_basic_blocks
436 exactly once, and thereafter we don't have a single long chain of
437 instructions at all until close to the end of compilation when we
438 actually lay them out. */
439
440 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
441
442 find_basic_blocks_1 (f);
443
444 /* Record the block to which an insn belongs. */
445 /* ??? This should be done another way, by which (perhaps) a label is
446 tagged directly with the basic block that it starts. It is used for
447 more than that currently, but IMO that is the only valid use. */
448
449 max_uid = get_max_uid ();
450 #ifdef AUTO_INC_DEC
451 /* Leave space for insns life_analysis makes in some cases for auto-inc.
452 These cases are rare, so we don't need too much space. */
453 max_uid += max_uid / 10;
454 #endif
455
456 compute_bb_for_insn (max_uid);
457
458 /* Discover the edges of our cfg. */
459 record_active_eh_regions (f);
460 make_edges (label_value_list);
461
462 /* Do very simple cleanup now, for the benefit of code that runs between
463 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
464 tidy_fallthru_edges ();
465
466 mark_critical_edges ();
467
468 #ifdef ENABLE_CHECKING
469 verify_flow_info ();
470 #endif
471 }
472
473 /* Count the basic blocks of the function. */
474
475 static int
476 count_basic_blocks (f)
477 rtx f;
478 {
479 register rtx insn;
480 register RTX_CODE prev_code;
481 register int count = 0;
482 int eh_region = 0;
483 int call_had_abnormal_edge = 0;
484
485 prev_code = JUMP_INSN;
486 for (insn = f; insn; insn = NEXT_INSN (insn))
487 {
488 register RTX_CODE code = GET_CODE (insn);
489
490 if (code == CODE_LABEL
491 || (GET_RTX_CLASS (code) == 'i'
492 && (prev_code == JUMP_INSN
493 || prev_code == BARRIER
494 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
495 count++;
496
497 /* Record whether this call created an edge. */
498 if (code == CALL_INSN)
499 {
500 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
501 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
502
503 call_had_abnormal_edge = 0;
504
505 /* If there is an EH region or rethrow, we have an edge. */
506 if ((eh_region && region > 0)
507 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
508 call_had_abnormal_edge = 1;
509 else if (nonlocal_goto_handler_labels && region >= 0)
510 /* If there is a nonlocal goto label and the specified
511 region number isn't -1, we have an edge. (0 means
512 no throw, but might have a nonlocal goto). */
513 call_had_abnormal_edge = 1;
514 }
515
516 if (code != NOTE)
517 prev_code = code;
518 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
519 ++eh_region;
520 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
521 --eh_region;
522 }
523
 524	  /* The rest of the compiler works a bit more smoothly when we don't have to
 525	     check for the edge case of do-nothing functions with no basic blocks.  */
526 if (count == 0)
527 {
528 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
529 count = 1;
530 }
531
532 return count;
533 }
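
/* An illustrative example (not from the original source) of the
   boundaries counted above.  Given the insn stream

	(code_label L1)		;; starts a block: CODE_LABEL
	(insn ...)
	(jump_insn ... L2)
	(barrier)
	(code_label L2)		;; starts a block: CODE_LABEL
	(insn ...)

   count_basic_blocks returns 2, one for each CODE_LABEL.  Had L2 been
   absent, the insn after the BARRIER would still start a block, since
   the prev_code test above catches the first real insn following a
   JUMP_INSN, BARRIER, or abnormal CALL_INSN.  */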
534
535 /* Find all basic blocks of the function whose first insn is F.
536
 537	   Collect a list of labels whose addresses are taken and store it in
 538	   label_value_list; make_edges uses it for computed gotos.  */
539
540 static void
541 find_basic_blocks_1 (f)
542 rtx f;
543 {
544 register rtx insn, next;
545 int i = 0;
546 rtx bb_note = NULL_RTX;
547 rtx eh_list = NULL_RTX;
548 rtx lvl = NULL_RTX;
549 rtx trll = NULL_RTX;
550 rtx head = NULL_RTX;
551 rtx end = NULL_RTX;
552
553 /* We process the instructions in a slightly different way than we did
554 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
555 closed out the previous block, so that it gets attached at the proper
556 place. Since this form should be equivalent to the previous,
557 count_basic_blocks continues to use the old form as a check. */
558
559 for (insn = f; insn; insn = next)
560 {
561 enum rtx_code code = GET_CODE (insn);
562
563 next = NEXT_INSN (insn);
564
565 switch (code)
566 {
567 case NOTE:
568 {
569 int kind = NOTE_LINE_NUMBER (insn);
570
571 /* Keep a LIFO list of the currently active exception notes. */
572 if (kind == NOTE_INSN_EH_REGION_BEG)
573 eh_list = alloc_INSN_LIST (insn, eh_list);
574 else if (kind == NOTE_INSN_EH_REGION_END)
575 {
576 rtx t = eh_list;
577
578 eh_list = XEXP (eh_list, 1);
579 free_INSN_LIST_node (t);
580 }
581
582 /* Look for basic block notes with which to keep the
583 basic_block_info pointers stable. Unthread the note now;
584 we'll put it back at the right place in create_basic_block.
585 Or not at all if we've already found a note in this block. */
586 else if (kind == NOTE_INSN_BASIC_BLOCK)
587 {
588 if (bb_note == NULL_RTX)
589 bb_note = insn;
590 else
591 next = flow_delete_insn (insn);
592 }
593 break;
594 }
595
596 case CODE_LABEL:
597 /* A basic block starts at a label. If we've closed one off due
598 to a barrier or some such, no need to do it again. */
599 if (head != NULL_RTX)
600 {
601 /* While we now have edge lists with which other portions of
602 the compiler might determine a call ending a basic block
603 does not imply an abnormal edge, it will be a bit before
604 everything can be updated. So continue to emit a noop at
605 the end of such a block. */
606 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
607 {
608 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
609 end = emit_insn_after (nop, end);
610 }
611
612 create_basic_block (i++, head, end, bb_note);
613 bb_note = NULL_RTX;
614 }
615
616 head = end = insn;
617 break;
618
619 case JUMP_INSN:
620 /* A basic block ends at a jump. */
621 if (head == NULL_RTX)
622 head = insn;
623 else
624 {
625 /* ??? Make a special check for table jumps. The way this
626 happens is truly and amazingly gross. We are about to
627 create a basic block that contains just a code label and
628 an addr*vec jump insn. Worse, an addr_diff_vec creates
629 its own natural loop.
630
631 Prevent this bit of brain damage, pasting things together
632 correctly in make_edges.
633
634 The correct solution involves emitting the table directly
635 on the tablejump instruction as a note, or JUMP_LABEL. */
636
637 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
638 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
639 {
640 head = end = NULL;
641 n_basic_blocks--;
642 break;
643 }
644 }
645 end = insn;
646 goto new_bb_inclusive;
647
648 case BARRIER:
649 /* A basic block ends at a barrier. It may be that an unconditional
650 jump already closed the basic block -- no need to do it again. */
651 if (head == NULL_RTX)
652 break;
653
654 /* While we now have edge lists with which other portions of the
655 compiler might determine a call ending a basic block does not
656 imply an abnormal edge, it will be a bit before everything can
657 be updated. So continue to emit a noop at the end of such a
658 block. */
659 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
660 {
661 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
662 end = emit_insn_after (nop, end);
663 }
664 goto new_bb_exclusive;
665
666 case CALL_INSN:
667 {
668 /* Record whether this call created an edge. */
669 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
670 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
671 int call_has_abnormal_edge = 0;
672
673 /* If this is a call placeholder, record its tail recursion
674 label, if any. */
675 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER
676 && XEXP (PATTERN (insn), 3) != NULL_RTX)
677 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
678
679 /* If there is an EH region or rethrow, we have an edge. */
680 if ((eh_list && region > 0)
681 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
682 call_has_abnormal_edge = 1;
683 else if (nonlocal_goto_handler_labels && region >= 0)
684 /* If there is a nonlocal goto label and the specified
685 region number isn't -1, we have an edge. (0 means
686 no throw, but might have a nonlocal goto). */
687 call_has_abnormal_edge = 1;
688
689 /* A basic block ends at a call that can either throw or
690 do a non-local goto. */
691 if (call_has_abnormal_edge)
692 {
693 new_bb_inclusive:
694 if (head == NULL_RTX)
695 head = insn;
696 end = insn;
697
698 new_bb_exclusive:
699 create_basic_block (i++, head, end, bb_note);
700 head = end = NULL_RTX;
701 bb_note = NULL_RTX;
702 break;
703 }
704 }
705 /* FALLTHRU */
706
707 default:
708 if (GET_RTX_CLASS (code) == 'i')
709 {
710 if (head == NULL_RTX)
711 head = insn;
712 end = insn;
713 }
714 break;
715 }
716
717 if (GET_RTX_CLASS (code) == 'i')
718 {
719 rtx note;
720
721 /* Make a list of all labels referred to other than by jumps
722 (which just don't have the REG_LABEL notes).
723
724 Make a special exception for labels followed by an ADDR*VEC,
725 as this would be a part of the tablejump setup code.
726
727 Make a special exception for the eh_return_stub_label, which
728 we know isn't part of any otherwise visible control flow. */
729
730 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
731 if (REG_NOTE_KIND (note) == REG_LABEL)
732 {
733 rtx lab = XEXP (note, 0), next;
734
735 if (lab == eh_return_stub_label)
736 ;
737 else if ((next = next_nonnote_insn (lab)) != NULL
738 && GET_CODE (next) == JUMP_INSN
739 && (GET_CODE (PATTERN (next)) == ADDR_VEC
740 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
741 ;
742 else if (GET_CODE (lab) == NOTE)
743 ;
744 else
745 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
746 }
747 }
748 }
749
750 if (head != NULL_RTX)
751 create_basic_block (i++, head, end, bb_note);
752 else if (bb_note)
753 flow_delete_insn (bb_note);
754
755 if (i != n_basic_blocks)
756 abort ();
757
758 label_value_list = lvl;
759 tail_recursion_label_list = trll;
760 }
761
762 /* Tidy the CFG by deleting unreachable code and whatnot. */
763
764 void
765 cleanup_cfg (f)
766 rtx f;
767 {
768 delete_unreachable_blocks ();
769 move_stray_eh_region_notes ();
770 record_active_eh_regions (f);
771 try_merge_blocks ();
772 mark_critical_edges ();
773
774 /* Kill the data we won't maintain. */
775 free_EXPR_LIST_list (&label_value_list);
776 free_EXPR_LIST_list (&tail_recursion_label_list);
777 }
778
779 /* Create a new basic block consisting of the instructions between
780 HEAD and END inclusive. Reuses the note and basic block struct
781 in BB_NOTE, if any. */
782
783 void
784 create_basic_block (index, head, end, bb_note)
785 int index;
786 rtx head, end, bb_note;
787 {
788 basic_block bb;
789
790 if (bb_note
791 && ! RTX_INTEGRATED_P (bb_note)
792 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
793 && bb->aux == NULL)
794 {
795 /* If we found an existing note, thread it back onto the chain. */
796
797 rtx after;
798
799 if (GET_CODE (head) == CODE_LABEL)
800 after = head;
801 else
802 {
803 after = PREV_INSN (head);
804 head = bb_note;
805 }
806
807 if (after != bb_note && NEXT_INSN (after) != bb_note)
808 reorder_insns (bb_note, bb_note, after);
809 }
810 else
811 {
812 /* Otherwise we must create a note and a basic block structure.
813 Since we allow basic block structs in rtl, give the struct
814 the same lifetime by allocating it off the function obstack
815 rather than using malloc. */
816
817 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
818 memset (bb, 0, sizeof (*bb));
819
820 if (GET_CODE (head) == CODE_LABEL)
821 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
822 else
823 {
824 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
825 head = bb_note;
826 }
827 NOTE_BASIC_BLOCK (bb_note) = bb;
828 }
829
830 /* Always include the bb note in the block. */
831 if (NEXT_INSN (end) == bb_note)
832 end = bb_note;
833
834 bb->head = head;
835 bb->end = end;
836 bb->index = index;
837 BASIC_BLOCK (index) = bb;
838
839 /* Tag the block so that we know it has been used when considering
840 other basic block notes. */
841 bb->aux = bb;
842 }
843 \f
 844	/* Record the basic block struct in basic_block_for_insn, for every
 845	   instruction indexed by INSN_UID.  MAX is the size of the array.  */
846
847 void
848 compute_bb_for_insn (max)
849 int max;
850 {
851 int i;
852
853 if (basic_block_for_insn)
854 VARRAY_FREE (basic_block_for_insn);
855 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
856
857 for (i = 0; i < n_basic_blocks; ++i)
858 {
859 basic_block bb = BASIC_BLOCK (i);
860 rtx insn, end;
861
862 end = bb->end;
863 insn = bb->head;
864 while (1)
865 {
866 int uid = INSN_UID (insn);
867 if (uid < max)
868 VARRAY_BB (basic_block_for_insn, uid) = bb;
869 if (insn == end)
870 break;
871 insn = NEXT_INSN (insn);
872 }
873 }
874 }
875
876 /* Free the memory associated with the edge structures. */
877
878 static void
879 clear_edges ()
880 {
881 int i;
882 edge n, e;
883
884 for (i = 0; i < n_basic_blocks; ++i)
885 {
886 basic_block bb = BASIC_BLOCK (i);
887
888 for (e = bb->succ; e ; e = n)
889 {
890 n = e->succ_next;
891 free (e);
892 }
893
894 bb->succ = 0;
895 bb->pred = 0;
896 }
897
898 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
899 {
900 n = e->succ_next;
901 free (e);
902 }
903
904 ENTRY_BLOCK_PTR->succ = 0;
905 EXIT_BLOCK_PTR->pred = 0;
906
907 n_edges = 0;
908 }
909
910 /* Identify the edges between basic blocks.
911
 912	   LABEL_VALUE_LIST is a list of labels mentioned in non-jump rtl;
 913	   a computed jump may transfer control to any of them, so blocks
 914	   that are otherwise unreachable may be reachable through one.
 915	   Exception region information comes from the eh_beg/eh_end fields
 916	   set on each basic block by record_active_eh_regions.  */
917
918 static void
919 make_edges (label_value_list)
920 rtx label_value_list;
921 {
922 int i;
923 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
924 sbitmap *edge_cache = NULL;
925
926 /* Assume no computed jump; revise as we create edges. */
927 current_function_has_computed_jump = 0;
928
929 /* Heavy use of computed goto in machine-generated code can lead to
930 nearly fully-connected CFGs. In that case we spend a significant
931 amount of time searching the edge lists for duplicates. */
932 if (forced_labels || label_value_list)
933 {
934 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
935 sbitmap_vector_zero (edge_cache, n_basic_blocks);
936 }
937
938 /* By nature of the way these get numbered, block 0 is always the entry. */
939 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
940
941 for (i = 0; i < n_basic_blocks; ++i)
942 {
943 basic_block bb = BASIC_BLOCK (i);
944 rtx insn, x;
945 enum rtx_code code;
946 int force_fallthru = 0;
947
948 /* Examine the last instruction of the block, and discover the
949 ways we can leave the block. */
950
951 insn = bb->end;
952 code = GET_CODE (insn);
953
954 /* A branch. */
955 if (code == JUMP_INSN)
956 {
957 rtx tmp;
958
959 /* ??? Recognize a tablejump and do the right thing. */
960 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
961 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
962 && GET_CODE (tmp) == JUMP_INSN
963 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
964 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
965 {
966 rtvec vec;
967 int j;
968
969 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
970 vec = XVEC (PATTERN (tmp), 0);
971 else
972 vec = XVEC (PATTERN (tmp), 1);
973
974 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
975 make_label_edge (edge_cache, bb,
976 XEXP (RTVEC_ELT (vec, j), 0), 0);
977
 978	      /* Some targets (e.g., ARM) emit a conditional jump that also
979 contains the out-of-range target. Scan for these and
980 add an edge if necessary. */
981 if ((tmp = single_set (insn)) != NULL
982 && SET_DEST (tmp) == pc_rtx
983 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
984 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
985 make_label_edge (edge_cache, bb,
986 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
987
988 #ifdef CASE_DROPS_THROUGH
989 /* Silly VAXen. The ADDR_VEC is going to be in the way of
990 us naturally detecting fallthru into the next block. */
991 force_fallthru = 1;
992 #endif
993 }
994
995 /* If this is a computed jump, then mark it as reaching
996 everything on the label_value_list and forced_labels list. */
997 else if (computed_jump_p (insn))
998 {
999 current_function_has_computed_jump = 1;
1000
1001 for (x = label_value_list; x; x = XEXP (x, 1))
1002 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1003
1004 for (x = forced_labels; x; x = XEXP (x, 1))
1005 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1006 }
1007
1008	  /* A return insn creates an edge out to the exit block.  */
1009 else if (returnjump_p (insn))
1010 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1011
1012 /* Otherwise, we have a plain conditional or unconditional jump. */
1013 else
1014 {
1015 if (! JUMP_LABEL (insn))
1016 abort ();
1017 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1018 }
1019 }
1020
1021 /* If this is a sibling call insn, then this is in effect a
1022 combined call and return, and so we need an edge to the
1023 exit block. No need to worry about EH edges, since we
1024 wouldn't have created the sibling call in the first place. */
1025
1026 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1027 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1028 else
1029
1030 /* If this is a CALL_INSN, then mark it as reaching the active EH
1031 handler for this CALL_INSN. If we're handling asynchronous
1032 exceptions then any insn can reach any of the active handlers.
1033
1034 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1035
1036 if (code == CALL_INSN || asynchronous_exceptions)
1037 {
1038 /* Add any appropriate EH edges. We do this unconditionally
1039 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1040 on the call, and this needn't be within an EH region. */
1041 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1042
1043 /* If we have asynchronous exceptions, do the same for *all*
1044 exception regions active in the block. */
1045 if (asynchronous_exceptions
1046 && bb->eh_beg != bb->eh_end)
1047 {
1048 if (bb->eh_beg >= 0)
1049 make_eh_edge (edge_cache, eh_nest_info, bb,
1050 NULL_RTX, bb->eh_beg);
1051
1052 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1053 if (GET_CODE (x) == NOTE
1054 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1055 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1056 {
1057 int region = NOTE_EH_HANDLER (x);
1058 make_eh_edge (edge_cache, eh_nest_info, bb,
1059 NULL_RTX, region);
1060 }
1061 }
1062
1063 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1064 {
1065 /* ??? This could be made smarter: in some cases it's possible
1066 to tell that certain calls will not do a nonlocal goto.
1067
1068 For example, if the nested functions that do the nonlocal
1069 gotos do not have their addresses taken, then only calls to
1070 those functions or to other nested functions that use them
1071 could possibly do nonlocal gotos. */
1072 /* We do know that a REG_EH_REGION note with a value less
1073 than 0 is guaranteed not to perform a non-local goto. */
1074 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1075 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1076 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1077 make_label_edge (edge_cache, bb, XEXP (x, 0),
1078 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1079 }
1080 }
1081
1082 /* We know something about the structure of the function __throw in
1083 libgcc2.c. It is the only function that ever contains eh_stub
1084 labels. It modifies its return address so that the last block
1085 returns to one of the eh_stub labels within it. So we have to
1086 make additional edges in the flow graph. */
1087 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1088 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1089
1090 /* Find out if we can drop through to the next block. */
1091 insn = next_nonnote_insn (insn);
1092 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1093 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1094 else if (i + 1 < n_basic_blocks)
1095 {
1096 rtx tmp = BLOCK_HEAD (i + 1);
1097 if (GET_CODE (tmp) == NOTE)
1098 tmp = next_nonnote_insn (tmp);
1099 if (force_fallthru || insn == tmp)
1100 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1101 }
1102 }
1103
1104 free_eh_nesting_info (eh_nest_info);
1105 if (edge_cache)
1106 sbitmap_vector_free (edge_cache);
1107 }
1108
1109	/* Create an edge between two basic blocks.  FLAGS is auxiliary information
1110	   about the edge that is accumulated between calls.  */
1111
1112 void
1113 make_edge (edge_cache, src, dst, flags)
1114 sbitmap *edge_cache;
1115 basic_block src, dst;
1116 int flags;
1117 {
1118 int use_edge_cache;
1119 edge e;
1120
1121 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1122 many edges to them, and we didn't allocate memory for it. */
1123 use_edge_cache = (edge_cache
1124 && src != ENTRY_BLOCK_PTR
1125 && dst != EXIT_BLOCK_PTR);
1126
1127 /* Make sure we don't add duplicate edges. */
1128 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1129 for (e = src->succ; e ; e = e->succ_next)
1130 if (e->dest == dst)
1131 {
1132 e->flags |= flags;
1133 return;
1134 }
1135
1136 e = (edge) xcalloc (1, sizeof (*e));
1137 n_edges++;
1138
1139 e->succ_next = src->succ;
1140 e->pred_next = dst->pred;
1141 e->src = src;
1142 e->dest = dst;
1143 e->flags = flags;
1144
1145 src->succ = e;
1146 dst->pred = e;
1147
1148 if (use_edge_cache)
1149 SET_BIT (edge_cache[src->index], dst->index);
1150 }
1151
1152 /* Create an edge from a basic block to a label. */
1153
1154 static void
1155 make_label_edge (edge_cache, src, label, flags)
1156 sbitmap *edge_cache;
1157 basic_block src;
1158 rtx label;
1159 int flags;
1160 {
1161 if (GET_CODE (label) != CODE_LABEL)
1162 abort ();
1163
1164 /* If the label was never emitted, this insn is junk, but avoid a
1165 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1166 as a result of a syntax error and a diagnostic has already been
1167 printed. */
1168
1169 if (INSN_UID (label) == 0)
1170 return;
1171
1172 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1173 }
1174
1175 /* Create the edges generated by INSN in REGION. */
1176
1177 static void
1178 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1179 sbitmap *edge_cache;
1180 eh_nesting_info *eh_nest_info;
1181 basic_block src;
1182 rtx insn;
1183 int region;
1184 {
1185 handler_info **handler_list;
1186 int num, is_call;
1187
1188 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1189 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1190 while (--num >= 0)
1191 {
1192 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1193 EDGE_ABNORMAL | EDGE_EH | is_call);
1194 }
1195 }
1196
1197	/* EH_REGION notes appearing between basic blocks are ambiguous, and even
1198 dangerous if we intend to move basic blocks around. Move such notes
1199 into the following block. */
1200
1201 static void
1202 move_stray_eh_region_notes ()
1203 {
1204 int i;
1205 basic_block b1, b2;
1206
1207 if (n_basic_blocks < 2)
1208 return;
1209
1210 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1211 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1212 {
1213 rtx insn, next, list = NULL_RTX;
1214
1215 b1 = BASIC_BLOCK (i);
1216 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1217 {
1218 next = NEXT_INSN (insn);
1219 if (GET_CODE (insn) == NOTE
1220 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1221 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1222 {
1223 /* Unlink from the insn chain. */
1224 NEXT_INSN (PREV_INSN (insn)) = next;
1225 PREV_INSN (next) = PREV_INSN (insn);
1226
1227 /* Queue it. */
1228 NEXT_INSN (insn) = list;
1229 list = insn;
1230 }
1231 }
1232
1233 if (list == NULL_RTX)
1234 continue;
1235
1236 /* Find where to insert these things. */
1237 insn = b2->head;
1238 if (GET_CODE (insn) == CODE_LABEL)
1239 insn = NEXT_INSN (insn);
1240
1241 while (list)
1242 {
1243 next = NEXT_INSN (list);
1244 add_insn_after (list, insn);
1245 list = next;
1246 }
1247 }
1248 }
1249
1250 /* Recompute eh_beg/eh_end for each basic block. */
1251
1252 static void
1253 record_active_eh_regions (f)
1254 rtx f;
1255 {
1256 rtx insn, eh_list = NULL_RTX;
1257 int i = 0;
1258 basic_block bb = BASIC_BLOCK (0);
1259
1260 for (insn = f; insn ; insn = NEXT_INSN (insn))
1261 {
1262 if (bb->head == insn)
1263 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1264
1265 if (GET_CODE (insn) == NOTE)
1266 {
1267 int kind = NOTE_LINE_NUMBER (insn);
1268 if (kind == NOTE_INSN_EH_REGION_BEG)
1269 eh_list = alloc_INSN_LIST (insn, eh_list);
1270 else if (kind == NOTE_INSN_EH_REGION_END)
1271 {
1272 rtx t = XEXP (eh_list, 1);
1273 free_INSN_LIST_node (eh_list);
1274 eh_list = t;
1275 }
1276 }
1277
1278 if (bb->end == insn)
1279 {
1280 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1281 i += 1;
1282 if (i == n_basic_blocks)
1283 break;
1284 bb = BASIC_BLOCK (i);
1285 }
1286 }
1287 }
1288
1289 /* Identify critical edges and set the bits appropriately. */
1290
1291 static void
1292 mark_critical_edges ()
1293 {
1294 int i, n = n_basic_blocks;
1295 basic_block bb;
1296
1297 /* We begin with the entry block. This is not terribly important now,
1298 but could be if a front end (Fortran) implemented alternate entry
1299 points. */
1300 bb = ENTRY_BLOCK_PTR;
1301 i = -1;
1302
1303 while (1)
1304 {
1305 edge e;
1306
1307 /* (1) Critical edges must have a source with multiple successors. */
1308 if (bb->succ && bb->succ->succ_next)
1309 {
1310 for (e = bb->succ; e ; e = e->succ_next)
1311 {
1312 /* (2) Critical edges must have a destination with multiple
1313 predecessors. Note that we know there is at least one
1314 predecessor -- the edge we followed to get here. */
1315 if (e->dest->pred->pred_next)
1316 e->flags |= EDGE_CRITICAL;
1317 else
1318 e->flags &= ~EDGE_CRITICAL;
1319 }
1320 }
1321 else
1322 {
1323 for (e = bb->succ; e ; e = e->succ_next)
1324 e->flags &= ~EDGE_CRITICAL;
1325 }
1326
1327 if (++i >= n)
1328 break;
1329 bb = BASIC_BLOCK (i);
1330 }
1331 }
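
/* An illustrative example (not from the original source).  In the cfg

	  A
	 / \
	B   |
	 \  |
	  \ |
	   C

   the edge A->C is critical: A has two successors (B and C), and C has
   two predecessors (B and A).  A->B is not critical because B has a
   single predecessor, and B->C is not critical because B has a single
   successor, matching tests (1) and (2) above.  */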
1332 \f
1333 /* Split a (typically critical) edge. Return the new block.
1334 Abort on abnormal edges.
1335
1336 ??? The code generally expects to be called on critical edges.
1337 The case of a block ending in an unconditional jump to a
1338 block with multiple predecessors is not handled optimally. */
1339
1340 basic_block
1341 split_edge (edge_in)
1342 edge edge_in;
1343 {
1344 basic_block old_pred, bb, old_succ;
1345 edge edge_out;
1346 rtx bb_note;
1347 int i, j;
1348
1349 /* Abnormal edges cannot be split. */
1350 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1351 abort ();
1352
1353 old_pred = edge_in->src;
1354 old_succ = edge_in->dest;
1355
1356 /* Remove the existing edge from the destination's pred list. */
1357 {
1358 edge *pp;
1359 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1360 continue;
1361 *pp = edge_in->pred_next;
1362 edge_in->pred_next = NULL;
1363 }
1364
1365 /* Create the new structures. */
1366 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1367 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1368 n_edges++;
1369
1370 memset (bb, 0, sizeof (*bb));
1371
1372 /* ??? This info is likely going to be out of date very soon. */
1373 if (old_succ->global_live_at_start)
1374 {
1375 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1376 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1377 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1378 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1379 }
1380
1381 /* Wire them up. */
1382 bb->pred = edge_in;
1383 bb->succ = edge_out;
1384
1385 edge_in->dest = bb;
1386 edge_in->flags &= ~EDGE_CRITICAL;
1387
1388 edge_out->pred_next = old_succ->pred;
1389 edge_out->succ_next = NULL;
1390 edge_out->src = bb;
1391 edge_out->dest = old_succ;
1392 edge_out->flags = EDGE_FALLTHRU;
1393 edge_out->probability = REG_BR_PROB_BASE;
1394
1395 old_succ->pred = edge_out;
1396
1397 /* Tricky case -- if there existed a fallthru into the successor
1398 (and we're not it) we must add a new unconditional jump around
1399 the new block we're actually interested in.
1400
1401 Further, if that edge is critical, this means a second new basic
1402 block must be created to hold it. In order to simplify correct
1403 insn placement, do this before we touch the existing basic block
1404 ordering for the block we were really wanting. */
1405 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1406 {
1407 edge e;
1408 for (e = edge_out->pred_next; e ; e = e->pred_next)
1409 if (e->flags & EDGE_FALLTHRU)
1410 break;
1411
1412 if (e)
1413 {
1414 basic_block jump_block;
1415 rtx pos;
1416
1417 if ((e->flags & EDGE_CRITICAL) == 0
1418 && e->src != ENTRY_BLOCK_PTR)
1419 {
1420	      /* Non-critical -- we can simply add a jump to the end
1421 of the existing predecessor. */
1422 jump_block = e->src;
1423 }
1424 else
1425 {
1426 /* We need a new block to hold the jump. The simplest
1427 way to do the bulk of the work here is to recursively
1428 call ourselves. */
1429 jump_block = split_edge (e);
1430 e = jump_block->succ;
1431 }
1432
1433 /* Now add the jump insn ... */
1434 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1435 jump_block->end);
1436 jump_block->end = pos;
1437 if (basic_block_for_insn)
1438 set_block_for_insn (pos, jump_block);
1439 emit_barrier_after (pos);
1440
1441 /* ... let jump know that label is in use, ... */
1442 JUMP_LABEL (pos) = old_succ->head;
1443 ++LABEL_NUSES (old_succ->head);
1444
1445 /* ... and clear fallthru on the outgoing edge. */
1446 e->flags &= ~EDGE_FALLTHRU;
1447
1448 /* Continue splitting the interesting edge. */
1449 }
1450 }
1451
1452 /* Place the new block just in front of the successor. */
1453 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1454 if (old_succ == EXIT_BLOCK_PTR)
1455 j = n_basic_blocks - 1;
1456 else
1457 j = old_succ->index;
1458 for (i = n_basic_blocks - 1; i > j; --i)
1459 {
1460 basic_block tmp = BASIC_BLOCK (i - 1);
1461 BASIC_BLOCK (i) = tmp;
1462 tmp->index = i;
1463 }
1464 BASIC_BLOCK (i) = bb;
1465 bb->index = i;
1466
1467 /* Create the basic block note.
1468
1469	   Where we place the note can have a noticeable impact on the generated
1470 code. Consider this cfg:
1471
1472
1473 E
1474 |
1475 0
1476 / \
1477 +->1-->2--->E
1478 | |
1479 +--+
1480
1481 If we need to insert an insn on the edge from block 0 to block 1,
1482 we want to ensure the instructions we insert are outside of any
1483 loop notes that physically sit between block 0 and block 1. Otherwise
1484 we confuse the loop optimizer into thinking the loop is a phony. */
1485 if (old_succ != EXIT_BLOCK_PTR
1486 && PREV_INSN (old_succ->head)
1487 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1488 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1489 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1490 PREV_INSN (old_succ->head));
1491 else if (old_succ != EXIT_BLOCK_PTR)
1492 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1493 else
1494 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1495 NOTE_BASIC_BLOCK (bb_note) = bb;
1496 bb->head = bb->end = bb_note;
1497
1498 /* Not quite simple -- for non-fallthru edges, we must adjust the
1499 predecessor's jump instruction to target our new block. */
1500 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1501 {
1502 rtx tmp, insn = old_pred->end;
1503 rtx old_label = old_succ->head;
1504 rtx new_label = gen_label_rtx ();
1505
1506 if (GET_CODE (insn) != JUMP_INSN)
1507 abort ();
1508
1509 /* ??? Recognize a tablejump and adjust all matching cases. */
1510 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1511 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1512 && GET_CODE (tmp) == JUMP_INSN
1513 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1514 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1515 {
1516 rtvec vec;
1517 int j;
1518
1519 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1520 vec = XVEC (PATTERN (tmp), 0);
1521 else
1522 vec = XVEC (PATTERN (tmp), 1);
1523
1524 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1525 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1526 {
1527 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1528 --LABEL_NUSES (old_label);
1529 ++LABEL_NUSES (new_label);
1530 }
1531
1532	  /* Handle casesi dispatch insns.  */
1533 if ((tmp = single_set (insn)) != NULL
1534 && SET_DEST (tmp) == pc_rtx
1535 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1536 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1537 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1538 {
1539 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1540 new_label);
1541 --LABEL_NUSES (old_label);
1542 ++LABEL_NUSES (new_label);
1543 }
1544 }
1545 else
1546 {
1547 /* This would have indicated an abnormal edge. */
1548 if (computed_jump_p (insn))
1549 abort ();
1550
1551 /* A return instruction can't be redirected. */
1552 if (returnjump_p (insn))
1553 abort ();
1554
1555 /* If the insn doesn't go where we think, we're confused. */
1556 if (JUMP_LABEL (insn) != old_label)
1557 abort ();
1558
1559 redirect_jump (insn, new_label);
1560 }
1561
1562 emit_label_before (new_label, bb_note);
1563 bb->head = new_label;
1564 }
1565
1566 return bb;
1567 }
1568
1569 /* Queue instructions for insertion on an edge between two basic blocks.
1570 The new instructions and basic blocks (if any) will not appear in the
1571 CFG until commit_edge_insertions is called. */
1572
1573 void
1574 insert_insn_on_edge (pattern, e)
1575 rtx pattern;
1576 edge e;
1577 {
1578 /* We cannot insert instructions on an abnormal critical edge.
1579 It will be easier to find the culprit if we die now. */
1580 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1581 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1582 abort ();
1583
1584 if (e->insns == NULL_RTX)
1585 start_sequence ();
1586 else
1587 push_to_sequence (e->insns);
1588
1589 emit_insn (pattern);
1590
1591 e->insns = get_insns ();
1592	  end_sequence ();
1593 }
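
/* A hypothetical usage sketch (not from the original source) tying
   insert_insn_on_edge to the commit routines below; needs_fixup and
   gen_fixup stand in for a real predicate and insn generator.  Queue
   compensation code on each edge that needs it, then materialize
   everything at once:

	for (e = bb->succ; e ; e = e->succ_next)
	  if (needs_fixup (e))
	    insert_insn_on_edge (gen_fixup (), e);
	...
	commit_edge_insertions ();

   commit_edge_insertions walks every block's successor edges and calls
   commit_one_edge_insertion for each edge with queued insns.  */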
1594
1595 /* Update the CFG for the instructions queued on edge E. */
1596
1597 static void
1598 commit_one_edge_insertion (e)
1599 edge e;
1600 {
1601 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1602 basic_block bb;
1603
1604 /* Pull the insns off the edge now since the edge might go away. */
1605 insns = e->insns;
1606 e->insns = NULL_RTX;
1607
1608 /* Figure out where to put these things. If the destination has
1609 one predecessor, insert there. Except for the exit block. */
1610 if (e->dest->pred->pred_next == NULL
1611 && e->dest != EXIT_BLOCK_PTR)
1612 {
1613 bb = e->dest;
1614
1615 /* Get the location correct wrt a code label, and "nice" wrt
1616 a basic block note, and before everything else. */
1617 tmp = bb->head;
1618 if (GET_CODE (tmp) == CODE_LABEL)
1619 tmp = NEXT_INSN (tmp);
1620 if (GET_CODE (tmp) == NOTE
1621 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1622 tmp = NEXT_INSN (tmp);
1623 if (tmp == bb->head)
1624 before = tmp;
1625 else
1626 after = PREV_INSN (tmp);
1627 }
1628
1629 /* If the source has one successor and the edge is not abnormal,
1630 insert there. Except for the entry block. */
1631 else if ((e->flags & EDGE_ABNORMAL) == 0
1632 && e->src->succ->succ_next == NULL
1633 && e->src != ENTRY_BLOCK_PTR)
1634 {
1635 bb = e->src;
1636 /* It is possible to have a non-simple jump here. Consider a target
1637 where some forms of unconditional jumps clobber a register. This
1638 happens on the fr30 for example.
1639
1640 We know this block has a single successor, so we can just emit
1641 the queued insns before the jump. */
1642 if (GET_CODE (bb->end) == JUMP_INSN)
1643 {
1644 before = bb->end;
1645 }
1646 else
1647 {
1648 /* We'd better be fallthru, or we've lost track of what's what. */
1649 if ((e->flags & EDGE_FALLTHRU) == 0)
1650 abort ();
1651
1652 after = bb->end;
1653 }
1654 }
1655
1656 /* Otherwise we must split the edge. */
1657 else
1658 {
1659 bb = split_edge (e);
1660 after = bb->end;
1661 }
1662
1663 /* Now that we've found the spot, do the insertion. */
1664
1665 /* Set the new block number for these insns, if structure is allocated. */
1666 if (basic_block_for_insn)
1667 {
1668 rtx i;
1669 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1670 set_block_for_insn (i, bb);
1671 }
1672
1673 if (before)
1674 {
1675 emit_insns_before (insns, before);
1676 if (before == bb->head)
1677 bb->head = insns;
1678
1679 last = prev_nonnote_insn (before);
1680 }
1681 else
1682 {
1683 last = emit_insns_after (insns, after);
1684 if (after == bb->end)
1685 bb->end = last;
1686 }
1687
1688 if (returnjump_p (last))
1689 {
1690 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1691 This is not currently a problem because this only happens
1692 for the (single) epilogue, which already has a fallthru edge
1693 to EXIT. */
1694
1695 e = bb->succ;
1696 if (e->dest != EXIT_BLOCK_PTR
1697 || e->succ_next != NULL
1698 || (e->flags & EDGE_FALLTHRU) == 0)
1699 abort ();
1700 e->flags &= ~EDGE_FALLTHRU;
1701
1702 emit_barrier_after (last);
1703 bb->end = last;
1704
1705 if (before)
1706 flow_delete_insn (before);
1707 }
1708 else if (GET_CODE (last) == JUMP_INSN)
1709 abort ();
1710 }
1711
1712 /* Update the CFG for all queued instructions. */
1713
1714 void
1715 commit_edge_insertions ()
1716 {
1717 int i;
1718 basic_block bb;
1719
1720 #ifdef ENABLE_CHECKING
1721 verify_flow_info ();
1722 #endif
1723
1724 i = -1;
1725 bb = ENTRY_BLOCK_PTR;
1726 while (1)
1727 {
1728 edge e, next;
1729
1730 for (e = bb->succ; e ; e = next)
1731 {
1732 next = e->succ_next;
1733 if (e->insns)
1734 commit_one_edge_insertion (e);
1735 }
1736
1737 if (++i >= n_basic_blocks)
1738 break;
1739 bb = BASIC_BLOCK (i);
1740 }
1741 }
1742 \f
1743 /* Delete all unreachable basic blocks. */
1744
1745 static void
1746 delete_unreachable_blocks ()
1747 {
1748 basic_block *worklist, *tos;
1749 int deleted_handler;
1750 edge e;
1751 int i, n;
1752
1753 n = n_basic_blocks;
1754 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1755
1756 /* Use basic_block->aux as a marker. Clear them all. */
1757
1758 for (i = 0; i < n; ++i)
1759 BASIC_BLOCK (i)->aux = NULL;
1760
1761 /* Add our starting points to the worklist. Almost always there will
1762	   be only one.  It isn't inconceivable that we might one day directly
1763 support Fortran alternate entry points. */
1764
1765 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1766 {
1767 *tos++ = e->dest;
1768
1769 /* Mark the block with a handy non-null value. */
1770 e->dest->aux = e;
1771 }
1772
1773 /* Iterate: find everything reachable from what we've already seen. */
1774
1775 while (tos != worklist)
1776 {
1777 basic_block b = *--tos;
1778
1779 for (e = b->succ; e ; e = e->succ_next)
1780 if (!e->dest->aux)
1781 {
1782 *tos++ = e->dest;
1783 e->dest->aux = e;
1784 }
1785 }
1786
1787 /* Delete all unreachable basic blocks. Count down so that we don't
1788 interfere with the block renumbering that happens in flow_delete_block. */
1789
1790 deleted_handler = 0;
1791
1792 for (i = n - 1; i >= 0; --i)
1793 {
1794 basic_block b = BASIC_BLOCK (i);
1795
1796 if (b->aux != NULL)
1797 /* This block was found. Tidy up the mark. */
1798 b->aux = NULL;
1799 else
1800 deleted_handler |= flow_delete_block (b);
1801 }
1802
1803 tidy_fallthru_edges ();
1804
1805 /* If we deleted an exception handler, we may have EH region begin/end
1806 blocks to remove as well. */
1807 if (deleted_handler)
1808 delete_eh_regions ();
1809
1810 free (worklist);
1811 }
1812
1813 /* Find EH regions for which there is no longer a handler, and delete them. */
1814
1815 static void
1816 delete_eh_regions ()
1817 {
1818 rtx insn;
1819
1820 update_rethrow_references ();
1821
1822 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1823 if (GET_CODE (insn) == NOTE)
1824 {
1825 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1826 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1827 {
1828 int num = NOTE_EH_HANDLER (insn);
1829 /* A NULL handler indicates a region is no longer needed,
1830 as long as its rethrow label isn't used. */
1831 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1832 {
1833 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1834 NOTE_SOURCE_FILE (insn) = 0;
1835 }
1836 }
1837 }
1838 }
1839
1840 /* Return true if NOTE is not one of the ones that must be kept paired,
1841 so that we may simply delete them. */
1842
1843 static int
1844 can_delete_note_p (note)
1845 rtx note;
1846 {
1847 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1848 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1849 }
1850
1851 /* Unlink a chain of insns between START and FINISH, leaving notes
1852 that must be paired. */
1853
1854 void
1855 flow_delete_insn_chain (start, finish)
1856 rtx start, finish;
1857 {
1858 /* Unchain the insns one by one. It would be quicker to delete all
1859 of these with a single unchaining, rather than one at a time, but
1860 we need to keep the NOTEs. */
1861
1862 rtx next;
1863
1864 while (1)
1865 {
1866 next = NEXT_INSN (start);
1867 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1868 ;
1869 else if (GET_CODE (start) == CODE_LABEL
1870 && ! can_delete_label_p (start))
1871 {
1872 const char *name = LABEL_NAME (start);
1873 PUT_CODE (start, NOTE);
1874 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1875 NOTE_SOURCE_FILE (start) = name;
1876 }
1877 else
1878 next = flow_delete_insn (start);
1879
1880 if (start == finish)
1881 break;
1882 start = next;
1883 }
1884 }
1885
1886 /* Delete the insns in a (non-live) block. We physically delete every
1887 non-deleted-note insn, and update the flow graph appropriately.
1888
1889 Return nonzero if we deleted an exception handler. */
1890
1891 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1892 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1893
1894 int
1895 flow_delete_block (b)
1896 basic_block b;
1897 {
1898 int deleted_handler = 0;
1899 rtx insn, end, tmp;
1900
1901 /* If the head of this block is a CODE_LABEL, then it might be the
1902 label for an exception handler which can't be reached.
1903
1904 We need to remove the label from the exception_handler_label list
1905 and remove the associated NOTE_INSN_EH_REGION_BEG and
1906 NOTE_INSN_EH_REGION_END notes. */
1907
1908 insn = b->head;
1909
1910 never_reached_warning (insn);
1911
1912 if (GET_CODE (insn) == CODE_LABEL)
1913 {
1914 rtx x, *prev = &exception_handler_labels;
1915
1916 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1917 {
1918 if (XEXP (x, 0) == insn)
1919 {
1920 /* Found a match, splice this label out of the EH label list. */
1921 *prev = XEXP (x, 1);
1922 XEXP (x, 1) = NULL_RTX;
1923 XEXP (x, 0) = NULL_RTX;
1924
1925 /* Remove the handler from all regions. */
1926 remove_handler (insn);
1927 deleted_handler = 1;
1928 break;
1929 }
1930 prev = &XEXP (x, 1);
1931 }
1932 }
1933
1934 /* Include any jump table following the basic block. */
1935 end = b->end;
1936 if (GET_CODE (end) == JUMP_INSN
1937 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1938 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1939 && GET_CODE (tmp) == JUMP_INSN
1940 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1941 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1942 end = tmp;
1943
1944 /* Include any barrier that may follow the basic block. */
1945 tmp = next_nonnote_insn (end);
1946 if (tmp && GET_CODE (tmp) == BARRIER)
1947 end = tmp;
1948
1949 /* Selectively delete the entire chain. */
1950 flow_delete_insn_chain (insn, end);
1951
1952 /* Remove the edges into and out of this block. Note that there may
1953 indeed be edges in, if we are removing an unreachable loop. */
1954 {
1955 edge e, next, *q;
1956
1957 for (e = b->pred; e ; e = next)
1958 {
1959 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1960 continue;
1961 *q = e->succ_next;
1962 next = e->pred_next;
1963 n_edges--;
1964 free (e);
1965 }
1966 for (e = b->succ; e ; e = next)
1967 {
1968 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1969 continue;
1970 *q = e->pred_next;
1971 next = e->succ_next;
1972 n_edges--;
1973 free (e);
1974 }
1975
1976 b->pred = NULL;
1977 b->succ = NULL;
1978 }
1979
1980 /* Remove the basic block from the array, and compact behind it. */
1981 expunge_block (b);
1982
1983 return deleted_handler;
1984 }
1985
1986 /* Remove block B from the basic block array and compact behind it. */
1987
1988 static void
1989 expunge_block (b)
1990 basic_block b;
1991 {
1992 int i, n = n_basic_blocks;
1993
1994 for (i = b->index; i + 1 < n; ++i)
1995 {
1996 basic_block x = BASIC_BLOCK (i + 1);
1997 BASIC_BLOCK (i) = x;
1998 x->index = i;
1999 }
2000
2001 basic_block_info->num_elements--;
2002 n_basic_blocks--;
2003 }
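
/* Illustrative sketch, not part of the original file: expunge_block
   keeps block indices dense, so a consistency check like the one
   below should never trigger after a deletion.  */
#if 0
static void
example_check_block_indices ()
{
  int i;

  for (i = 0; i < n_basic_blocks; ++i)
    if (BASIC_BLOCK (i)->index != i)
      abort ();
}
#endif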
2004
2005 /* Delete INSN by patching it out. Return the next insn. */
2006
2007 rtx
2008 flow_delete_insn (insn)
2009 rtx insn;
2010 {
2011 rtx prev = PREV_INSN (insn);
2012 rtx next = NEXT_INSN (insn);
2013 rtx note;
2014
2015 PREV_INSN (insn) = NULL_RTX;
2016 NEXT_INSN (insn) = NULL_RTX;
2017 INSN_DELETED_P (insn) = 1;
2018
2019 if (prev)
2020 NEXT_INSN (prev) = next;
2021 if (next)
2022 PREV_INSN (next) = prev;
2023 else
2024 set_last_insn (prev);
2025
2026 if (GET_CODE (insn) == CODE_LABEL)
2027 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2028
2029 /* If deleting a jump, decrement the use count of the label. Deleting
2030 the label itself should happen in the normal course of block merging. */
2031 if (GET_CODE (insn) == JUMP_INSN
2032 && JUMP_LABEL (insn)
2033 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2034 LABEL_NUSES (JUMP_LABEL (insn))--;
2035
2036 /* Also if deleting an insn that references a label. */
2037 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2038 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2039 LABEL_NUSES (XEXP (note, 0))--;
2040
2041 return next;
2042 }
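
/* Illustrative sketch, not part of the original file: because
   flow_delete_insn returns the following insn, a scan can delete as
   it goes without losing its place.  some_insn_is_dead_p is a
   hypothetical predicate standing in for a real deadness test.  */
#if 0
static void
example_delete_scan (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (some_insn_is_dead_p (insn))
        insn = flow_delete_insn (insn);
      else
        insn = NEXT_INSN (insn);
    }
}
#endif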
2043
2044 /* True if a given label can be deleted. */
2045
2046 static int
2047 can_delete_label_p (label)
2048 rtx label;
2049 {
2050 rtx x;
2051
2052 if (LABEL_PRESERVE_P (label))
2053 return 0;
2054
2055 for (x = forced_labels; x ; x = XEXP (x, 1))
2056 if (label == XEXP (x, 0))
2057 return 0;
2058 for (x = label_value_list; x ; x = XEXP (x, 1))
2059 if (label == XEXP (x, 0))
2060 return 0;
2061 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2062 if (label == XEXP (x, 0))
2063 return 0;
2064
2065 /* User-declared labels must be preserved. */
2066 if (LABEL_NAME (label) != 0)
2067 return 0;
2068
2069 return 1;
2070 }
2071
2072 static int
2073 tail_recursion_label_p (label)
2074 rtx label;
2075 {
2076 rtx x;
2077
2078 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2079 if (label == XEXP (x, 0))
2080 return 1;
2081
2082 return 0;
2083 }
2084
2085 /* Blocks A and B are to be merged into a single block A. The insns
2086 are already contiguous, hence `nomove'. */
2087
2088 void
2089 merge_blocks_nomove (a, b)
2090 basic_block a, b;
2091 {
2092 edge e;
2093 rtx b_head, b_end, a_end;
2094 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2095 int b_empty = 0;
2096
2097 /* If there was a CODE_LABEL beginning B, delete it. */
2098 b_head = b->head;
2099 b_end = b->end;
2100 if (GET_CODE (b_head) == CODE_LABEL)
2101 {
2102 /* Detect basic blocks with nothing but a label. This can happen
2103 in particular at the end of a function. */
2104 if (b_head == b_end)
2105 b_empty = 1;
2106 del_first = del_last = b_head;
2107 b_head = NEXT_INSN (b_head);
2108 }
2109
2110 /* Delete the basic block note. */
2111 if (GET_CODE (b_head) == NOTE
2112 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2113 {
2114 if (b_head == b_end)
2115 b_empty = 1;
2116 if (! del_last)
2117 del_first = b_head;
2118 del_last = b_head;
2119 b_head = NEXT_INSN (b_head);
2120 }
2121
2122 /* If there was a jump out of A, delete it. */
2123 a_end = a->end;
2124 if (GET_CODE (a_end) == JUMP_INSN)
2125 {
2126 rtx prev;
2127
2128 prev = prev_nonnote_insn (a_end);
2129 if (!prev)
2130 prev = a->head;
2131
2132 del_first = a_end;
2133
2134 #ifdef HAVE_cc0
2135 /* If this was a conditional jump, we need to also delete
2136 the insn that set cc0. */
2137 if (prev && sets_cc0_p (prev))
2138 {
2139 rtx tmp = prev;
2140 prev = prev_nonnote_insn (prev);
2141 if (!prev)
2142 prev = a->head;
2143 del_first = tmp;
2144 }
2145 #endif
2146
2147 a_end = prev;
2148 }
2149
2150 /* Delete everything marked above as well as crap that might be
2151 hanging out between the two blocks. */
2152 flow_delete_insn_chain (del_first, del_last);
2153
2154 /* Normally there should only be one successor of A and that is B, but
2155 partway through the merge of blocks for conditional_execution we'll
2156 be merging a TEST block with THEN and ELSE successors. Free the
2157 whole lot of them and hope the caller knows what they're doing. */
2158 while (a->succ)
2159 remove_edge (a->succ);
2160
2161 /* Adjust the edges out of B for the new owner. */
2162 for (e = b->succ; e ; e = e->succ_next)
2163 e->src = a;
2164 a->succ = b->succ;
2165
2166 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2167 b->pred = b->succ = NULL;
2168
2169 /* Reassociate the insns of B with A. */
2170 if (!b_empty)
2171 {
2172 if (basic_block_for_insn)
2173 {
2174 BLOCK_FOR_INSN (b_head) = a;
2175 while (b_head != b_end)
2176 {
2177 b_head = NEXT_INSN (b_head);
2178 BLOCK_FOR_INSN (b_head) = a;
2179 }
2180 }
2181 a_end = b_end;
2182 }
2183 a->end = a_end;
2184
2185 expunge_block (b);
2186 }
2187
2188 /* Blocks A and B are to be merged into a single block. A has no incoming
2189 fallthru edge, so it can be moved before B without adding or modifying
2190 any jumps (aside from the jump from A to B). */
2191
2192 static int
2193 merge_blocks_move_predecessor_nojumps (a, b)
2194 basic_block a, b;
2195 {
2196 rtx start, end, barrier;
2197 int index;
2198
2199 start = a->head;
2200 end = a->end;
2201
2202 barrier = next_nonnote_insn (end);
2203 if (GET_CODE (barrier) != BARRIER)
2204 abort ();
2205 flow_delete_insn (barrier);
2206
2207 /* Move block and loop notes out of the chain so that we do not
2208 disturb their order.
2209
2210 ??? A better solution would be to squeeze out all the non-nested notes
2211 and adjust the block trees appropriately. Even better would be to have
2212 a tighter connection between block trees and rtl so that this is not
2213 necessary. */
2214 start = squeeze_notes (start, end);
2215
2216 /* Scramble the insn chain. */
2217 if (end != PREV_INSN (b->head))
2218 reorder_insns (start, end, PREV_INSN (b->head));
2219
2220 if (rtl_dump_file)
2221 {
2222 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2223 a->index, b->index);
2224 }
2225
2226 /* Swap the records for the two blocks around. Although we are deleting B,
2227 A is now where B was and we want to compact the BB array from where
2228 A used to be. */
2229 BASIC_BLOCK (a->index) = b;
2230 BASIC_BLOCK (b->index) = a;
2231 index = a->index;
2232 a->index = b->index;
2233 b->index = index;
2234
2235 /* Now blocks A and B are contiguous. Merge them. */
2236 merge_blocks_nomove (a, b);
2237
2238 return 1;
2239 }
2240
2241 /* Blocks A and B are to be merged into a single block. B has no outgoing
2242 fallthru edge, so it can be moved after A without adding or modifying
2243 any jumps (aside from the jump from A to B). */
2244
2245 static int
2246 merge_blocks_move_successor_nojumps (a, b)
2247 basic_block a, b;
2248 {
2249 rtx start, end, barrier;
2250
2251 start = b->head;
2252 end = b->end;
2253 barrier = NEXT_INSN (end);
2254
2255 /* Recognize a jump table following block B. */
2256 if (GET_CODE (barrier) == CODE_LABEL
2257 && NEXT_INSN (barrier)
2258 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2259 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2260 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2261 {
2262 end = NEXT_INSN (barrier);
2263 barrier = NEXT_INSN (end);
2264 }
2265
2266 /* There had better have been a barrier there. Delete it. */
2267 if (GET_CODE (barrier) != BARRIER)
2268 abort ();
2269 flow_delete_insn (barrier);
2270
2271 /* Move block and loop notes out of the chain so that we do not
2272 disturb their order.
2273
2274 ??? A better solution would be to squeeze out all the non-nested notes
2275 and adjust the block trees appropriately. Even better would be to have
2276 a tighter connection between block trees and rtl so that this is not
2277 necessary. */
2278 start = squeeze_notes (start, end);
2279
2280 /* Scramble the insn chain. */
2281 reorder_insns (start, end, a->end);
2282
2283 /* Now blocks A and B are contiguous. Merge them. */
2284 merge_blocks_nomove (a, b);
2285
2286 if (rtl_dump_file)
2287 {
2288 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2289 b->index, a->index);
2290 }
2291
2292 return 1;
2293 }
2294
2295 /* Attempt to merge basic blocks that are potentially non-adjacent.
2296 Return true iff the attempt succeeded. */
2297
2298 static int
2299 merge_blocks (e, b, c)
2300 edge e;
2301 basic_block b, c;
2302 {
2303 /* If C has a tail recursion label, do not merge. There is no
2304 edge recorded from the call_placeholder back to this label, as
2305 that would make optimize_sibling_and_tail_recursive_calls more
2306 complex for no gain. */
2307 if (GET_CODE (c->head) == CODE_LABEL
2308 && tail_recursion_label_p (c->head))
2309 return 0;
2310
2311 /* If B has a fallthru edge to C, no need to move anything. */
2312 if (e->flags & EDGE_FALLTHRU)
2313 {
2314 merge_blocks_nomove (b, c);
2315
2316 if (rtl_dump_file)
2317 {
2318 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2319 b->index, c->index);
2320 }
2321
2322 return 1;
2323 }
2324 else
2325 {
2326 edge tmp_edge;
2327 basic_block d;
2328 int c_has_outgoing_fallthru;
2329 int b_has_incoming_fallthru;
2330
2331 /* We must make sure to not munge nesting of exception regions,
2332 lexical blocks, and loop notes.
2333
2334 The first is taken care of by requiring that the active eh
2335 region at the end of one block always matches the active eh
2336 region at the beginning of the next block.
2337
2338 The latter two are taken care of by squeezing out all the notes. */
2339
2340 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2341 executed and we may want to treat blocks which have two out
2342 edges, one normal, one abnormal as only having one edge for
2343 block merging purposes. */
2344
2345 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2346 if (tmp_edge->flags & EDGE_FALLTHRU)
2347 break;
2348 c_has_outgoing_fallthru = (tmp_edge != NULL);
2349
2350 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2351 if (tmp_edge->flags & EDGE_FALLTHRU)
2352 break;
2353 b_has_incoming_fallthru = (tmp_edge != NULL);
2354
2355 /* If B does not have an incoming fallthru, and the exception regions
2356 match, then it can be moved immediately before C without introducing
2357 or modifying jumps.
2358
2359 C cannot be the first block, so we do not have to worry about
2360 accessing a non-existent block. */
2361 d = BASIC_BLOCK (c->index - 1);
2362 if (! b_has_incoming_fallthru
2363 && d->eh_end == b->eh_beg
2364 && b->eh_end == c->eh_beg)
2365 return merge_blocks_move_predecessor_nojumps (b, c);
2366
2367 /* Otherwise, we're going to try to move C after B. Make sure the
2368 exception regions match.
2369
2370 If B is the last basic block, then we must not try to access the
2371 block structure for block B + 1. Luckily in that case we do not
2372 need to worry about matching exception regions. */
2373 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2374 if (b->eh_end == c->eh_beg
2375 && (d == NULL || c->eh_end == d->eh_beg))
2376 {
2377 /* If C does not have an outgoing fallthru, then it can be moved
2378 immediately after B without introducing or modifying jumps. */
2379 if (! c_has_outgoing_fallthru)
2380 return merge_blocks_move_successor_nojumps (b, c);
2381
2382 /* Otherwise, we'll need to insert an extra jump, and possibly
2383 a new block to contain it. */
2384 /* ??? Not implemented yet. */
2385 }
2386
2387 return 0;
2388 }
2389 }
2390
2391 /* Top level driver for merge_blocks. */
2392
2393 static void
2394 try_merge_blocks ()
2395 {
2396 int i;
2397
2398 /* Attempt to merge blocks as made possible by edge removal. If a block
2399 has only one successor, and the successor has only one predecessor,
2400 they may be combined. */
2401
2402 for (i = 0; i < n_basic_blocks; )
2403 {
2404 basic_block c, b = BASIC_BLOCK (i);
2405 edge s;
2406
2407 /* A loop because chains of blocks might be combinable. */
2408 while ((s = b->succ) != NULL
2409 && s->succ_next == NULL
2410 && (s->flags & EDGE_EH) == 0
2411 && (c = s->dest) != EXIT_BLOCK_PTR
2412 && c->pred->pred_next == NULL
2413 /* If the jump insn has side effects, we can't kill the edge. */
2414 && (GET_CODE (b->end) != JUMP_INSN
2415 || onlyjump_p (b->end))
2416 && merge_blocks (s, b, c))
2417 continue;
2418
2419 /* Don't get confused by the index shift caused by deleting blocks. */
2420 i = b->index + 1;
2421 }
2422 }
2423
2424 /* The given edge should potentially be a fallthru edge. If that is in
2425 fact true, delete the jump and barriers that are in the way. */
2426
2427 void
2428 tidy_fallthru_edge (e, b, c)
2429 edge e;
2430 basic_block b, c;
2431 {
2432 rtx q;
2433
2434 /* ??? In a late-running flow pass, other folks may have deleted basic
2435 blocks by nopping out blocks, leaving multiple BARRIERs between here
2436 and the target label. They ought to be chastised and fixed.
2437
2438 We can also wind up with a sequence of undeletable labels between
2439 one block and the next.
2440
2441 So search through a sequence of barriers, labels, and notes for
2442 the head of block C and verify that we really do fall through. */
2443
2444 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2445 return;
2446
2447 /* Remove what will soon cease being the jump insn from the source block.
2448 If block B consisted only of this single jump, turn it into a deleted
2449 note. */
2450 q = b->end;
2451 if (GET_CODE (q) == JUMP_INSN
2452 && (simplejump_p (q)
2453 || (b->succ == e && e->succ_next == NULL)))
2454 {
2455 #ifdef HAVE_cc0
2456 /* If this was a conditional jump, we need to also delete
2457 the insn that set cc0. */
2458 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2459 q = PREV_INSN (q);
2460 #endif
2461
2462 if (b->head == q)
2463 {
2464 PUT_CODE (q, NOTE);
2465 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2466 NOTE_SOURCE_FILE (q) = 0;
2467 }
2468 else
2469 b->end = q = PREV_INSN (q);
2470 }
2471
2472 /* Selectively unlink the sequence. */
2473 if (q != PREV_INSN (c->head))
2474 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2475
2476 e->flags |= EDGE_FALLTHRU;
2477 }
2478
2479 /* Fix up edges that now fall through, or rather should now fall through
2480 but previously required a jump around now deleted blocks. Simplify
2481 the search by only examining blocks numerically adjacent, since this
2482 is how find_basic_blocks created them. */
2483
2484 static void
2485 tidy_fallthru_edges ()
2486 {
2487 int i;
2488
2489 for (i = 1; i < n_basic_blocks; ++i)
2490 {
2491 basic_block b = BASIC_BLOCK (i - 1);
2492 basic_block c = BASIC_BLOCK (i);
2493 edge s;
2494
2495 /* We care about simple conditional or unconditional jumps with
2496 a single successor.
2497
2498 If we had a conditional branch to the next instruction when
2499 find_basic_blocks was called, then there will only be one
2500 out edge for the block which ended with the conditional
2501 branch (since we do not create duplicate edges).
2502
2503 Furthermore, the edge will be marked as a fallthru because we
2504 merge the flags for the duplicate edges. So we do not want to
2505 check that the edge is not a FALLTHRU edge. */
2506 if ((s = b->succ) != NULL
2507 && s->succ_next == NULL
2508 && s->dest == c
2509 /* If the jump insn has side effects, we can't tidy the edge. */
2510 && (GET_CODE (b->end) != JUMP_INSN
2511 || onlyjump_p (b->end)))
2512 tidy_fallthru_edge (s, b, c);
2513 }
2514 }
2515 \f
2516 /* Perform data flow analysis.
2517 F is the first insn of the function; FLAGS is a set of PROP_* flags
2518 to be used in accumulating flow info. */
2519
2520 void
2521 life_analysis (f, file, flags)
2522 rtx f;
2523 FILE *file;
2524 int flags;
2525 {
2526 #ifdef ELIMINABLE_REGS
2527 register int i;
2528 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2529 #endif
2530
2531 /* Record which registers will be eliminated. We use this in
2532 mark_used_regs. */
2533
2534 CLEAR_HARD_REG_SET (elim_reg_set);
2535
2536 #ifdef ELIMINABLE_REGS
2537 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2538 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2539 #else
2540 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2541 #endif
2542
2543 if (! optimize)
2544 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2545
2546 /* The post-reload life analysis must have (on a global basis) the
2547 same registers live as were computed by reload itself; otherwise
2548 elimination offsets and such may be incorrect.
2549
2550 Reload will mark some registers as live even though they do not
2551 appear in the rtl. */
2552 if (reload_completed)
2553 flags &= ~PROP_REG_INFO;
2554
2555 /* We want alias analysis information for local dead store elimination. */
2556 if (flags & PROP_SCAN_DEAD_CODE)
2557 init_alias_analysis ();
2558
2559 /* Always remove no-op moves. Do this before other processing so
2560 that we don't have to keep re-scanning them. */
2561 delete_noop_moves (f);
2562
2563 /* Some targets can emit simpler epilogues if they know that sp was
2564 not ever modified during the function. After reload, of course,
2565 we've already emitted the epilogue so there's no sense searching. */
2566 if (! reload_completed)
2567 notice_stack_pointer_modification (f);
2568
2569 /* Allocate and zero out data structures that will record the
2570 data from lifetime analysis. */
2571 allocate_reg_life_data ();
2572 allocate_bb_life_data ();
2573
2574 /* Find the set of registers live on function exit. */
2575 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2576
2577 /* "Update" life info from zero. It'd be nice to begin the
2578 relaxation with just the exit and noreturn blocks, but that set
2579 is not immediately handy. */
2580
2581 if (flags & PROP_REG_INFO)
2582 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2583 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2584
2585 /* Clean up. */
2586 if (flags & PROP_SCAN_DEAD_CODE)
2587 end_alias_analysis ();
2588
2589 if (file)
2590 dump_flow_info (file);
2591
2592 free_basic_block_vars (1);
2593 }
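
/* Illustrative sketch, not part of the original file: a pass wanting
   death notes, register statistics, and dead code removal might call
   life_analysis as below.  The flag combination is only an example;
   callers choose the PROP_* bits they actually need.  */
#if 0
static void
example_run_life_analysis ()
{
  life_analysis (get_insns (), rtl_dump_file,
                 PROP_DEATH_NOTES | PROP_REG_INFO
                 | PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
}
#endif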
2594
2595 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2596 Search for REGNO. If found, abort if it is not wider than word_mode. */
2597
2598 static int
2599 verify_wide_reg_1 (px, pregno)
2600 rtx *px;
2601 void *pregno;
2602 {
2603 rtx x = *px;
2604 unsigned int regno = *(int *) pregno;
2605
2606 if (GET_CODE (x) == REG && REGNO (x) == regno)
2607 {
2608 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2609 abort ();
2610 return 1;
2611 }
2612 return 0;
2613 }
2614
2615 /* A subroutine of verify_local_live_at_start. Search through insns
2616 between HEAD and END looking for register REGNO. */
2617
2618 static void
2619 verify_wide_reg (regno, head, end)
2620 int regno;
2621 rtx head, end;
2622 {
2623 while (1)
2624 {
2625 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2626 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2627 return;
2628 if (head == end)
2629 break;
2630 head = NEXT_INSN (head);
2631 }
2632
2633 /* We didn't find the register at all. Something's way screwy. */
2634 abort ();
2635 }
2636
2637 /* A subroutine of update_life_info. Verify that there are no untoward
2638 changes in live_at_start during a local update. */
2639
2640 static void
2641 verify_local_live_at_start (new_live_at_start, bb)
2642 regset new_live_at_start;
2643 basic_block bb;
2644 {
2645 if (reload_completed)
2646 {
2647 /* After reload, there are no pseudos, nor subregs of multi-word
2648 registers. The regsets should exactly match. */
2649 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2650 abort ();
2651 }
2652 else
2653 {
2654 int i;
2655
2656 /* Find the set of changed registers. */
2657 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2658
2659 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2660 {
2661 /* No registers should die. */
2662 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2663 abort ();
2664 /* Verify that the now-live register is wider than word_mode. */
2665 verify_wide_reg (i, bb->head, bb->end);
2666 });
2667 }
2668 }
2669
2670 /* Updates life information starting with the basic blocks set in BLOCKS.
2671 If BLOCKS is null, consider it to be the universal set.
2672
2673 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2674 we are only expecting local modifications to basic blocks. If we find
2675 extra registers live at the beginning of a block, then we either killed
2676 useful data, or we have a broken split that wants data not provided.
2677 If we find registers removed from live_at_start, that means we have
2678 a broken peephole that is killing a register it shouldn't.
2679
2680 ??? This is not true in one situation -- when a pre-reload splitter
2681 generates subregs of a multi-word pseudo, current life analysis will
2682 lose the kill. So we _can_ have a pseudo go live. How irritating.
2683
2684 Including PROP_REG_INFO does not properly refresh regs_ever_live
2685 unless the caller resets it to zero. */
2686
2687 void
2688 update_life_info (blocks, extent, prop_flags)
2689 sbitmap blocks;
2690 enum update_life_extent extent;
2691 int prop_flags;
2692 {
2693 regset tmp;
2694 regset_head tmp_head;
2695 int i;
2696
2697 tmp = INITIALIZE_REG_SET (tmp_head);
2698
2699 /* For a global update, we go through the relaxation process again. */
2700 if (extent != UPDATE_LIFE_LOCAL)
2701 {
2702 calculate_global_regs_live (blocks, blocks,
2703 prop_flags & PROP_SCAN_DEAD_CODE);
2704
2705 /* If asked, remove notes from the blocks we'll update. */
2706 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2707 count_or_remove_death_notes (blocks, 1);
2708 }
2709
2710 if (blocks)
2711 {
2712 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2713 {
2714 basic_block bb = BASIC_BLOCK (i);
2715
2716 COPY_REG_SET (tmp, bb->global_live_at_end);
2717 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2718
2719 if (extent == UPDATE_LIFE_LOCAL)
2720 verify_local_live_at_start (tmp, bb);
2721 });
2722 }
2723 else
2724 {
2725 for (i = n_basic_blocks - 1; i >= 0; --i)
2726 {
2727 basic_block bb = BASIC_BLOCK (i);
2728
2729 COPY_REG_SET (tmp, bb->global_live_at_end);
2730 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2731
2732 if (extent == UPDATE_LIFE_LOCAL)
2733 verify_local_live_at_start (tmp, bb);
2734 }
2735 }
2736
2737 FREE_REG_SET (tmp);
2738
2739 if (prop_flags & PROP_REG_INFO)
2740 {
2741 /* The only pseudos that are live at the beginning of the function
2742 are those that were not set anywhere in the function. local-alloc
2743 doesn't know how to handle these correctly, so mark them as not
2744 local to any one basic block. */
2745 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2746 FIRST_PSEUDO_REGISTER, i,
2747 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2748
2749 /* We have a problem with any pseudoreg that lives across the setjmp.
2750 ANSI says that if a user variable does not change in value between
2751 the setjmp and the longjmp, then the longjmp preserves it. This
2752 includes longjmp from a place where the pseudo appears dead.
2753 (In principle, the value still exists if it is in scope.)
2754 If the pseudo goes in a hard reg, some other value may occupy
2755 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2756 Conclusion: such a pseudo must not go in a hard reg. */
2757 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2758 FIRST_PSEUDO_REGISTER, i,
2759 {
2760 if (regno_reg_rtx[i] != 0)
2761 {
2762 REG_LIVE_LENGTH (i) = -1;
2763 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2764 }
2765 });
2766 }
2767 }
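
/* Illustrative sketch, not part of the original file: after a local
   transformation of a single block, a pass can refresh just that
   block's life information.  It assumes the sbitmap interface used
   elsewhere in this file; the helper name is only an example.  */
#if 0
static void
example_update_one_block (bb)
     basic_block bb;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);

  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);

  /* Local update; verify_local_live_at_start will complain if the
     transformation changed anything it shouldn't have.  */
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);

  free (blocks);
}
#endif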
2768
2769 /* Free the variables allocated by find_basic_blocks.
2770
2771 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2772
2773 void
2774 free_basic_block_vars (keep_head_end_p)
2775 int keep_head_end_p;
2776 {
2777 if (basic_block_for_insn)
2778 {
2779 VARRAY_FREE (basic_block_for_insn);
2780 basic_block_for_insn = NULL;
2781 }
2782
2783 if (! keep_head_end_p)
2784 {
2785 clear_edges ();
2786 VARRAY_FREE (basic_block_info);
2787 n_basic_blocks = 0;
2788
2789 ENTRY_BLOCK_PTR->aux = NULL;
2790 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2791 EXIT_BLOCK_PTR->aux = NULL;
2792 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2793 }
2794 }
2795
2796 /* Return nonzero if the destination of SET equals the source. */
2797 static int
2798 set_noop_p (set)
2799 rtx set;
2800 {
2801 rtx src = SET_SRC (set);
2802 rtx dst = SET_DEST (set);
2803
2804 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2805 {
2806 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2807 return 0;
2808 src = SUBREG_REG (src);
2809 dst = SUBREG_REG (dst);
2810 }
2811
2812 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2813 && REGNO (src) == REGNO (dst));
2814 }
2815
2816 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2817 value to itself. */
2818 static int
2819 noop_move_p (insn)
2820 rtx insn;
2821 {
2822 rtx pat = PATTERN (insn);
2823
2824 /* Insns carrying these notes are useful later on. */
2825 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2826 return 0;
2827
2828 if (GET_CODE (pat) == SET && set_noop_p (pat))
2829 return 1;
2830
2831 if (GET_CODE (pat) == PARALLEL)
2832 {
2833 int i;
2834 /* If nothing but SETs of registers to themselves,
2835 this insn can also be deleted. */
2836 for (i = 0; i < XVECLEN (pat, 0); i++)
2837 {
2838 rtx tem = XVECEXP (pat, 0, i);
2839
2840 if (GET_CODE (tem) == USE
2841 || GET_CODE (tem) == CLOBBER)
2842 continue;
2843
2844 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2845 return 0;
2846 }
2847
2848 return 1;
2849 }
2850 return 0;
2851 }
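
/* Illustrative sketch, not part of the original file: the simplest
   insn body recognized here is a register copied to itself.  The
   constructed SET below assumes this era's gen_rtx_SET, which takes
   an explicit machine mode; the register number is arbitrary.  */
#if 0
static int
example_self_copy_is_noop ()
{
  rtx reg = gen_rtx_REG (SImode, FIRST_PSEUDO_REGISTER);
  rtx set = gen_rtx_SET (VOIDmode, reg, reg);

  return set_noop_p (set);	/* Expected to be nonzero.  */
}
#endif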
2852
2853 /* Delete any insns that copy a register to itself. */
2854
2855 static void
2856 delete_noop_moves (f)
2857 rtx f;
2858 {
2859 rtx insn;
2860 for (insn = f; insn; insn = NEXT_INSN (insn))
2861 {
2862 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2863 {
2864 PUT_CODE (insn, NOTE);
2865 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2866 NOTE_SOURCE_FILE (insn) = 0;
2867 }
2868 }
2869 }
2870
2871 /* Determine if the stack pointer is constant over the life of the function.
2872 Only useful before prologues have been emitted. */
2873
2874 static void
2875 notice_stack_pointer_modification_1 (x, pat, data)
2876 rtx x;
2877 rtx pat ATTRIBUTE_UNUSED;
2878 void *data ATTRIBUTE_UNUSED;
2879 {
2880 if (x == stack_pointer_rtx
2881 /* The stack pointer is only modified indirectly as the result
2882 of a push until later in flow. See the comments in rtl.texi
2883 regarding Embedded Side-Effects on Addresses. */
2884 || (GET_CODE (x) == MEM
2885 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2886 || GET_CODE (XEXP (x, 0)) == PRE_INC
2887 || GET_CODE (XEXP (x, 0)) == POST_DEC
2888 || GET_CODE (XEXP (x, 0)) == POST_INC)
2889 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2890 current_function_sp_is_unchanging = 0;
2891 }
2892
2893 static void
2894 notice_stack_pointer_modification (f)
2895 rtx f;
2896 {
2897 rtx insn;
2898
2899 /* Assume that the stack pointer is unchanging if alloca hasn't
2900 been used. */
2901 current_function_sp_is_unchanging = !current_function_calls_alloca;
2902 if (! current_function_sp_is_unchanging)
2903 return;
2904
2905 for (insn = f; insn; insn = NEXT_INSN (insn))
2906 {
2907 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2908 {
2909 /* Check if insn modifies the stack pointer. */
2910 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2911 NULL);
2912 if (! current_function_sp_is_unchanging)
2913 return;
2914 }
2915 }
2916 }
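
/* Illustrative sketch, not part of the original file: note_stores
   invokes its callback once per SET or CLOBBER destination, which is
   how notice_stack_pointer_modification_1 above gets called.  A
   minimal callback counting register stores might look like this.  */
#if 0
static void
example_count_reg_stores (x, pat, data)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (x) == REG)
    ++*(int *) data;
}

/* Usage: int n = 0;
          note_stores (PATTERN (insn), example_count_reg_stores, &n);  */
#endif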
2917
2918 /* Mark a register in SET. Hard registers in large modes get all
2919 of their component registers set as well. */
2920 static void
2921 mark_reg (reg, xset)
2922 rtx reg;
2923 void *xset;
2924 {
2925 regset set = (regset) xset;
2926 int regno = REGNO (reg);
2927
2928 if (GET_MODE (reg) == BLKmode)
2929 abort ();
2930
2931 SET_REGNO_REG_SET (set, regno);
2932 if (regno < FIRST_PSEUDO_REGISTER)
2933 {
2934 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2935 while (--n > 0)
2936 SET_REGNO_REG_SET (set, regno + n);
2937 }
2938 }
2939
2940 /* Mark those regs which are needed at the end of the function as live
2941 at the end of the last basic block. */
2942 static void
2943 mark_regs_live_at_end (set)
2944 regset set;
2945 {
2946 int i;
2947
2948 /* If exiting needs the right stack value, consider the stack pointer
2949 live at the end of the function. */
2950 if ((HAVE_epilogue && reload_completed)
2951 || ! EXIT_IGNORE_STACK
2952 || (! FRAME_POINTER_REQUIRED
2953 && ! current_function_calls_alloca
2954 && flag_omit_frame_pointer)
2955 || current_function_sp_is_unchanging)
2956 {
2957 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2958 }
2959
2960 /* Mark the frame pointer if needed at the end of the function. If
2961 we end up eliminating it, it will be removed from the live list
2962 of each basic block by reload. */
2963
2964 if (! reload_completed || frame_pointer_needed)
2965 {
2966 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2967 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2968 /* If they are different, also mark the hard frame pointer as live. */
2969 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2970 #endif
2971 }
2972
2973 #ifdef PIC_OFFSET_TABLE_REGNUM
2974 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2975 /* Many architectures have a GP register even without flag_pic.
2976 Assume the pic register is not in use, or will be handled by
2977 other means, if it is not fixed. */
2978 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2979 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2980 #endif
2981 #endif
2982
2983 /* Mark all global registers, and all registers used by the epilogue
2984 as being live at the end of the function since they may be
2985 referenced by our caller. */
2986 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2987 if (global_regs[i]
2988 #ifdef EPILOGUE_USES
2989 || EPILOGUE_USES (i)
2990 #endif
2991 )
2992 SET_REGNO_REG_SET (set, i);
2993
2994 /* Mark all call-saved registers that we actually used. */
2995 if (HAVE_epilogue && reload_completed)
2996 {
2997 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2998 if (! call_used_regs[i] && regs_ever_live[i])
2999 SET_REGNO_REG_SET (set, i);
3000 }
3001
3002 /* Mark function return value. */
3003 diddle_return_value (mark_reg, set);
3004 }
3005
3006 /* Callback function for for_each_successor_phi. DATA is a regset.
3007 Sets SRC_REGNO, the regno of the phi alternative for phi node
3008 INSN, in that regset. */
3009
3010 static int
3011 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3012 rtx insn ATTRIBUTE_UNUSED;
3013 int dest_regno ATTRIBUTE_UNUSED;
3014 int src_regno;
3015 void *data;
3016 {
3017 regset live = (regset) data;
3018 SET_REGNO_REG_SET (live, src_regno);
3019 return 0;
3020 }
3021
3022 /* Propagate global life info around the graph of basic blocks. Begin
3023 considering blocks with their corresponding bit set in BLOCKS_IN.
3024 If BLOCKS_IN is null, consider it the universal set.
3025
3026 BLOCKS_OUT is set for every block that was changed. */
3027
3028 static void
3029 calculate_global_regs_live (blocks_in, blocks_out, flags)
3030 sbitmap blocks_in, blocks_out;
3031 int flags;
3032 {
3033 basic_block *queue, *qhead, *qtail, *qend;
3034 regset tmp, new_live_at_end;
3035 regset_head tmp_head;
3036 regset_head new_live_at_end_head;
3037 int i;
3038
3039 tmp = INITIALIZE_REG_SET (tmp_head);
3040 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3041
3042 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3043 more because the `head == tail' style test for an empty queue doesn't
3044 work with a full queue; see the standalone sketch after this function. */
3045 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3046 qtail = queue;
3047 qhead = qend = queue + n_basic_blocks + 2;
3048
3049 /* Clear out the garbage that might be hanging out in bb->aux. */
3050 for (i = n_basic_blocks - 1; i >= 0; --i)
3051 BASIC_BLOCK (i)->aux = NULL;
3052
3053 /* Queue the blocks set in the initial mask. Do this in reverse block
3054 number order so that the first round is more likely to do
3055 useful work. We use AUX non-null to flag that the block is queued. */
3056 if (blocks_in)
3057 {
3058 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3059 {
3060 basic_block bb = BASIC_BLOCK (i);
3061 *--qhead = bb;
3062 bb->aux = bb;
3063 });
3064 }
3065 else
3066 {
3067 for (i = 0; i < n_basic_blocks; ++i)
3068 {
3069 basic_block bb = BASIC_BLOCK (i);
3070 *--qhead = bb;
3071 bb->aux = bb;
3072 }
3073 }
3074
3075 if (blocks_out)
3076 sbitmap_zero (blocks_out);
3077
3078 while (qhead != qtail)
3079 {
3080 int rescan, changed;
3081 basic_block bb;
3082 edge e;
3083
3084 bb = *qhead++;
3085 if (qhead == qend)
3086 qhead = queue;
3087 bb->aux = NULL;
3088
3089 /* Begin by propagating live_at_start from the successor blocks. */
3090 CLEAR_REG_SET (new_live_at_end);
3091 for (e = bb->succ; e ; e = e->succ_next)
3092 {
3093 basic_block sb = e->dest;
3094 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3095 }
3096
3097 /* Force the stack pointer to be live -- which might not already be
3098 the case for blocks within infinite loops. */
3099 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3100
3101 /* Regs used in phi nodes are not included in
3102 global_live_at_start, since they are live only along a
3103 particular edge. Set those regs that are live because of a
3104 phi node alternative corresponding to this particular block. */
3105 if (in_ssa_form)
3106 for_each_successor_phi (bb, &set_phi_alternative_reg,
3107 new_live_at_end);
3108
3109 if (bb == ENTRY_BLOCK_PTR)
3110 {
3111 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3112 continue;
3113 }
3114
3115 /* On our first pass through this block, we'll go ahead and continue.
3116 Recognize the first pass by a NULL local_set. On subsequent passes, we
3117 get to skip out early if live_at_end wouldn't have changed. */
3118
3119 if (bb->local_set == NULL)
3120 {
3121 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3122 rescan = 1;
3123 }
3124 else
3125 {
3126 /* If any bits were removed from live_at_end, we'll have to
3127 rescan the block. This wouldn't be necessary if we had
3128 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3129 local_live is really dependent on live_at_end. */
3130 CLEAR_REG_SET (tmp);
3131 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3132 new_live_at_end, BITMAP_AND_COMPL);
3133
3134 if (! rescan)
3135 {
3136 /* Find the set of changed bits. Take this opportunity
3137 to notice whether this set is empty and exit early. */
3138 CLEAR_REG_SET (tmp);
3139 changed = bitmap_operation (tmp, bb->global_live_at_end,
3140 new_live_at_end, BITMAP_XOR);
3141 if (! changed)
3142 continue;
3143
3144 /* If any of the changed bits overlap with local_set,
3145 we'll have to rescan the block. Detect overlap by
3146 the AND with ~local_set turning off bits. */
3147 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3148 BITMAP_AND_COMPL);
3149 }
3150 }
3151
3152 /* Let our caller know that BB changed enough to require its
3153 death notes updated. */
3154 if (blocks_out)
3155 SET_BIT (blocks_out, bb->index);
3156
3157 if (! rescan)
3158 {
3159 /* Add to live_at_start the set of all registers in
3160 new_live_at_end that aren't in the old live_at_end. */
3161
3162 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3163 BITMAP_AND_COMPL);
3164 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3165
3166 changed = bitmap_operation (bb->global_live_at_start,
3167 bb->global_live_at_start,
3168 tmp, BITMAP_IOR);
3169 if (! changed)
3170 continue;
3171 }
3172 else
3173 {
3174 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3175
3176 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3177 into live_at_start. */
3178 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3179
3180 /* If live_at_start didn't change, no need to go further. */
3181 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3182 continue;
3183
3184 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3185 }
3186
3187 /* Queue all predecessors of BB so that we may re-examine
3188 their live_at_end. */
3189 for (e = bb->pred; e ; e = e->pred_next)
3190 {
3191 basic_block pb = e->src;
3192 if (pb->aux == NULL)
3193 {
3194 *qtail++ = pb;
3195 if (qtail == qend)
3196 qtail = queue;
3197 pb->aux = pb;
3198 }
3199 }
3200 }
3201
3202 FREE_REG_SET (tmp);
3203 FREE_REG_SET (new_live_at_end);
3204
3205 if (blocks_out)
3206 {
3207 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3208 {
3209 basic_block bb = BASIC_BLOCK (i);
3210 FREE_REG_SET (bb->local_set);
3211 });
3212 }
3213 else
3214 {
3215 for (i = n_basic_blocks - 1; i >= 0; --i)
3216 {
3217 basic_block bb = BASIC_BLOCK (i);
3218 FREE_REG_SET (bb->local_set);
3219 }
3220 }
3221
3222 free (queue);
3223 }
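
/* Illustrative sketch, not part of the original file: the standalone
   ring below shows the queue idiom used above.  With a plain
   `head == tail' emptiness test, a completely full ring looks empty,
   so N elements need N + 1 slots.  Assumes N <= n_basic_blocks.  */
#if 0
static void
example_ring_queue (n)
     int n;
{
  basic_block *queue, *qhead, *qtail, *qend;
  int i;

  queue = (basic_block *) xmalloc ((n + 1) * sizeof (*queue));
  qhead = qtail = queue;
  qend = queue + n + 1;

  /* Push: store, then advance with wrap-around.  */
  for (i = 0; i < n; ++i)
    {
      *qtail++ = BASIC_BLOCK (i);
      if (qtail == qend)
        qtail = queue;
    }

  /* Pop: the ring is non-empty exactly when qhead != qtail.  */
  while (qhead != qtail)
    {
      basic_block bb = *qhead++;
      if (qhead == qend)
        qhead = queue;
      (void) bb;
    }

  free (queue);
}
#endif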
3224 \f
3225 /* Subroutines of life analysis. */
3226
3227 /* Allocate the permanent data structures that represent the results
3228 of life analysis. Not static since used also for stupid life analysis. */
3229
3230 void
3231 allocate_bb_life_data ()
3232 {
3233 register int i;
3234
3235 for (i = 0; i < n_basic_blocks; i++)
3236 {
3237 basic_block bb = BASIC_BLOCK (i);
3238
3239 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3240 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3241 }
3242
3243 ENTRY_BLOCK_PTR->global_live_at_end
3244 = OBSTACK_ALLOC_REG_SET (function_obstack);
3245 EXIT_BLOCK_PTR->global_live_at_start
3246 = OBSTACK_ALLOC_REG_SET (function_obstack);
3247
3248 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3249 }
3250
3251 void
3252 allocate_reg_life_data ()
3253 {
3254 int i;
3255
3256 max_regno = max_reg_num ();
3257
3258 /* Recalculate the register space, in case it has grown. Old-style
3259 vector-oriented regsets would set regset_{size,bytes} here also. */
3260 allocate_reg_info (max_regno, FALSE, FALSE);
3261
3262 /* Reset all the data we'll collect in propagate_block and its
3263 subroutines. */
3264 for (i = 0; i < max_regno; i++)
3265 {
3266 REG_N_SETS (i) = 0;
3267 REG_N_REFS (i) = 0;
3268 REG_N_DEATHS (i) = 0;
3269 REG_N_CALLS_CROSSED (i) = 0;
3270 REG_LIVE_LENGTH (i) = 0;
3271 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3272 }
3273 }
3274
3275 /* Delete dead instructions for propagate_block. */
3276
3277 static void
3278 propagate_block_delete_insn (bb, insn)
3279 basic_block bb;
3280 rtx insn;
3281 {
3282 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3283
3284 /* If the insn referred to a label, and that label was attached to
3285 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3286 pretty much mandatory to delete it, because the ADDR_VEC may be
3287 referencing labels that no longer exist. */
3288
3289 if (inote)
3290 {
3291 rtx label = XEXP (inote, 0);
3292 rtx next;
3293
3294 if (LABEL_NUSES (label) == 1
3295 && (next = next_nonnote_insn (label)) != NULL
3296 && GET_CODE (next) == JUMP_INSN
3297 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3298 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3299 {
3300 rtx pat = PATTERN (next);
3301 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3302 int len = XVECLEN (pat, diff_vec_p);
3303 int i;
3304
3305 for (i = 0; i < len; i++)
3306 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3307
3308 flow_delete_insn (next);
3309 }
3310 }
3311
3312 if (bb->end == insn)
3313 bb->end = PREV_INSN (insn);
3314 flow_delete_insn (insn);
3315 }
3316
3317 /* Delete dead libcalls for propagate_block. Return the insn
3318 before the libcall. */
3319
3320 static rtx
3321 propagate_block_delete_libcall (bb, insn, note)
3322 basic_block bb;
3323 rtx insn, note;
3324 {
3325 rtx first = XEXP (note, 0);
3326 rtx before = PREV_INSN (first);
3327
3328 if (insn == bb->end)
3329 bb->end = before;
3330
3331 flow_delete_insn_chain (first, insn);
3332 return before;
3333 }
3334
3335 /* Update the life-status of regs for one insn. Return the previous insn. */
3336
3337 rtx
3338 propagate_one_insn (pbi, insn)
3339 struct propagate_block_info *pbi;
3340 rtx insn;
3341 {
3342 rtx prev = PREV_INSN (insn);
3343 int flags = pbi->flags;
3344 int insn_is_dead = 0;
3345 int libcall_is_dead = 0;
3346 rtx note;
3347 int i;
3348
3349 if (! INSN_P (insn))
3350 return prev;
3351
3352 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3353 if (flags & PROP_SCAN_DEAD_CODE)
3354 {
3355 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3356 REG_NOTES (insn));
3357 libcall_is_dead = (insn_is_dead && note != 0
3358 && libcall_dead_p (pbi, PATTERN (insn),
3359 note, insn));
3360 }
3361
3362 /* We almost certainly don't want to delete prologue or epilogue
3363 instructions. Warn about probable compiler lossage. */
3364 if (insn_is_dead
3365 && reload_completed
3366 && (((HAVE_epilogue || HAVE_prologue)
3367 && prologue_epilogue_contains (insn))
3368 || (HAVE_sibcall_epilogue
3369 && sibcall_epilogue_contains (insn))))
3370 {
3371 if (flags & PROP_KILL_DEAD_CODE)
3372 {
3373 warning ("ICE: would have deleted prologue/epilogue insn");
3374 if (!inhibit_warnings)
3375 debug_rtx (insn);
3376 }
3377 libcall_is_dead = insn_is_dead = 0;
3378 }
3379
3380 /* If an instruction consists of just dead store(s) on the final pass,
3381 delete it. */
3382 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3383 {
3384 /* Record sets. Do this even for dead instructions, since they
3385 would have killed the values if they hadn't been deleted. */
3386 mark_set_regs (pbi, PATTERN (insn), insn);
3387
3388 /* CC0 is now known to be dead. Either this insn used it,
3389 in which case it doesn't anymore, or clobbered it,
3390 so the next insn can't use it. */
3391 pbi->cc0_live = 0;
3392
3393 if (libcall_is_dead)
3394 {
3395 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3396 insn = NEXT_INSN (prev);
3397 }
3398 else
3399 propagate_block_delete_insn (pbi->bb, insn);
3400
3401 return prev;
3402 }
3403
3404 /* See if this is an increment or decrement that can be merged into
3405 a following memory address. */
3406 #ifdef AUTO_INC_DEC
3407 {
3408 register rtx x = single_set (insn);
3409
3410 /* Does this instruction increment or decrement a register? */
3411 if (!reload_completed
3412 && (flags & PROP_AUTOINC)
3413 && x != 0
3414 && GET_CODE (SET_DEST (x)) == REG
3415 && (GET_CODE (SET_SRC (x)) == PLUS
3416 || GET_CODE (SET_SRC (x)) == MINUS)
3417 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3418 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3419 /* Ok, look for a following memory ref we can combine with.
3420 If one is found, change the memory ref to a PRE_INC
3421 or PRE_DEC, cancel this insn, and return 1.
3422 Return 0 if nothing has been done. */
3423 && try_pre_increment_1 (pbi, insn))
3424 return prev;
3425 }
3426 #endif /* AUTO_INC_DEC */
3427
3428 CLEAR_REG_SET (pbi->new_set);
3429
3430 /* If this is not the final pass, and this insn is copying the value of
3431 a library call and it's dead, don't scan the insns that perform the
3432 library call, so that the call's arguments are not marked live. */
3433 if (libcall_is_dead)
3434 {
3435 /* Record the death of the dest reg. */
3436 mark_set_regs (pbi, PATTERN (insn), insn);
3437
3438 insn = XEXP (note, 0);
3439 return PREV_INSN (insn);
3440 }
3441 else if (GET_CODE (PATTERN (insn)) == SET
3442 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3443 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3444 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3445 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3446 /* We have an insn to pop a constant amount off the stack.
3447 (Such insns use PLUS regardless of the direction of the stack,
3448 and any insn to adjust the stack by a constant is always a pop.)
3449 These insns, if not dead stores, have no effect on life. */
3450 ;
3451 else
3452 {
3453 /* Any regs live at the time of a call instruction must not go
3454 in a register clobbered by calls. Find all regs now live and
3455 record this for them. */
3456
3457 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3458 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3459 { REG_N_CALLS_CROSSED (i)++; });
3460
3461 /* Record sets. Do this even for dead instructions, since they
3462 would have killed the values if they hadn't been deleted. */
3463 mark_set_regs (pbi, PATTERN (insn), insn);
3464
3465 if (GET_CODE (insn) == CALL_INSN)
3466 {
3467 register int i;
3468 rtx note, cond;
3469
3470 cond = NULL_RTX;
3471 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3472 cond = COND_EXEC_TEST (PATTERN (insn));
3473
3474 /* Non-constant calls clobber memory. */
3475 if (! CONST_CALL_P (insn))
3476 free_EXPR_LIST_list (&pbi->mem_set_list);
3477
3478 /* There may be extra registers to be clobbered. */
3479 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3480 note;
3481 note = XEXP (note, 1))
3482 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3483 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3484 cond, insn, pbi->flags);
3485
3486 /* Calls change all call-used and global registers. */
3487 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3488 if (call_used_regs[i] && ! global_regs[i]
3489 && ! fixed_regs[i])
3490 {
3491 /* We do not want REG_UNUSED notes for these registers. */
3492 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3493 cond, insn,
3494 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3495 }
3496 }
3497
3498 /* If an insn doesn't use CC0, it becomes dead since we assume
3499 that every insn clobbers it. So show it dead here;
3500 mark_used_regs will set it live if it is referenced. */
3501 pbi->cc0_live = 0;
3502
3503 /* Record uses. */
3504 if (! insn_is_dead)
3505 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3506
3507 /* Sometimes we may have inserted something before INSN (such as a move)
3508 when we make an auto-inc. So ensure we will scan those insns. */
3509 #ifdef AUTO_INC_DEC
3510 prev = PREV_INSN (insn);
3511 #endif
3512
3513 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3514 {
3515 register int i;
3516 rtx note, cond;
3517
3518 cond = NULL_RTX;
3519 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3520 cond = COND_EXEC_TEST (PATTERN (insn));
3521
3522 /* Calls use their arguments. */
3523 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3524 note;
3525 note = XEXP (note, 1))
3526 if (GET_CODE (XEXP (note, 0)) == USE)
3527 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3528 cond, insn);
3529
3530 /* The stack ptr is used (honorarily) by a CALL insn. */
3531 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3532
3533 /* Calls may also reference any of the global registers,
3534 so they are made live. */
3535 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3536 if (global_regs[i])
3537 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3538 cond, insn);
3539 }
3540 }
3541
3542 /* On final pass, update counts of how many insns in which each reg
3543 is live. */
3544 if (flags & PROP_REG_INFO)
3545 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3546 { REG_LIVE_LENGTH (i)++; });
3547
3548 return prev;
3549 }
3550
3551 /* Initialize a propagate_block_info struct for public consumption.
3552 Note that the structure itself is opaque to the caller, but that
3553 the user can use the regsets provided here. */
3554
3555 struct propagate_block_info *
3556 init_propagate_block_info (bb, live, local_set, flags)
3557 basic_block bb;
3558 regset live;
3559 regset local_set;
3560 int flags;
3561 {
3562 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3563
3564 pbi->bb = bb;
3565 pbi->reg_live = live;
3566 pbi->mem_set_list = NULL_RTX;
3567 pbi->local_set = local_set;
3568 pbi->cc0_live = 0;
3569 pbi->flags = flags;
3570
3571 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3572 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3573 else
3574 pbi->reg_next_use = NULL;
3575
3576 pbi->new_set = BITMAP_XMALLOC ();
3577
3578 #ifdef HAVE_conditional_execution
3579 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3580 free_reg_cond_life_info);
3581 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3582
3583 /* If this block ends in a conditional branch, for each register live
3584 from one side of the branch and not the other, record the register
3585 as conditionally dead. */
3586 if (GET_CODE (bb->end) == JUMP_INSN
3587 && condjump_p (bb->end)
3588 && ! simplejump_p (bb->end))
3589 {
3590 regset_head diff_head;
3591 regset diff = INITIALIZE_REG_SET (diff_head);
3592 basic_block bb_true, bb_false;
3593 rtx cond_true, cond_false;
3594 int i;
3595
3596 /* Identify the successor blocks. */
3597 bb_true = bb->succ->dest;
3598 if (bb->succ->succ_next != NULL)
3599 {
3600 bb_false = bb->succ->succ_next->dest;
3601
3602 if (bb->succ->flags & EDGE_FALLTHRU)
3603 {
3604 basic_block t = bb_false;
3605 bb_false = bb_true;
3606 bb_true = t;
3607 }
3608 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3609 abort ();
3610 }
3611 else
3612 {
3613 /* This can happen with a conditional jump to the next insn. */
3614 if (JUMP_LABEL (bb->end) != bb_true->head)
3615 abort ();
3616
3617 /* Simplest way to do nothing. */
3618 bb_false = bb_true;
3619 }
3620
3621 /* Extract the condition from the branch. */
3622 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3623 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3624 GET_MODE (cond_true), XEXP (cond_true, 0),
3625 XEXP (cond_true, 1));
3626 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3627 {
3628 rtx t = cond_false;
3629 cond_false = cond_true;
3630 cond_true = t;
3631 }
3632
3633 /* Compute which registers lead different lives in the successors. */
3634 if (bitmap_operation (diff, bb_true->global_live_at_start,
3635 bb_false->global_live_at_start, BITMAP_XOR))
3636 {
3637 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3638 abort ();
3639 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3640
3641 /* For each such register, mark it conditionally dead. */
3642 EXECUTE_IF_SET_IN_REG_SET
3643 (diff, 0, i,
3644 {
3645 struct reg_cond_life_info *rcli;
3646 rtx cond;
3647
3648 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3649
3650 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3651 cond = cond_false;
3652 else
3653 cond = cond_true;
3654 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3655
3656 splay_tree_insert (pbi->reg_cond_dead, i,
3657 (splay_tree_value) rcli);
3658 });
3659 }
3660
3661 FREE_REG_SET (diff);
3662 }
3663 #endif
3664
3665 return pbi;
3666 }
3667
3668 /* Release a propagate_block_info struct. */
3669
3670 void
3671 free_propagate_block_info (pbi)
3672 struct propagate_block_info *pbi;
3673 {
3674 free_EXPR_LIST_list (&pbi->mem_set_list);
3675
3676 BITMAP_XFREE (pbi->new_set);
3677
3678 #ifdef HAVE_conditional_execution
3679 splay_tree_delete (pbi->reg_cond_dead);
3680 BITMAP_XFREE (pbi->reg_cond_reg);
3681 #endif
3682
3683 if (pbi->reg_next_use)
3684 free (pbi->reg_next_use);
3685
3686 free (pbi);
3687 }
3688
3689 /* Compute the registers live at the beginning of a basic block BB from
3690 those live at the end.
3691
3692 When called, REG_LIVE contains those live at the end. On return, it
3693 contains those live at the beginning.
3694
3695 LOCAL_SET, if non-null, will be set with all registers killed by
3696 this basic block. */
3697
3698 void
3699 propagate_block (bb, live, local_set, flags)
3700 basic_block bb;
3701 regset live;
3702 regset local_set;
3703 int flags;
3704 {
3705 struct propagate_block_info *pbi;
3706 rtx insn, prev;
3707
3708 pbi = init_propagate_block_info (bb, live, local_set, flags);
3709
3710 if (flags & PROP_REG_INFO)
3711 {
3712 register int i;
3713
3714 /* Process the regs live at the end of the block.
3715 Mark them as not local to any one basic block. */
3716 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3717 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3718 }
3719
3720 /* Scan the block an insn at a time from end to beginning. */
3721
3722 for (insn = bb->end; ; insn = prev)
3723 {
3724 /* If this is a call to `setjmp' et al, warn if any
3725 non-volatile datum is live. */
3726 if ((flags & PROP_REG_INFO)
3727 && GET_CODE (insn) == NOTE
3728 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3729 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3730
3731 prev = propagate_one_insn (pbi, insn);
3732
3733 if (insn == bb->head)
3734 break;
3735 }
3736
3737 free_propagate_block_info (pbi);
3738 }
3739 \f
3740 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3741 (SET expressions whose destinations are registers dead after the insn).
3742 PBI->REG_LIVE is the regset that says which regs are alive after the insn.
3743
3744 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3745
3746 If X is the entire body of an insn, NOTES contains the reg notes
3747 pertaining to the insn. */
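/* For example (hypothetical RTL): given

	(set (reg:SI 65) (plus:SI (reg:SI 66) (const_int 4)))

   with pseudo 65 not set in PBI->REG_LIVE and no volatile references
   in the source, this returns 1 and the insn may be deleted as a
   dead store.  */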
3748
3749 static int
3750 insn_dead_p (pbi, x, call_ok, notes)
3751 struct propagate_block_info *pbi;
3752 rtx x;
3753 int call_ok;
3754 rtx notes ATTRIBUTE_UNUSED;
3755 {
3756 enum rtx_code code = GET_CODE (x);
3757
3758 #ifdef AUTO_INC_DEC
3759 /* If flow is invoked after reload, we must take existing AUTO_INC
3760 expressions into account. */
3761 if (reload_completed)
3762 {
3763 for ( ; notes; notes = XEXP (notes, 1))
3764 {
3765 if (REG_NOTE_KIND (notes) == REG_INC)
3766 {
3767 int regno = REGNO (XEXP (notes, 0));
3768
3769 /* Don't delete insns to set global regs. */
3770 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3771 || REGNO_REG_SET_P (pbi->reg_live, regno))
3772 return 0;
3773 }
3774 }
3775 }
3776 #endif
3777
3778 /* If setting something that's a reg or part of one,
3779 see if that register's altered value will be live. */
3780
3781 if (code == SET)
3782 {
3783 rtx r = SET_DEST (x);
3784
3785 #ifdef HAVE_cc0
3786 if (GET_CODE (r) == CC0)
3787 return ! pbi->cc0_live;
3788 #endif
3789
3790 /* A SET that is a subroutine call cannot be dead. */
3791 if (GET_CODE (SET_SRC (x)) == CALL)
3792 {
3793 if (! call_ok)
3794 return 0;
3795 }
3796
3797 /* Don't eliminate loads from volatile memory or volatile asms. */
3798 else if (volatile_refs_p (SET_SRC (x)))
3799 return 0;
3800
3801 if (GET_CODE (r) == MEM)
3802 {
3803 rtx temp;
3804
3805 if (MEM_VOLATILE_P (r))
3806 return 0;
3807
3808 /* Walk the set of memory locations we are currently tracking
3809 and see if one is an identical match to this memory location.
3810 If so, this memory write is dead (remember, we're walking
3811 backwards from the end of the block to the start). */
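/* For instance (hypothetical): if (mem:SI (reg 65)) is already on
   the list because a later insn in this block stores to it, an
   earlier (set (mem:SI (reg 65)) (reg 70)) found here is dead.  */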
3812 temp = pbi->mem_set_list;
3813 while (temp)
3814 {
3815 if (rtx_equal_p (XEXP (temp, 0), r))
3816 return 1;
3817 temp = XEXP (temp, 1);
3818 }
3819 }
3820 else
3821 {
3822 while (GET_CODE (r) == SUBREG
3823 || GET_CODE (r) == STRICT_LOW_PART
3824 || GET_CODE (r) == ZERO_EXTRACT)
3825 r = XEXP (r, 0);
3826
3827 if (GET_CODE (r) == REG)
3828 {
3829 int regno = REGNO (r);
3830
3831 /* Obvious. */
3832 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3833 return 0;
3834
3835 /* If this is a hard register, verify that subsequent
3836 words are not needed. */
3837 if (regno < FIRST_PSEUDO_REGISTER)
3838 {
3839 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3840
3841 while (--n > 0)
3842 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3843 return 0;
3844 }
3845
3846 /* Don't delete insns to set global regs. */
3847 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3848 return 0;
3849
3850 /* Make sure insns to set the stack pointer aren't deleted. */
3851 if (regno == STACK_POINTER_REGNUM)
3852 return 0;
3853
3854 /* Make sure insns to set the frame pointer aren't deleted. */
3855 if (regno == FRAME_POINTER_REGNUM
3856 && (! reload_completed || frame_pointer_needed))
3857 return 0;
3858 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3859 if (regno == HARD_FRAME_POINTER_REGNUM
3860 && (! reload_completed || frame_pointer_needed))
3861 return 0;
3862 #endif
3863
3864 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3865 /* Make sure insns to set arg pointer are never deleted
3866 (if the arg pointer isn't fixed, there will be a USE
3867 for it, so we can treat it normally). */
3868 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3869 return 0;
3870 #endif
3871
3872 /* Otherwise, the set is dead. */
3873 return 1;
3874 }
3875 }
3876 }
3877
3878 /* If performing several activities, insn is dead if each activity
3879 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3880 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3881 worth keeping. */
3882 else if (code == PARALLEL)
3883 {
3884 int i = XVECLEN (x, 0);
3885
3886 for (i--; i >= 0; i--)
3887 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3888 && GET_CODE (XVECEXP (x, 0, i)) != USE
3889 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3890 return 0;
3891
3892 return 1;
3893 }
3894
3895 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3896 is not necessarily true for hard registers. */
3897 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3898 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3899 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3900 return 1;
3901
3902 /* We do not check other CLOBBER or USE here. An insn consisting of just
3903 a CLOBBER or just a USE should not be deleted. */
3904 return 0;
3905 }
3906
3907 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3908 return 1 if the entire library call is dead.
3909 This is true if X copies a register (hard or pseudo)
3910 and if the hard return reg of the call insn is dead.
3911 (The caller should have tested the destination of X already for death.)
3912
3913 If this insn doesn't just copy a register, then we don't
3914 have an ordinary libcall. In that case, cse could not have
3915 managed to substitute the source for the dest later on,
3916 so we can assume the libcall is dead.
3917
3918 PBI->REG_LIVE is the bit vector of pseudoregs live before this insn.
3919 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
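/* Illustration (hypothetical libcall): a sequence whose last insn is

	(insn ... (set (reg:SI 65) (reg:SI 0)) ... (expr_list:REG_RETVAL ...))

   copies the hard return register 0 into pseudo 65. If the caller has
   already found pseudo 65 dead, and reg 0 is dead after this copy, the
   entire sequence back to the CALL_INSN is dead.  */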
3920
3921 static int
3922 libcall_dead_p (pbi, x, note, insn)
3923 struct propagate_block_info *pbi;
3924 rtx x;
3925 rtx note;
3926 rtx insn;
3927 {
3928 register RTX_CODE code = GET_CODE (x);
3929
3930 if (code == SET)
3931 {
3932 register rtx r = SET_SRC (x);
3933 if (GET_CODE (r) == REG)
3934 {
3935 rtx call = XEXP (note, 0);
3936 rtx call_pat;
3937 register int i;
3938
3939 /* Find the call insn. */
3940 while (call != insn && GET_CODE (call) != CALL_INSN)
3941 call = NEXT_INSN (call);
3942
3943 /* If there is none, do nothing special,
3944 since ordinary death handling can understand these insns. */
3945 if (call == insn)
3946 return 0;
3947
3948 /* See if the hard reg holding the value is dead.
3949 If this is a PARALLEL, find the call within it. */
3950 call_pat = PATTERN (call);
3951 if (GET_CODE (call_pat) == PARALLEL)
3952 {
3953 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3954 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3955 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3956 break;
3957
3958 /* This may be a library call that is returning a value
3959 via invisible pointer. Do nothing special, since
3960 ordinary death handling can understand these insns. */
3961 if (i < 0)
3962 return 0;
3963
3964 call_pat = XVECEXP (call_pat, 0, i);
3965 }
3966
3967 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3968 }
3969 }
3970 return 1;
3971 }
3972
3973 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3974 live at function entry. Don't count global register variables, variables
3975 in registers that can be used for function arg passing, or variables in
3976 fixed hard registers. */
3977
3978 int
3979 regno_uninitialized (regno)
3980 int regno;
3981 {
3982 if (n_basic_blocks == 0
3983 || (regno < FIRST_PSEUDO_REGISTER
3984 && (global_regs[regno]
3985 || fixed_regs[regno]
3986 || FUNCTION_ARG_REGNO_P (regno))))
3987 return 0;
3988
3989 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3990 }
3991
3992 /* 1 if register REGNO was alive at a place where `setjmp' was called
3993 and was set more than once or is an argument.
3994 Such regs may be clobbered by `longjmp'. */
3995
3996 int
3997 regno_clobbered_at_setjmp (regno)
3998 int regno;
3999 {
4000 if (n_basic_blocks == 0)
4001 return 0;
4002
4003 return ((REG_N_SETS (regno) > 1
4004 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4005 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4006 }
4007 \f
4008 /* INSN references memory, possibly using autoincrement addressing modes.
4009 Find any entries on the mem_set_list that need to be invalidated due
4010 to an address change. */
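/* For example, a REG_INC note for (reg 65) on INSN means any tracked
   entry whose address mentions reg 65, such as (mem:SI (reg 65)) or
   (mem:SI (plus (reg 65) (const_int 4))), no longer describes the same
   location and must be removed from the list.  */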
4011
4012 static void
4013 invalidate_mems_from_autoinc (pbi, insn)
4014 struct propagate_block_info *pbi;
4015 rtx insn;
4016 {
4017 rtx note;
4018 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4019 {
4020 if (REG_NOTE_KIND (note) == REG_INC)
4021 {
4022 rtx temp = pbi->mem_set_list;
4023 rtx prev = NULL_RTX;
4024 rtx next;
4025
4026 while (temp)
4027 {
4028 next = XEXP (temp, 1);
4029 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4030 {
4031 /* Splice temp out of list. */
4032 if (prev)
4033 XEXP (prev, 1) = next;
4034 else
4035 pbi->mem_set_list = next;
4036 free_EXPR_LIST_node (temp);
4037 }
4038 else
4039 prev = temp;
4040 temp = next;
4041 }
4042 }
4043 }
4044 }
4045
4046 /* Process the registers that are set within X; mark_set_1 records
4047 each as dead prior to this insn by clearing it from PBI->REG_LIVE
4048 (and noting it in PBI->NEW_SET for plain SETs).
4049
4050 If INSN is nonzero, it is the insn being processed. The set of
4051 operations to perform is taken from PBI->FLAGS. */
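/* For example (hypothetical RTL), a conditionally executed store

	(cond_exec (ne (reg 96) (const_int 0)) (set (reg 65) (reg 66)))

   is unwrapped here: COND becomes the (ne ...) test and mark_set_1 is
   called on the inner SET, so the death of pseudo 65 is recorded as
   conditional rather than unconditional.  */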
4052
4053 static void
4054 mark_set_regs (pbi, x, insn)
4055 struct propagate_block_info *pbi;
4056 rtx x, insn;
4057 {
4058 rtx cond = NULL_RTX;
4059 enum rtx_code code;
4060
4061 retry:
4062 switch (code = GET_CODE (x))
4063 {
4064 case SET:
4065 case CLOBBER:
4066 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4067 return;
4068
4069 case COND_EXEC:
4070 cond = COND_EXEC_TEST (x);
4071 x = COND_EXEC_CODE (x);
4072 goto retry;
4073
4074 case PARALLEL:
4075 {
4076 register int i;
4077 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4078 {
4079 rtx sub = XVECEXP (x, 0, i);
4080 switch (code = GET_CODE (sub))
4081 {
4082 case COND_EXEC:
4083 if (cond != NULL_RTX)
4084 abort ();
4085
4086 cond = COND_EXEC_TEST (sub);
4087 sub = COND_EXEC_CODE (sub);
4088 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4089 break;
4090 /* FALLTHRU */
4091
4092 case SET:
4093 case CLOBBER:
4094 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4095 break;
4096
4097 default:
4098 break;
4099 }
4100 }
4101 break;
4102 }
4103
4104 default:
4105 break;
4106 }
4107 }
4108
4109 /* Process a single SET rtx, X. */
4110
4111 static void
4112 mark_set_1 (pbi, code, reg, cond, insn, flags)
4113 struct propagate_block_info *pbi;
4114 enum rtx_code code;
4115 rtx reg, cond, insn;
4116 int flags;
4117 {
4118 int regno_first = -1, regno_last = -1;
4119 int not_dead = 0;
4120 int i;
4121
4122 /* Some targets place small structures in registers for
4123 return values of functions. We have to detect this
4124 case specially here to get correct flow information. */
4125 if (GET_CODE (reg) == PARALLEL
4126 && GET_MODE (reg) == BLKmode)
4127 {
4128 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4129 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4130 return;
4131 }
4132
4133 /* Modifying just one hardware register of a multi-reg value or just a
4134 byte field of a register does not mean the value from before this insn
4135 is now dead. Of course, if it was dead afterward, it's unused now. */
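/* For example (hypothetical), the store

	(set (strict_low_part (subreg:QI (reg:SI 65) 0)) (reg:QI 66))

   replaces only the low byte of pseudo 65; if 65 was live below this
   insn it remains live, which is what NOT_DEAD records below.  */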
4136
4137 switch (GET_CODE (reg))
4138 {
4139 case ZERO_EXTRACT:
4140 case SIGN_EXTRACT:
4141 case STRICT_LOW_PART:
4142 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4143 do
4144 reg = XEXP (reg, 0);
4145 while (GET_CODE (reg) == SUBREG
4146 || GET_CODE (reg) == ZERO_EXTRACT
4147 || GET_CODE (reg) == SIGN_EXTRACT
4148 || GET_CODE (reg) == STRICT_LOW_PART);
4149 if (GET_CODE (reg) == MEM)
4150 break;
4151 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4152 /* FALLTHRU */
4153
4154 case REG:
4155 regno_last = regno_first = REGNO (reg);
4156 if (regno_first < FIRST_PSEUDO_REGISTER)
4157 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4158 break;
4159
4160 case SUBREG:
4161 if (GET_CODE (SUBREG_REG (reg)) == REG)
4162 {
4163 enum machine_mode outer_mode = GET_MODE (reg);
4164 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4165
4166 /* Identify the range of registers affected. This is moderately
4167 tricky for hard registers. See alter_subreg. */
4168
4169 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4170 if (regno_first < FIRST_PSEUDO_REGISTER)
4171 {
4172 #ifdef ALTER_HARD_SUBREG
4173 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4174 inner_mode, regno_first);
4175 #else
4176 regno_first += SUBREG_WORD (reg);
4177 #endif
4178 regno_last = (regno_first
4179 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4180
4181 /* Since we've just adjusted the register number ranges, make
4182 sure REG matches. Otherwise some_was_live will be clear
4183 when it shouldn't have been, and we'll create incorrect
4184 REG_UNUSED notes. */
4185 reg = gen_rtx_REG (outer_mode, regno_first);
4186 }
4187 else
4188 {
4189 /* If the number of words in the subreg is less than the number
4190 of words in the full register, we have a well-defined partial
4191 set. Otherwise the high bits are undefined.
4192
4193 This is only really applicable to pseudos, since we just took
4194 care of multi-word hard registers. */
4195 if (((GET_MODE_SIZE (outer_mode)
4196 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4197 < ((GET_MODE_SIZE (inner_mode)
4198 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4199 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4200
4201 reg = SUBREG_REG (reg);
4202 }
4203 }
4204 else
4205 reg = SUBREG_REG (reg);
4206 break;
4207
4208 default:
4209 break;
4210 }
4211
4212 /* If this set is a MEM, then it kills any aliased writes.
4213 If this set is a REG, then it kills any MEMs which use the reg. */
4214 if (flags & PROP_SCAN_DEAD_CODE)
4215 {
4216 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4217 {
4218 rtx temp = pbi->mem_set_list;
4219 rtx prev = NULL_RTX;
4220 rtx next;
4221
4222 while (temp)
4223 {
4224 next = XEXP (temp, 1);
4225 if ((GET_CODE (reg) == MEM
4226 && output_dependence (XEXP (temp, 0), reg))
4227 || (GET_CODE (reg) == REG
4228 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4229 {
4230 /* Splice this entry out of the list. */
4231 if (prev)
4232 XEXP (prev, 1) = next;
4233 else
4234 pbi->mem_set_list = next;
4235 free_EXPR_LIST_node (temp);
4236 }
4237 else
4238 prev = temp;
4239 temp = next;
4240 }
4241 }
4242
4243 /* If the memory reference had embedded side effects (autoincrement
4244 address modes), we may need to kill some entries on the
4245 memory set list. */
4246 if (insn && GET_CODE (reg) == MEM)
4247 invalidate_mems_from_autoinc (pbi, insn);
4248
4249 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4250 /* We do not know the size of a BLKmode store, so we do not track
4251 them for redundant store elimination. */
4252 && GET_MODE (reg) != BLKmode
4253 /* There are no REG_INC notes for SP, so we can't assume we'll see
4254 everything that invalidates it. To be safe, don't eliminate any
4255 stores though SP; none of them should be redundant anyway. */
4256 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4257 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4258 }
4259
4260 if (GET_CODE (reg) == REG
4261 && ! (regno_first == FRAME_POINTER_REGNUM
4262 && (! reload_completed || frame_pointer_needed))
4263 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4264 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4265 && (! reload_completed || frame_pointer_needed))
4266 #endif
4267 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4268 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4269 #endif
4270 )
4271 {
4272 int some_was_live = 0, some_was_dead = 0;
4273
4274 for (i = regno_first; i <= regno_last; ++i)
4275 {
4276 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4277 if (pbi->local_set)
4278 SET_REGNO_REG_SET (pbi->local_set, i);
4279 if (code != CLOBBER)
4280 SET_REGNO_REG_SET (pbi->new_set, i);
4281
4282 some_was_live |= needed_regno;
4283 some_was_dead |= ! needed_regno;
4284 }
4285
4286 #ifdef HAVE_conditional_execution
4287 /* Consider conditional death in deciding that the register needs
4288 a death note. */
4289 if (some_was_live && ! not_dead
4290 /* The stack pointer is never dead. Well, not strictly true,
4291 but it's very difficult to tell from here. Hopefully
4292 combine_stack_adjustments will fix up the most egregious
4293 errors. */
4294 && regno_first != STACK_POINTER_REGNUM)
4295 {
4296 for (i = regno_first; i <= regno_last; ++i)
4297 if (! mark_regno_cond_dead (pbi, i, cond))
4298 not_dead = 1;
4299 }
4300 #endif
4301
4302 /* Additional data to record if this is the final pass. */
4303 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4304 | PROP_DEATH_NOTES | PROP_AUTOINC))
4305 {
4306 register rtx y;
4307 register int blocknum = pbi->bb->index;
4308
4309 y = NULL_RTX;
4310 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4311 {
4312 y = pbi->reg_next_use[regno_first];
4313
4314 /* The next use is no longer next, since a store intervenes. */
4315 for (i = regno_first; i <= regno_last; ++i)
4316 pbi->reg_next_use[i] = 0;
4317 }
4318
4319 if (flags & PROP_REG_INFO)
4320 {
4321 for (i = regno_first; i <= regno_last; ++i)
4322 {
4323 /* Count (weighted) references, stores, etc. This counts a
4324 register twice if it is modified, but that is correct. */
4325 REG_N_SETS (i) += 1;
4326 REG_N_REFS (i) += (optimize_size ? 1
4327 : pbi->bb->loop_depth + 1);
4328
4329 /* The insns where a reg is live are normally counted
4330 elsewhere, but we want the count to include the insn
4331 where the reg is set, and the normal counting mechanism
4332 would not count it. */
4333 REG_LIVE_LENGTH (i) += 1;
4334 }
4335
4336 /* If this is a hard reg, record this function uses the reg. */
4337 if (regno_first < FIRST_PSEUDO_REGISTER)
4338 {
4339 for (i = regno_first; i <= regno_last; i++)
4340 regs_ever_live[i] = 1;
4341 }
4342 else
4343 {
4344 /* Keep track of which basic blocks each reg appears in. */
4345 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4346 REG_BASIC_BLOCK (regno_first) = blocknum;
4347 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4348 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4349 }
4350 }
4351
4352 if (! some_was_dead)
4353 {
4354 if (flags & PROP_LOG_LINKS)
4355 {
4356 /* Make a logical link from the next following insn
4357 that uses this register, back to this insn.
4358 The following insns have already been processed.
4359
4360 We don't build a LOG_LINK for hard registers containing
4361 in ASM_OPERANDs. If these registers get replaced,
4362 we might wind up changing the semantics of the insn,
4363 even if reload can make what appear to be valid
4364 assignments later. */
4365 if (y && (BLOCK_NUM (y) == blocknum)
4366 && (regno_first >= FIRST_PSEUDO_REGISTER
4367 || asm_noperands (PATTERN (y)) < 0))
4368 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4369 }
4370 }
4371 else if (not_dead)
4372 ;
4373 else if (! some_was_live)
4374 {
4375 if (flags & PROP_REG_INFO)
4376 REG_N_DEATHS (regno_first) += 1;
4377
4378 if (flags & PROP_DEATH_NOTES)
4379 {
4380 /* Note that dead stores have already been deleted
4381 when possible. If we get here, we have found a
4382 dead store that cannot be eliminated (because the
4383 same insn does something useful). Indicate this
4384 by marking the reg being set as dying here. */
4385 REG_NOTES (insn)
4386 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4387 }
4388 }
4389 else
4390 {
4391 if (flags & PROP_DEATH_NOTES)
4392 {
4393 /* This is a case where we have a multi-word hard register
4394 and some, but not all, of the words of the register are
4395 needed in subsequent insns. Write REG_UNUSED notes
4396 for those parts that were not needed. This case should
4397 be rare. */
4398
4399 for (i = regno_first; i <= regno_last; ++i)
4400 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4401 REG_NOTES (insn)
4402 = alloc_EXPR_LIST (REG_UNUSED,
4403 gen_rtx_REG (reg_raw_mode[i], i),
4404 REG_NOTES (insn));
4405 }
4406 }
4407 }
4408
4409 /* Mark the register as being dead. */
4410 if (some_was_live
4411 && ! not_dead
4412 /* The stack pointer is never dead. Well, not strictly true,
4413 but it's very difficult to tell from here. Hopefully
4414 combine_stack_adjustments will fix up the most egregious
4415 errors. */
4416 && regno_first != STACK_POINTER_REGNUM)
4417 {
4418 for (i = regno_first; i <= regno_last; ++i)
4419 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4420 }
4421 }
4422 else if (GET_CODE (reg) == REG)
4423 {
4424 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4425 pbi->reg_next_use[regno_first] = 0;
4426 }
4427
4428 /* If this is the last pass and this is a SCRATCH, show it will be dying
4429 here and count it. */
4430 else if (GET_CODE (reg) == SCRATCH)
4431 {
4432 if (flags & PROP_DEATH_NOTES)
4433 REG_NOTES (insn)
4434 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4435 }
4436 }
4437 \f
4438 #ifdef HAVE_conditional_execution
4439 /* Mark REGNO conditionally dead. Return true if the register is
4440 now unconditionally dead. */
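/* Example (hypothetical): a set of REGNO under (ne (reg 96) (const_int 0))
   records that test as the death condition; a second set of REGNO under
   the complementary (eq (reg 96) (const_int 0)) makes the IOR of the two
   conditions always true, so the register is unconditionally dead and we
   return nonzero.  */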
4441
4442 static int
4443 mark_regno_cond_dead (pbi, regno, cond)
4444 struct propagate_block_info *pbi;
4445 int regno;
4446 rtx cond;
4447 {
4448 /* If this is a store to a predicate register, the value of the
4449 predicate is changing; we don't know that the predicate as seen
4450 before is the same as that seen after. Flush all dependent
4451 conditions from reg_cond_dead. This will make all such
4452 conditionally live registers unconditionally live. */
4453 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4454 flush_reg_cond_reg (pbi, regno);
4455
4456 /* If this is an unconditional store, remove any conditional
4457 life that may have existed. */
4458 if (cond == NULL_RTX)
4459 splay_tree_remove (pbi->reg_cond_dead, regno);
4460 else
4461 {
4462 splay_tree_node node;
4463 struct reg_cond_life_info *rcli;
4464 rtx ncond;
4465
4466 /* Otherwise this is a conditional set. Record that fact.
4467 It may have been conditionally used, or there may be a
4468 subsequent set with a complementary condition. */
4469
4470 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4471 if (node == NULL)
4472 {
4473 /* The register was unconditionally live previously.
4474 Record the current condition as the condition under
4475 which it is dead. */
4476 rcli = (struct reg_cond_life_info *)
4477 xmalloc (sizeof (*rcli));
4478 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4479 splay_tree_insert (pbi->reg_cond_dead, regno,
4480 (splay_tree_value) rcli);
4481
4482 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4483 REGNO (XEXP (cond, 0)));
4484
4485 /* Not unconditionally dead. */
4486 return 0;
4487 }
4488 else
4489 {
4490 /* The register was conditionally live previously.
4491 Add the new condition to the old. */
4492 rcli = (struct reg_cond_life_info *) node->value;
4493 ncond = rcli->condition;
4494 ncond = ior_reg_cond (ncond, cond);
4495
4496 /* If the register is now unconditionally dead,
4497 remove the entry in the splay_tree. */
4498 if (ncond == const1_rtx)
4499 splay_tree_remove (pbi->reg_cond_dead, regno);
4500 else
4501 {
4502 rcli->condition = ncond;
4503
4504 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4505 REGNO (XEXP (cond, 0)));
4506
4507 /* Not unconditionally dead. */
4508 return 0;
4509 }
4510 }
4511 }
4512
4513 return 1;
4514 }
4515
4516 /* Called from splay_tree_delete for pbi->reg_cond_life. */
4517
4518 static void
4519 free_reg_cond_life_info (value)
4520 splay_tree_value value;
4521 {
4522 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4523 free_EXPR_LIST_list (&rcli->condition);
4524 free (rcli);
4525 }
4526
4527 /* Helper function for flush_reg_cond_reg. */
4528
4529 static int
4530 flush_reg_cond_reg_1 (node, data)
4531 splay_tree_node node;
4532 void *data;
4533 {
4534 struct reg_cond_life_info *rcli;
4535 int *xdata = (int *) data;
4536 unsigned int regno = xdata[0];
4537 rtx c, *prev;
4538
4539 /* Don't need to search if last flushed value was farther on in
4540 the in-order traversal. */
4541 if (xdata[1] >= (int) node->key)
4542 return 0;
4543
4544 /* Splice out portions of the expression that refer to REGNO. */
4545 rcli = (struct reg_cond_life_info *) node->value;
4546 c = *(prev = &rcli->condition);
4547 while (c)
4548 {
4549 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4550 {
4551 rtx next = XEXP (c, 1);
4552 free_EXPR_LIST_node (c);
4553 c = *prev = next;
4554 }
4555 else
4556 c = *(prev = &XEXP (c, 1));
4557 }
4558
4559 /* If the entire condition is now NULL, signal the node to be removed. */
4560 if (! rcli->condition)
4561 {
4562 xdata[1] = node->key;
4563 return -1;
4564 }
4565 else
4566 return 0;
4567 }
4568
4569 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4570
4571 static void
4572 flush_reg_cond_reg (pbi, regno)
4573 struct propagate_block_info *pbi;
4574 int regno;
4575 {
4576 int pair[2];
4577
4578 pair[0] = regno;
4579 pair[1] = -1;
4580 while (splay_tree_foreach (pbi->reg_cond_dead,
4581 flush_reg_cond_reg_1, pair) == -1)
4582 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4583
4584 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4585 }
4586
4587 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4588 We actually use EXPR_LIST to chain the sub-expressions together
4589 instead of IOR because it's easier to manipulate and we have
4590 the lists.c functions to reuse nodes.
4591
4592 Return a new rtl expression as appropriate. */
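/* Illustration: the death condition "r96 == 0 or r97 == 0" is kept as

	(expr_list (eq (reg 96) (const_int 0))
	   (expr_list (eq (reg 97) (const_int 0)) (nil)))

   and ior_reg_cond of that chain with (ne (reg 96) (const_int 0))
   returns const1_rtx, since a condition and its complement together
   cover all cases.  */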
4593
4594 static rtx
4595 ior_reg_cond (old, x)
4596 rtx old, x;
4597 {
4598 enum rtx_code x_code;
4599 rtx x_reg;
4600 rtx c;
4601
4602 /* We expect these conditions to be of the form (eq reg 0). */
4603 x_code = GET_CODE (x);
4604 if (GET_RTX_CLASS (x_code) != '<'
4605 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4606 || XEXP (x, 1) != const0_rtx)
4607 abort ();
4608
4609 /* Search the expression for an existing sub-expression of X_REG. */
4610 for (c = old; c ; c = XEXP (c, 1))
4611 {
4612 rtx y = XEXP (c, 0);
4613 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4614 {
4615 /* If we find X already present in OLD, we need do nothing. */
4616 if (GET_CODE (y) == x_code)
4617 return old;
4618
4619 /* If we find X being a complement of a condition in OLD,
4620 then the entire condition is true. */
4621 if (GET_CODE (y) == reverse_condition (x_code))
4622 return const1_rtx;
4623 }
4624 }
4625
4626 /* Otherwise just add to the chain. */
4627 return alloc_EXPR_LIST (0, x, old);
4628 }
4629
4630 static rtx
4631 not_reg_cond (x)
4632 rtx x;
4633 {
4634 enum rtx_code x_code;
4635 rtx x_reg;
4636
4637 /* We expect these conditions to be of the form (eq reg 0). */
4638 x_code = GET_CODE (x);
4639 if (GET_RTX_CLASS (x_code) != '<'
4640 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4641 || XEXP (x, 1) != const0_rtx)
4642 abort ();
4643
4644 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4645 VOIDmode, x_reg, const0_rtx),
4646 NULL_RTX);
4647 }
4648
4649 static rtx
4650 nand_reg_cond (old, x)
4651 rtx old, x;
4652 {
4653 enum rtx_code x_code;
4654 rtx x_reg;
4655 rtx c, *prev;
4656
4657 /* We expect these conditions to be of the form (eq reg 0). */
4658 x_code = GET_CODE (x);
4659 if (GET_RTX_CLASS (x_code) != '<'
4660 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4661 || XEXP (x, 1) != const0_rtx)
4662 abort ();
4663
4664 /* Search the expression for an existing sub-expression of X_REG. */
4665
4666 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4667 {
4668 rtx y = XEXP (c, 0);
4669 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4670 {
4671 /* If we find X already present in OLD, then we need to
4672 splice it out. */
4673 if (GET_CODE (y) == x_code)
4674 {
4675 *prev = XEXP (c, 1);
4676 free_EXPR_LIST_node (c);
4677 return old ? old : const0_rtx;
4678 }
4679
4680 /* If we find X being a complement of a condition in OLD,
4681 then we need do nothing. */
4682 if (GET_CODE (y) == reverse_condition (x_code))
4683 return old;
4684 }
4685 }
4686
4687 /* Otherwise, by implication, the register in question is now live for
4688 the inverse of the condition X. */
4689 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4690 VOIDmode, x_reg, const0_rtx),
4691 old);
4692 }
4693 #endif /* HAVE_conditional_execution */
4694 \f
4695 #ifdef AUTO_INC_DEC
4696
4697 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4698 reference. */
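/* For example (hypothetical RTL, assuming 4-byte SImode and
   HAVE_POST_INCREMENT), the pair

	(set (reg:SI 66) (mem:SI (reg:SI 65)))
	...
	(set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 4)))

   can be rewritten so the MEM uses (post_inc:SI (reg:SI 65)); the add
   then becomes a no-op copy and is deleted below.  */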
4699
4700 static void
4701 find_auto_inc (pbi, x, insn)
4702 struct propagate_block_info *pbi;
4703 rtx x;
4704 rtx insn;
4705 {
4706 rtx addr = XEXP (x, 0);
4707 HOST_WIDE_INT offset = 0;
4708 rtx set;
4709
4710 /* Here we detect use of an index register which might be good for
4711 postincrement, postdecrement, preincrement, or predecrement. */
4712
4713 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4714 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4715
4716 if (GET_CODE (addr) == REG)
4717 {
4718 register rtx y;
4719 register int size = GET_MODE_SIZE (GET_MODE (x));
4720 rtx use;
4721 rtx incr;
4722 int regno = REGNO (addr);
4723
4724 /* Is the next use an increment that might make auto-increment? */
4725 if ((incr = pbi->reg_next_use[regno]) != 0
4726 && (set = single_set (incr)) != 0
4727 && GET_CODE (set) == SET
4728 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4729 /* Can't add side effects to jumps; if reg is spilled and
4730 reloaded, there's no way to store back the altered value. */
4731 && GET_CODE (insn) != JUMP_INSN
4732 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4733 && XEXP (y, 0) == addr
4734 && GET_CODE (XEXP (y, 1)) == CONST_INT
4735 && ((HAVE_POST_INCREMENT
4736 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4737 || (HAVE_POST_DECREMENT
4738 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4739 || (HAVE_PRE_INCREMENT
4740 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4741 || (HAVE_PRE_DECREMENT
4742 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4743 /* Make sure this reg appears only once in this insn. */
4744 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4745 use != 0 && use != (rtx) 1))
4746 {
4747 rtx q = SET_DEST (set);
4748 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4749 ? (offset ? PRE_INC : POST_INC)
4750 : (offset ? PRE_DEC : POST_DEC));
4751
4752 if (dead_or_set_p (incr, addr)
4753 /* Mustn't autoinc an eliminable register. */
4754 && (regno >= FIRST_PSEUDO_REGISTER
4755 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4756 {
4757 /* This is the simple case. Try to make the auto-inc. If
4758 we can't, we are done. Otherwise, we will do any
4759 needed updates below. */
4760 if (! validate_change (insn, &XEXP (x, 0),
4761 gen_rtx_fmt_e (inc_code, Pmode, addr),
4762 0))
4763 return;
4764 }
4765 else if (GET_CODE (q) == REG
4766 /* PREV_INSN used here to check the semi-open interval
4767 [insn,incr). */
4768 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4769 /* We must also check for sets of q as q may be
4770 a call clobbered hard register and there may
4771 be a call between PREV_INSN (insn) and incr. */
4772 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4773 {
4774 /* We have *p followed sometime later by q = p+size.
4775 Both p and q must be live afterward,
4776 and q is not used between INSN and its assignment.
4777 Change it to q = p, ...*q..., q = q+size.
4778 Then fall into the usual case. */
4779 rtx insns, temp;
4780
4781 start_sequence ();
4782 emit_move_insn (q, addr);
4783 insns = get_insns ();
4784 end_sequence ();
4785
4786 if (basic_block_for_insn)
4787 for (temp = insns; temp; temp = NEXT_INSN (temp))
4788 set_block_for_insn (temp, pbi->bb);
4789
4790 /* If we can't make the auto-inc, or can't make the
4791 replacement into Y, exit. There's no point in making
4792 the change below if we can't do the auto-inc and doing
4793 so is not correct in the pre-inc case. */
4794
4795 validate_change (insn, &XEXP (x, 0),
4796 gen_rtx_fmt_e (inc_code, Pmode, q),
4797 1);
4798 validate_change (incr, &XEXP (y, 0), q, 1);
4799 if (! apply_change_group ())
4800 return;
4801
4802 /* We now know we'll be doing this change, so emit the
4803 new insn(s) and do the updates. */
4804 emit_insns_before (insns, insn);
4805
4806 if (pbi->bb->head == insn)
4807 pbi->bb->head = insns;
4808
4809 /* INCR will become a NOTE and INSN won't contain a
4810 use of ADDR. If a use of ADDR was just placed in
4811 the insn before INSN, make that the next use.
4812 Otherwise, invalidate it. */
4813 if (GET_CODE (PREV_INSN (insn)) == INSN
4814 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4815 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4816 pbi->reg_next_use[regno] = PREV_INSN (insn);
4817 else
4818 pbi->reg_next_use[regno] = 0;
4819
4820 addr = q;
4821 regno = REGNO (q);
4822
4823 /* REGNO is now used in INCR which is below INSN, but it
4824 previously wasn't live here. If we don't mark it as
4825 live, we'll put a REG_DEAD note for it on this insn,
4826 which is incorrect. */
4827 SET_REGNO_REG_SET (pbi->reg_live, regno);
4828
4829 /* If there are any calls between INSN and INCR, show
4830 that REGNO now crosses them. */
4831 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4832 if (GET_CODE (temp) == CALL_INSN)
4833 REG_N_CALLS_CROSSED (regno)++;
4834 }
4835 else
4836 return;
4837
4838 /* If we haven't returned, it means we were able to make the
4839 auto-inc, so update the status. First, record that this insn
4840 has an implicit side effect. */
4841
4842 REG_NOTES (insn)
4843 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4844
4845 /* Modify the old increment-insn to simply copy
4846 the already-incremented value of our register. */
4847 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4848 abort ();
4849
4850 /* If that makes it a no-op (copying the register into itself) delete
4851 it so it won't appear to be a "use" and a "set" of this
4852 register. */
4853 if (SET_DEST (set) == addr)
4854 {
4855 /* If the original source was dead, it's dead now. */
4856 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4857 if (note && XEXP (note, 0) != addr)
4858 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4859
4860 PUT_CODE (incr, NOTE);
4861 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4862 NOTE_SOURCE_FILE (incr) = 0;
4863 }
4864
4865 if (regno >= FIRST_PSEUDO_REGISTER)
4866 {
4867 /* Count an extra reference to the reg. When a reg is
4868 incremented, spilling it is worse, so we want to make
4869 that less likely. */
4870 REG_N_REFS (regno) += (optimize_size ? 1
4871 : pbi->bb->loop_depth + 1);
4872
4873 /* Count the increment as a setting of the register,
4874 even though it isn't a SET in rtl. */
4875 REG_N_SETS (regno)++;
4876 }
4877 }
4878 }
4879 }
4880 #endif /* AUTO_INC_DEC */
4881 \f
4882 static void
4883 mark_used_reg (pbi, reg, cond, insn)
4884 struct propagate_block_info *pbi;
4885 rtx reg;
4886 rtx cond ATTRIBUTE_UNUSED;
4887 rtx insn;
4888 {
4889 int regno = REGNO (reg);
4890 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4891 int some_was_dead = ! some_was_live;
4892 int some_not_set;
4893 int n;
4894
4895 /* A hard reg in a wide mode may really be multiple registers.
4896 If so, mark all of them just like the first. */
4897 if (regno < FIRST_PSEUDO_REGISTER)
4898 {
4899 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4900 while (--n > 0)
4901 {
4902 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4903 some_was_live |= needed_regno;
4904 some_was_dead |= ! needed_regno;
4905 }
4906 }
4907
4908 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4909 {
4910 /* Record where each reg is used, so when the reg is set we know
4911 the next insn that uses it. */
4912 pbi->reg_next_use[regno] = insn;
4913 }
4914
4915 if (pbi->flags & PROP_REG_INFO)
4916 {
4917 if (regno < FIRST_PSEUDO_REGISTER)
4918 {
4919 /* If this is a register we are going to try to eliminate,
4920 don't mark it live here. If we are successful in
4921 eliminating it, it need not be live unless it is used for
4922 pseudos, in which case it will have been set live when it
4923 was allocated to the pseudos. If the register will not
4924 be eliminated, reload will set it live at that point.
4925
4926 Otherwise, record that this function uses this register. */
4927 /* ??? The PPC backend tries to "eliminate" the pic
4928 register to itself. This should be fixed. In the
4929 meantime, hack around it. */
4930
4931 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4932 && (regno == FRAME_POINTER_REGNUM
4933 || regno == ARG_POINTER_REGNUM)))
4934 {
4935 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4936 do
4937 regs_ever_live[regno + --n] = 1;
4938 while (n > 0);
4939 }
4940 }
4941 else
4942 {
4943 /* Keep track of which basic block each reg appears in. */
4944
4945 register int blocknum = pbi->bb->index;
4946 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4947 REG_BASIC_BLOCK (regno) = blocknum;
4948 else if (REG_BASIC_BLOCK (regno) != blocknum)
4949 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4950
4951 /* Count (weighted) number of uses of each reg. */
4952 REG_N_REFS (regno) += (optimize_size ? 1
4953 : pbi->bb->loop_depth + 1);
4954 }
4955 }
4956
4957 /* Find out if any part of the register was set in this insn. */
4958 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
4959 if (regno < FIRST_PSEUDO_REGISTER)
4960 {
4961 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4962 while (--n > 0)
4963 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
4964 }
4965
4966 /* Record and count the insns in which a reg dies. If it is used in
4967 this insn and was dead below the insn then it dies in this insn.
4968 If it was set in this insn, we do not make a REG_DEAD note;
4969 likewise if we already made such a note. */
4970 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
4971 && some_was_dead
4972 && some_not_set)
4973 {
4974 /* Check for the case where the register dying partially
4975 overlaps the register set by this insn. */
4976 if (regno < FIRST_PSEUDO_REGISTER
4977 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4978 {
4979 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4980 while (--n >= 0)
4981 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
4982 }
4983
4984 /* If none of the words in X is needed, make a REG_DEAD note.
4985 Otherwise, we must make partial REG_DEAD notes. */
4986 if (! some_was_live)
4987 {
4988 if ((pbi->flags & PROP_DEATH_NOTES)
4989 && ! find_regno_note (insn, REG_DEAD, regno))
4990 REG_NOTES (insn)
4991 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4992
4993 if (pbi->flags & PROP_REG_INFO)
4994 REG_N_DEATHS (regno)++;
4995 }
4996 else
4997 {
4998 /* Don't make a REG_DEAD note for a part of a register
4999 that is set in the insn. */
5000
5001 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5002 for (; n >= regno; n--)
5003 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5004 && ! dead_or_set_regno_p (insn, n))
5005 REG_NOTES (insn)
5006 = alloc_EXPR_LIST (REG_DEAD,
5007 gen_rtx_REG (reg_raw_mode[n], n),
5008 REG_NOTES (insn));
5009 }
5010 }
5011
5012 SET_REGNO_REG_SET (pbi->reg_live, regno);
5013 if (regno < FIRST_PSEUDO_REGISTER)
5014 {
5015 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5016 while (--n > 0)
5017 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5018 }
5019
5020 #ifdef HAVE_conditional_execution
5021 /* If this is a conditional use, record that fact. If it is later
5022 conditionally set, we'll know to kill the register. */
5023 if (cond != NULL_RTX)
5024 {
5025 splay_tree_node node;
5026 struct reg_cond_life_info *rcli;
5027 rtx ncond;
5028
5029 if (some_was_live)
5030 {
5031 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5032 if (node == NULL)
5033 {
5034 /* The register was unconditionally live previously.
5035 No need to do anything. */
5036 }
5037 else
5038 {
5039 /* The register was conditionally live previously.
5040 Subtract the new life cond from the old death cond. */
5041 rcli = (struct reg_cond_life_info *) node->value;
5042 ncond = rcli->condition;
5043 ncond = nand_reg_cond (ncond, cond);
5044
5045 /* If the register is now unconditionally live, remove the
5046 entry in the splay_tree. */
5047 if (ncond == const0_rtx)
5048 {
5049 rcli->condition = NULL_RTX;
5050 splay_tree_remove (pbi->reg_cond_dead, regno);
5051 }
5052 else
5053 rcli->condition = ncond;
5054 }
5055 }
5056 else
5057 {
5058 /* The register was not previously live at all. Record
5059 the condition under which it is still dead. */
5060 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5061 rcli->condition = not_reg_cond (cond);
5062 splay_tree_insert (pbi->reg_cond_dead, regno,
5063 (splay_tree_value) rcli);
5064 }
5065 }
5066 #endif
5067 }
5068
5069 /* Scan expression X and store a 1-bit in PBI->REG_LIVE for each reg it uses.
5070 This is done assuming the registers needed from X are those that
5071 have 1-bits in PBI->REG_LIVE.
5072
5073 INSN is the containing instruction. If INSN is dead, this function
5074 is not called. */
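/* E.g., scanning (set (reg 65) (plus:SI (reg 66) (reg 67))) marks
   pseudos 66 and 67 as live above this insn, but not 65, since a plain
   register destination is a set rather than a use.  */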
5075
5076 static void
5077 mark_used_regs (pbi, x, cond, insn)
5078 struct propagate_block_info *pbi;
5079 rtx x, cond, insn;
5080 {
5081 register RTX_CODE code;
5082 register int regno;
5083 int flags = pbi->flags;
5084
5085 retry:
5086 code = GET_CODE (x);
5087 switch (code)
5088 {
5089 case LABEL_REF:
5090 case SYMBOL_REF:
5091 case CONST_INT:
5092 case CONST:
5093 case CONST_DOUBLE:
5094 case PC:
5095 case ADDR_VEC:
5096 case ADDR_DIFF_VEC:
5097 return;
5098
5099 #ifdef HAVE_cc0
5100 case CC0:
5101 pbi->cc0_live = 1;
5102 return;
5103 #endif
5104
5105 case CLOBBER:
5106 /* If we are clobbering a MEM, mark any registers inside the address
5107 as being used. */
5108 if (GET_CODE (XEXP (x, 0)) == MEM)
5109 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5110 return;
5111
5112 case MEM:
5113 /* Don't bother watching stores to mems if this is not the
5114 final pass. We'll not be deleting dead stores this round. */
5115 if (flags & PROP_SCAN_DEAD_CODE)
5116 {
5117 /* Invalidate the data for the last MEM stored, but only if MEM is
5118 something that can be stored into. */
5119 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5120 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5121 ; /* needn't clear the memory set list */
5122 else
5123 {
5124 rtx temp = pbi->mem_set_list;
5125 rtx prev = NULL_RTX;
5126 rtx next;
5127
5128 while (temp)
5129 {
5130 next = XEXP (temp, 1);
5131 if (anti_dependence (XEXP (temp, 0), x))
5132 {
5133 /* Splice temp out of the list. */
5134 if (prev)
5135 XEXP (prev, 1) = next;
5136 else
5137 pbi->mem_set_list = next;
5138 free_EXPR_LIST_node (temp);
5139 }
5140 else
5141 prev = temp;
5142 temp = next;
5143 }
5144 }
5145
5146 /* If the memory reference had embedded side effects (autoincrement
5147 address modes), we may need to kill some entries on the
5148 memory set list. */
5149 if (insn)
5150 invalidate_mems_from_autoinc (pbi, insn);
5151 }
5152
5153 #ifdef AUTO_INC_DEC
5154 if (flags & PROP_AUTOINC)
5155 find_auto_inc (pbi, x, insn);
5156 #endif
5157 break;
5158
5159 case SUBREG:
5160 if (GET_CODE (SUBREG_REG (x)) == REG
5161 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5162 && (GET_MODE_SIZE (GET_MODE (x))
5163 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
5164 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
5165
5166 /* While we're here, optimize this case. */
5167 x = SUBREG_REG (x);
5168 if (GET_CODE (x) != REG)
5169 goto retry;
5170 /* FALLTHRU */
5171
5172 case REG:
5173 /* A register used in any way other than being set => mark it as needed. */
5174 mark_used_reg (pbi, x, cond, insn);
5175 return;
5176
5177 case SET:
5178 {
5179 register rtx testreg = SET_DEST (x);
5180 int mark_dest = 0;
5181
5182 /* If storing into MEM, don't show it as being used. But do
5183 show the address as being used. */
5184 if (GET_CODE (testreg) == MEM)
5185 {
5186 #ifdef AUTO_INC_DEC
5187 if (flags & PROP_AUTOINC)
5188 find_auto_inc (pbi, testreg, insn);
5189 #endif
5190 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5191 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5192 return;
5193 }
5194
5195 /* Storing in STRICT_LOW_PART is like storing in a reg
5196 in that this SET might be dead, so ignore it in TESTREG,
5197 but in some other ways it is like using the reg.
5198
5199 Storing in a SUBREG or a bit field is like storing the entire
5200 register in that if the register's value is not used
5201 then this SET is not needed. */
5202 while (GET_CODE (testreg) == STRICT_LOW_PART
5203 || GET_CODE (testreg) == ZERO_EXTRACT
5204 || GET_CODE (testreg) == SIGN_EXTRACT
5205 || GET_CODE (testreg) == SUBREG)
5206 {
5207 if (GET_CODE (testreg) == SUBREG
5208 && GET_CODE (SUBREG_REG (testreg)) == REG
5209 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5210 && (GET_MODE_SIZE (GET_MODE (testreg))
5211 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
5212 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
5213
5214 /* Modifying a single register in an alternate mode
5215 does not use any of the old value. But these other
5216 ways of storing in a register do use the old value. */
5217 if (GET_CODE (testreg) == SUBREG
5218 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5219 ;
5220 else
5221 mark_dest = 1;
5222
5223 testreg = XEXP (testreg, 0);
5224 }
5225
5226 /* If this is a store into a register, recursively scan the
5227 value being stored. */
5228
5229 if ((GET_CODE (testreg) == PARALLEL
5230 && GET_MODE (testreg) == BLKmode)
5231 || (GET_CODE (testreg) == REG
5232 && (regno = REGNO (testreg),
5233 ! (regno == FRAME_POINTER_REGNUM
5234 && (! reload_completed || frame_pointer_needed)))
5235 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5236 && ! (regno == HARD_FRAME_POINTER_REGNUM
5237 && (! reload_completed || frame_pointer_needed))
5238 #endif
5239 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5240 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5241 #endif
5242 ))
5243 {
5244 if (mark_dest)
5245 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5246 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5247 return;
5248 }
5249 }
5250 break;
5251
5252 case ASM_OPERANDS:
5253 case UNSPEC_VOLATILE:
5254 case TRAP_IF:
5255 case ASM_INPUT:
5256 {
5257 /* Traditional and volatile asm instructions must be considered to use
5258 and clobber all hard registers, all pseudo-registers and all of
5259 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5260
5261 Consider for instance a volatile asm that changes the fpu rounding
5262 mode. An insn should not be moved across this even if it only uses
5263 pseudo-regs because it might give an incorrectly rounded result.
5264
5265 ?!? Unfortunately, marking all hard registers as live causes massive
5266 problems for the register allocator and marking all pseudos as live
5267 creates mountains of uninitialized variable warnings.
5268
5269 So for now, just clear the memory set list and mark any regs
5270 we can find in ASM_OPERANDS as used. */
5271 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5272 free_EXPR_LIST_list (&pbi->mem_set_list);
5273
5274 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5275 We can not just fall through here, since we would then be confused
5276 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5277 a traditional asm, unlike its normal usage. */
5278 if (code == ASM_OPERANDS)
5279 {
5280 int j;
5281
5282 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5283 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5284 }
5285 break;
5286 }
5287
5288 case COND_EXEC:
5289 if (cond != NULL_RTX)
5290 abort ();
5291
5292 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5293
5294 cond = COND_EXEC_TEST (x);
5295 x = COND_EXEC_CODE (x);
5296 goto retry;
5297
5298 case PHI:
5299 /* We _do_not_ want to scan operands of phi nodes. Operands of
5300 a phi function are evaluated only when control reaches this
5301 block along a particular edge. Therefore, regs that appear
5302 as arguments to phi should not be added to the global live at
5303 start. */
5304 return;
5305
5306 default:
5307 break;
5308 }
5309
5310 /* Recursively scan the operands of this expression. */
5311
5312 {
5313 register const char *fmt = GET_RTX_FORMAT (code);
5314 register int i;
5315
5316 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5317 {
5318 if (fmt[i] == 'e')
5319 {
5320 /* Tail recursive case: save a function call level. */
5321 if (i == 0)
5322 {
5323 x = XEXP (x, 0);
5324 goto retry;
5325 }
5326 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5327 }
5328 else if (fmt[i] == 'E')
5329 {
5330 register int j;
5331 for (j = 0; j < XVECLEN (x, i); j++)
5332 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5333 }
5334 }
5335 }
5336 }
5337 \f
5338 #ifdef AUTO_INC_DEC
5339
5340 static int
5341 try_pre_increment_1 (pbi, insn)
5342 struct propagate_block_info *pbi;
5343 rtx insn;
5344 {
5345 /* Find the next use of this reg. If in same basic block,
5346 make it do pre-increment or pre-decrement if appropriate. */
5347 rtx x = single_set (insn);
5348 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5349 * INTVAL (XEXP (SET_SRC (x), 1)));
5350 int regno = REGNO (SET_DEST (x));
5351 rtx y = pbi->reg_next_use[regno];
5352 if (y != 0
5353 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5354 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5355 mode would be better. */
5356 && ! dead_or_set_p (y, SET_DEST (x))
5357 && try_pre_increment (y, SET_DEST (x), amount))
5358 {
5359 /* We have found a suitable auto-increment
5360 and already changed insn Y to do it.
5361 So flush this increment-instruction. */
5362 PUT_CODE (insn, NOTE);
5363 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5364 NOTE_SOURCE_FILE (insn) = 0;
5365 /* Count a reference to this reg for the increment
5366 insn we are deleting. When a reg is incremented,
5367 spilling it is worse, so we want to make that
5368 less likely. */
5369 if (regno >= FIRST_PSEUDO_REGISTER)
5370 {
5371 REG_N_REFS (regno) += (optimize_size ? 1
5372 : pbi->bb->loop_depth + 1);
5373 REG_N_SETS (regno)++;
5374 }
5375 return 1;
5376 }
5377 return 0;
5378 }
5379
5380 /* Try to change INSN so that it does pre-increment or pre-decrement
5381 addressing on register REG in order to add AMOUNT to REG.
5382 AMOUNT is negative for pre-decrement.
5383 Returns 1 if the change could be made.
5384 This checks all about the validity of the result of modifying INSN. */
5385
5386 static int
5387 try_pre_increment (insn, reg, amount)
5388 rtx insn, reg;
5389 HOST_WIDE_INT amount;
5390 {
5391 register rtx use;
5392
5393 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5394 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5395 int pre_ok = 0;
5396 /* Nonzero if we can try to make a post-increment or post-decrement.
5397 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5398 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5399 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5400 int post_ok = 0;
5401
5402 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5403 int do_post = 0;
5404
5405 /* From the sign of increment, see which possibilities are conceivable
5406 on this target machine. */
5407 if (HAVE_PRE_INCREMENT && amount > 0)
5408 pre_ok = 1;
5409 if (HAVE_POST_INCREMENT && amount > 0)
5410 post_ok = 1;
5411
5412 if (HAVE_PRE_DECREMENT && amount < 0)
5413 pre_ok = 1;
5414 if (HAVE_POST_DECREMENT && amount < 0)
5415 post_ok = 1;
5416
5417 if (! (pre_ok || post_ok))
5418 return 0;
5419
5420 /* It is not safe to add a side effect to a jump insn
5421 because if the incremented register is spilled and must be reloaded
5422 there would be no way to store the incremented value back in memory. */
5423
5424 if (GET_CODE (insn) == JUMP_INSN)
5425 return 0;
5426
5427 use = 0;
5428 if (pre_ok)
5429 use = find_use_as_address (PATTERN (insn), reg, 0);
5430 if (post_ok && (use == 0 || use == (rtx) 1))
5431 {
5432 use = find_use_as_address (PATTERN (insn), reg, -amount);
5433 do_post = 1;
5434 }
5435
5436 if (use == 0 || use == (rtx) 1)
5437 return 0;
5438
5439 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5440 return 0;
5441
5442 /* See if this combination of instruction and addressing mode exists. */
5443 if (! validate_change (insn, &XEXP (use, 0),
5444 gen_rtx_fmt_e (amount > 0
5445 ? (do_post ? POST_INC : PRE_INC)
5446 : (do_post ? POST_DEC : PRE_DEC),
5447 Pmode, reg), 0))
5448 return 0;
5449
5450 /* Record that this insn now has an implicit side effect on X. */
5451 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5452 return 1;
5453 }
5454
5455 #endif /* AUTO_INC_DEC */
5456 \f
5457 /* Find the place in the rtx X where REG is used as a memory address.
5458 Return the MEM rtx that so uses it.
5459 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5460 (plus REG (const_int PLUSCONST)).
5461
5462 If such an address does not appear, return 0.
5463 If REG appears more than once, or is used other than in such an address,
5464 return (rtx)1. */
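/* For instance (hypothetical), in the pattern

	(set (reg 66) (mem:SI (plus (reg 65) (const_int 4))))

   find_use_as_address with PLUSCONST == 4 returns the MEM, while
   PLUSCONST == 0 returns (rtx) 1, since REG then appears only inside
   the PLUS rather than as the whole address.  */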
5465
5466 rtx
5467 find_use_as_address (x, reg, plusconst)
5468 register rtx x;
5469 rtx reg;
5470 HOST_WIDE_INT plusconst;
5471 {
5472 enum rtx_code code = GET_CODE (x);
5473 const char *fmt = GET_RTX_FORMAT (code);
5474 register int i;
5475 register rtx value = 0;
5476 register rtx tem;
5477
5478 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5479 return x;
5480
5481 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5482 && XEXP (XEXP (x, 0), 0) == reg
5483 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5484 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5485 return x;
5486
5487 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5488 {
5489 /* If REG occurs inside a MEM used in a bit-field reference,
5490 that is unacceptable. */
5491 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5492 return (rtx) (HOST_WIDE_INT) 1;
5493 }
5494
5495 if (x == reg)
5496 return (rtx) (HOST_WIDE_INT) 1;
5497
5498 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5499 {
5500 if (fmt[i] == 'e')
5501 {
5502 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5503 if (value == 0)
5504 value = tem;
5505 else if (tem != 0)
5506 return (rtx) (HOST_WIDE_INT) 1;
5507 }
5508 else if (fmt[i] == 'E')
5509 {
5510 register int j;
5511 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5512 {
5513 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5514 if (value == 0)
5515 value = tem;
5516 else if (tem != 0)
5517 return (rtx) (HOST_WIDE_INT) 1;
5518 }
5519 }
5520 }
5521
5522 return value;
5523 }
5524 \f
5525 /* Write information about registers and basic blocks into FILE.
5526 This is part of making a debugging dump. */
5527
5528 void
5529 dump_regset (r, outf)
5530 regset r;
5531 FILE *outf;
5532 {
5533 int i;
5534 if (r == NULL)
5535 {
5536 fputs (" (nil)", outf);
5537 return;
5538 }
5539
5540 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5541 {
5542 fprintf (outf, " %d", i);
5543 if (i < FIRST_PSEUDO_REGISTER)
5544 fprintf (outf, " [%s]",
5545 reg_names[i]);
5546 });
5547 }
5548
5549 void
5550 debug_regset (r)
5551 regset r;
5552 {
5553 dump_regset (r, stderr);
5554 putc ('\n', stderr);
5555 }
5556
5557 void
5558 dump_flow_info (file)
5559 FILE *file;
5560 {
5561 register int i;
5562 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5563
5564 fprintf (file, "%d registers.\n", max_regno);
5565 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5566 if (REG_N_REFS (i))
5567 {
5568 enum reg_class class, altclass;
5569 fprintf (file, "\nRegister %d used %d times across %d insns",
5570 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5571 if (REG_BASIC_BLOCK (i) >= 0)
5572 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5573 if (REG_N_SETS (i))
5574 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5575 (REG_N_SETS (i) == 1) ? "" : "s");
5576 if (REG_USERVAR_P (regno_reg_rtx[i]))
5577 fprintf (file, "; user var");
5578 if (REG_N_DEATHS (i) != 1)
5579 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5580 if (REG_N_CALLS_CROSSED (i) == 1)
5581 fprintf (file, "; crosses 1 call");
5582 else if (REG_N_CALLS_CROSSED (i))
5583 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5584 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5585 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5586 class = reg_preferred_class (i);
5587 altclass = reg_alternate_class (i);
5588 if (class != GENERAL_REGS || altclass != ALL_REGS)
5589 {
5590 if (altclass == ALL_REGS || class == ALL_REGS)
5591 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5592 else if (altclass == NO_REGS)
5593 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5594 else
5595 fprintf (file, "; pref %s, else %s",
5596 reg_class_names[(int) class],
5597 reg_class_names[(int) altclass]);
5598 }
5599 if (REGNO_POINTER_FLAG (i))
5600 fprintf (file, "; pointer");
5601 fprintf (file, ".\n");
5602 }
5603
5604 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5605 for (i = 0; i < n_basic_blocks; i++)
5606 {
5607 register basic_block bb = BASIC_BLOCK (i);
5608 register edge e;
5609
5610 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5611 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5612
5613 fprintf (file, "Predecessors: ");
5614 for (e = bb->pred; e ; e = e->pred_next)
5615 dump_edge_info (file, e, 0);
5616
5617 fprintf (file, "\nSuccessors: ");
5618 for (e = bb->succ; e ; e = e->succ_next)
5619 dump_edge_info (file, e, 1);
5620
5621 fprintf (file, "\nRegisters live at start:");
5622 dump_regset (bb->global_live_at_start, file);
5623
5624 fprintf (file, "\nRegisters live at end:");
5625 dump_regset (bb->global_live_at_end, file);
5626
5627 	      putc ('\n', file);
5628 }
5629
5630 	  putc ('\n', file);
5631 }
5632
5633 void
5634 debug_flow_info ()
5635 {
5636 dump_flow_info (stderr);
5637 }
5638
5639 static void
5640 dump_edge_info (file, e, do_succ)
5641 FILE *file;
5642 edge e;
5643 int do_succ;
5644 {
5645 basic_block side = (do_succ ? e->dest : e->src);
5646
5647 if (side == ENTRY_BLOCK_PTR)
5648 fputs (" ENTRY", file);
5649 else if (side == EXIT_BLOCK_PTR)
5650 fputs (" EXIT", file);
5651 else
5652 fprintf (file, " %d", side->index);
5653
5654 if (e->flags)
5655 {
5656 static const char * const bitnames[] = {
5657 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5658 };
5659 int comma = 0;
5660 int i, flags = e->flags;
5661
5662 fputc (' ', file);
5663 fputc ('(', file);
5664 for (i = 0; flags; i++)
5665 if (flags & (1 << i))
5666 {
5667 flags &= ~(1 << i);
5668
5669 if (comma)
5670 fputc (',', file);
5671 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5672 fputs (bitnames[i], file);
5673 else
5674 fprintf (file, "%d", i);
5675 comma = 1;
5676 }
5677 fputc (')', file);
5678 }
5679 }
5680
5681 \f
5682 /* Print out one basic block with live information at start and end. */
5683 void
5684 dump_bb (bb, outf)
5685 basic_block bb;
5686 FILE *outf;
5687 {
5688 rtx insn;
5689 rtx last;
5690 edge e;
5691
5692 fprintf (outf, ";; Basic block %d, loop depth %d",
5693 bb->index, bb->loop_depth);
5694 if (bb->eh_beg != -1 || bb->eh_end != -1)
5695 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5696 putc ('\n', outf);
5697
5698 fputs (";; Predecessors: ", outf);
5699 for (e = bb->pred; e ; e = e->pred_next)
5700 dump_edge_info (outf, e, 0);
5701 putc ('\n', outf);
5702
5703 fputs (";; Registers live at start:", outf);
5704 dump_regset (bb->global_live_at_start, outf);
5705 putc ('\n', outf);
5706
5707 for (insn = bb->head, last = NEXT_INSN (bb->end);
5708 insn != last;
5709 insn = NEXT_INSN (insn))
5710 print_rtl_single (outf, insn);
5711
5712 fputs (";; Registers live at end:", outf);
5713 dump_regset (bb->global_live_at_end, outf);
5714 putc ('\n', outf);
5715
5716 fputs (";; Successors: ", outf);
5717 for (e = bb->succ; e; e = e->succ_next)
5718 dump_edge_info (outf, e, 1);
5719 putc ('\n', outf);
5720 }
5721
5722 void
5723 debug_bb (bb)
5724 basic_block bb;
5725 {
5726 dump_bb (bb, stderr);
5727 }
5728
5729 void
5730 debug_bb_n (n)
5731 int n;
5732 {
5733 	  dump_bb (BASIC_BLOCK (n), stderr);
5734 }
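
/* Editorial note (not part of the original source): dump_regset,
   debug_regset, debug_flow_info, debug_bb and debug_bb_n take stderr
   or no FILE argument precisely so that they are convenient to call
   by hand from a debugger.  An illustrative gdb session:

	(gdb) call debug_bb_n (3)
	(gdb) call debug_regset (BASIC_BLOCK (3)->global_live_at_start)
	(gdb) call debug_flow_info ()
*/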
5735
5736 /* Like print_rtl, but also print out live information for the start of each
5737 basic block. */
5738
5739 void
5740 print_rtl_with_bb (outf, rtx_first)
5741 FILE *outf;
5742 rtx rtx_first;
5743 {
5744 register rtx tmp_rtx;
5745
5746 if (rtx_first == 0)
5747 fprintf (outf, "(nil)\n");
5748 else
5749 {
5750 int i;
5751 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5752 int max_uid = get_max_uid ();
5753 basic_block *start = (basic_block *)
5754 xcalloc (max_uid, sizeof (basic_block));
5755 basic_block *end = (basic_block *)
5756 xcalloc (max_uid, sizeof (basic_block));
5757 enum bb_state *in_bb_p = (enum bb_state *)
5758 xcalloc (max_uid, sizeof (enum bb_state));
5759
5760 for (i = n_basic_blocks - 1; i >= 0; i--)
5761 {
5762 basic_block bb = BASIC_BLOCK (i);
5763 rtx x;
5764
5765 start[INSN_UID (bb->head)] = bb;
5766 end[INSN_UID (bb->end)] = bb;
5767 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5768 {
5769 enum bb_state state = IN_MULTIPLE_BB;
5770 		  if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
5771 		    state = IN_ONE_BB;
5772 		  in_bb_p[INSN_UID (x)] = state;
5773
5774 if (x == bb->end)
5775 break;
5776 }
5777 }
5778
5779 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5780 {
5781 int did_output;
5782 basic_block bb;
5783
5784 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5785 {
5786 fprintf (outf, ";; Start of basic block %d, registers live:",
5787 bb->index);
5788 dump_regset (bb->global_live_at_start, outf);
5789 putc ('\n', outf);
5790 }
5791
5792 	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
5793 	      && GET_CODE (tmp_rtx) != NOTE
5794 	      && GET_CODE (tmp_rtx) != BARRIER)
5795 	    fprintf (outf, ";; Insn is not within a basic block\n");
5796 	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
5797 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5798
5799 did_output = print_rtl_single (outf, tmp_rtx);
5800
5801 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5802 {
5803 fprintf (outf, ";; End of basic block %d, registers live:\n",
5804 bb->index);
5805 dump_regset (bb->global_live_at_end, outf);
5806 putc ('\n', outf);
5807 }
5808
5809 if (did_output)
5810 putc ('\n', outf);
5811 }
5812
5813 free (start);
5814 free (end);
5815 free (in_bb_p);
5816 }
5817
5818 if (current_function_epilogue_delay_list != 0)
5819 {
5820 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5821 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5822 tmp_rtx = XEXP (tmp_rtx, 1))
5823 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5824 }
5825 }
5826
5827 /* Compute dominator relationships using new flow graph structures. */
5828 void
5829 compute_flow_dominators (dominators, post_dominators)
5830 sbitmap *dominators;
5831 sbitmap *post_dominators;
5832 {
5833 int bb;
5834 sbitmap *temp_bitmap;
5835 edge e;
5836 basic_block *worklist, *workend, *qin, *qout;
5837 int qlen;
5838
5839 /* Allocate a worklist array/queue. Entries are only added to the
5840 list if they were not already on the list. So the size is
5841 bounded by the number of basic blocks. */
5842 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5843 workend = &worklist[n_basic_blocks];
5844
5845 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5846 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5847
5848 if (dominators)
5849 {
5850 /* The optimistic setting of dominators requires us to put every
5851 block on the work list initially. */
5852 qin = qout = worklist;
5853 for (bb = 0; bb < n_basic_blocks; bb++)
5854 {
5855 *qin++ = BASIC_BLOCK (bb);
5856 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5857 }
5858 qlen = n_basic_blocks;
5859 qin = worklist;
5860
5861 /* We want a maximal solution, so initially assume everything dominates
5862 everything else. */
5863 sbitmap_vector_ones (dominators, n_basic_blocks);
5864
5865 /* Mark successors of the entry block so we can identify them below. */
5866 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5867 e->dest->aux = ENTRY_BLOCK_PTR;
5868
5869 /* Iterate until the worklist is empty. */
5870 while (qlen)
5871 {
5872 /* Take the first entry off the worklist. */
5873 basic_block b = *qout++;
5874 if (qout >= workend)
5875 qout = worklist;
5876 qlen--;
5877
5878 bb = b->index;
5879
5880 /* Compute the intersection of the dominators of all the
5881 predecessor blocks.
5882
5883 If one of the predecessor blocks is the ENTRY block, then the
5884 intersection of the dominators of the predecessor blocks is
5885 defined as the null set. We can identify such blocks by the
5886 special value in the AUX field in the block structure. */
5887 if (b->aux == ENTRY_BLOCK_PTR)
5888 {
5889 /* Do not clear the aux field for blocks which are
5890 		 successors of the ENTRY block.  That way we never
5891 add them to the worklist again.
5892
5893 The intersect of dominators of the preds of this block is
5894 defined as the null set. */
5895 sbitmap_zero (temp_bitmap[bb]);
5896 }
5897 else
5898 {
5899 /* Clear the aux field of this block so it can be added to
5900 the worklist again if necessary. */
5901 b->aux = NULL;
5902 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5903 }
5904
5905 /* Make sure each block always dominates itself. */
5906 SET_BIT (temp_bitmap[bb], bb);
5907
5908 /* If the out state of this block changed, then we need to
5909 add the successors of this block to the worklist if they
5910 are not already on the worklist. */
5911 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5912 {
5913 for (e = b->succ; e; e = e->succ_next)
5914 {
5915 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5916 {
5917 *qin++ = e->dest;
5918 if (qin >= workend)
5919 qin = worklist;
5920 qlen++;
5921
5922 e->dest->aux = e;
5923 }
5924 }
5925 }
5926 }
5927 }
5928
5929 if (post_dominators)
5930 {
5931 /* The optimistic setting of dominators requires us to put every
5932 block on the work list initially. */
5933 qin = qout = worklist;
5934 for (bb = 0; bb < n_basic_blocks; bb++)
5935 {
5936 *qin++ = BASIC_BLOCK (bb);
5937 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5938 }
5939 qlen = n_basic_blocks;
5940 qin = worklist;
5941
5942 /* We want a maximal solution, so initially assume everything post
5943 dominates everything else. */
5944 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5945
5946 /* Mark predecessors of the exit block so we can identify them below. */
5947 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5948 e->src->aux = EXIT_BLOCK_PTR;
5949
5950 /* Iterate until the worklist is empty. */
5951 while (qlen)
5952 {
5953 /* Take the first entry off the worklist. */
5954 basic_block b = *qout++;
5955 if (qout >= workend)
5956 qout = worklist;
5957 qlen--;
5958
5959 bb = b->index;
5960
5961 /* Compute the intersection of the post dominators of all the
5962 successor blocks.
5963
5964 If one of the successor blocks is the EXIT block, then the
5965 intersection of the dominators of the successor blocks is
5966 defined as the null set. We can identify such blocks by the
5967 special value in the AUX field in the block structure. */
5968 if (b->aux == EXIT_BLOCK_PTR)
5969 {
5970 /* Do not clear the aux field for blocks which are
5971 		 predecessors of the EXIT block.  That way we never
5972 add them to the worklist again.
5973
5974 The intersect of dominators of the succs of this block is
5975 defined as the null set. */
5976 sbitmap_zero (temp_bitmap[bb]);
5977 }
5978 else
5979 {
5980 /* Clear the aux field of this block so it can be added to
5981 the worklist again if necessary. */
5982 b->aux = NULL;
5983 sbitmap_intersection_of_succs (temp_bitmap[bb],
5984 post_dominators, bb);
5985 }
5986
5987 /* Make sure each block always post dominates itself. */
5988 SET_BIT (temp_bitmap[bb], bb);
5989
5990 /* If the out state of this block changed, then we need to
5991 	     add the predecessors of this block to the worklist if they
5992 are not already on the worklist. */
5993 if (sbitmap_a_and_b (post_dominators[bb],
5994 post_dominators[bb],
5995 temp_bitmap[bb]))
5996 {
5997 for (e = b->pred; e; e = e->pred_next)
5998 {
5999 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6000 {
6001 *qin++ = e->src;
6002 if (qin >= workend)
6003 qin = worklist;
6004 qlen++;
6005
6006 e->src->aux = e;
6007 }
6008 }
6009 }
6010 }
6011 }
6012
6013 free (worklist);
6014 free (temp_bitmap);
6015 }
6016
6017 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
6018
6019 void
6020 compute_immediate_dominators (idom, dominators)
6021 int *idom;
6022 sbitmap *dominators;
6023 {
6024 sbitmap *tmp;
6025 int b;
6026
6027 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6028
6029 /* Begin with tmp(n) = dom(n) - { n }. */
6030 for (b = n_basic_blocks; --b >= 0; )
6031 {
6032 sbitmap_copy (tmp[b], dominators[b]);
6033 RESET_BIT (tmp[b], b);
6034 }
6035
6036 /* Subtract out all of our dominator's dominators. */
6037 for (b = n_basic_blocks; --b >= 0; )
6038 {
6039 sbitmap tmp_b = tmp[b];
6040 int s;
6041
6042 for (s = n_basic_blocks; --s >= 0; )
6043 if (TEST_BIT (tmp_b, s))
6044 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6045 }
6046
6047 /* Find the one bit set in the bitmap and put it in the output array. */
6048 for (b = n_basic_blocks; --b >= 0; )
6049 {
6050 int t;
6051 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6052 }
6053
6054 sbitmap_vector_free (tmp);
6055 }
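
/* Editorial usage sketch (not part of the original source): the typical
   pairing of compute_flow_dominators and compute_immediate_dominators.
   The caller owns both the bitmap vector and the idom array.  */
#if 0
{
  sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  int *idom = (int *) xmalloc (n_basic_blocks * sizeof (int));

  compute_flow_dominators (dom, NULL);	/* Solve for dominators only.  */
  compute_immediate_dominators (idom, dom);

  /* ... idom[b] now holds the index of block B's immediate dominator ... */

  free (idom);
  sbitmap_vector_free (dom);
}
#endif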
6056
6057 /* Recompute register set/reference counts immediately prior to register
6058 allocation.
6059
6060 This avoids problems with set/reference counts changing to/from values
6061 which have special meanings to the register allocators.
6062
6063 Additionally, the reference counts are the primary component used by the
6064 register allocators to prioritize pseudos for allocation to hard regs.
6065 More accurate reference counts generally lead to better register allocation.
6066
6067 F is the first insn to be scanned.
6068
6069 LOOP_STEP denotes how much loop_depth should be incremented per
6070 loop nesting level in order to increase the ref count more for
6071 references in a loop.
6072
6073 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6074 possibly other information which is used by the register allocators. */
6075
6076 void
6077 recompute_reg_usage (f, loop_step)
6078 rtx f ATTRIBUTE_UNUSED;
6079 int loop_step ATTRIBUTE_UNUSED;
6080 {
6081 allocate_reg_life_data ();
6082 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6083 }
6084
6085 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6086 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6087 of the number of registers that died. */
6088
6089 int
6090 count_or_remove_death_notes (blocks, kill)
6091 sbitmap blocks;
6092 int kill;
6093 {
6094 int i, count = 0;
6095
6096 for (i = n_basic_blocks - 1; i >= 0; --i)
6097 {
6098 basic_block bb;
6099 rtx insn;
6100
6101 if (blocks && ! TEST_BIT (blocks, i))
6102 continue;
6103
6104 bb = BASIC_BLOCK (i);
6105
6106 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6107 {
6108 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6109 {
6110 rtx *pprev = &REG_NOTES (insn);
6111 rtx link = *pprev;
6112
6113 while (link)
6114 {
6115 switch (REG_NOTE_KIND (link))
6116 {
6117 case REG_DEAD:
6118 if (GET_CODE (XEXP (link, 0)) == REG)
6119 {
6120 rtx reg = XEXP (link, 0);
6121 int n;
6122
6123 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6124 n = 1;
6125 else
6126 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6127 count += n;
6128 }
6129 /* FALLTHRU */
6130
6131 case REG_UNUSED:
6132 if (kill)
6133 {
6134 rtx next = XEXP (link, 1);
6135 free_EXPR_LIST_node (link);
6136 *pprev = link = next;
6137 break;
6138 }
6139 /* FALLTHRU */
6140
6141 default:
6142 pprev = &XEXP (link, 1);
6143 link = *pprev;
6144 break;
6145 }
6146 }
6147 }
6148
6149 if (insn == bb->end)
6150 break;
6151 }
6152 }
6153
6154 return count;
6155 }
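
/* Editorial usage sketch (not part of the original source): the two
   modes of count_or_remove_death_notes.  CHANGED_BLOCKS is a
   hypothetical caller-maintained sbitmap.  */
#if 0
{
  /* Count deaths across the whole function without touching notes.  */
  int ndead = count_or_remove_death_notes (NULL, 0);

  /* Strip REG_DEAD/REG_UNUSED notes in the marked blocks, typically
     just before life information is recomputed for them.  */
  count_or_remove_death_notes (changed_blocks, 1);
}
#endif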
6156
6157 /* Record INSN's block as BB. */
6158
6159 void
6160 set_block_for_insn (insn, bb)
6161 rtx insn;
6162 basic_block bb;
6163 {
6164 size_t uid = INSN_UID (insn);
6165 if (uid >= basic_block_for_insn->num_elements)
6166 {
6167 int new_size;
6168
6169 /* Add one-eighth the size so we don't keep calling xrealloc. */
6170 new_size = uid + (uid + 7) / 8;
6171
6172 VARRAY_GROW (basic_block_for_insn, new_size);
6173 }
6174 VARRAY_BB (basic_block_for_insn, uid) = bb;
6175 }
6176
6177 /* Record INSN's block number as BB. */
6178 /* ??? This has got to go. */
6179
6180 void
6181 set_block_num (insn, bb)
6182 rtx insn;
6183 int bb;
6184 {
6185 set_block_for_insn (insn, BASIC_BLOCK (bb));
6186 }
6187 \f
6188 /* Verify CFG consistency.  This function checks some CFG invariants and
6189    aborts when something is wrong.  The hope is that it will help to
6190    convert many optimization passes to keep the CFG consistent.
6191 
6192    Currently it performs the following checks:
6193 
6194    - test head/end pointers
6195    - overlapping of basic blocks
6196    - edge list correctness
6197    - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6198    - tails of basic blocks (ensure that the boundary is necessary)
6199    - scans body of the basic block for JUMP_INSN, CODE_LABEL
6200      and NOTE_INSN_BASIC_BLOCK
6201    - check that all insns are in the basic blocks
6202      (except the switch handling code, barriers and notes)
6203    - check that all returns are followed by barriers
6204 
6205    In the future it can be extended to check a lot of other things as well
6206    (reachability of basic blocks, life information, etc.).  */
6207
6208 void
6209 verify_flow_info ()
6210 {
6211 const int max_uid = get_max_uid ();
6212 const rtx rtx_first = get_insns ();
6213 basic_block *bb_info;
6214 rtx x;
6215 int i, last_bb_num_seen, num_bb_notes, err = 0;
6216
6217 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6218
6219 /* First pass check head/end pointers and set bb_info array used by
6220 later passes. */
6221 for (i = n_basic_blocks - 1; i >= 0; i--)
6222 {
6223 basic_block bb = BASIC_BLOCK (i);
6224
6225       /* Check the head pointer and make sure that it is pointing into
6226 	 the insn list.  */
6227 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6228 if (x == bb->head)
6229 break;
6230 if (!x)
6231 {
6232 error ("Head insn %d for block %d not found in the insn stream.",
6233 INSN_UID (bb->head), bb->index);
6234 err = 1;
6235 }
6236
6237       /* Check the end pointer and make sure that it is pointing into
6238 	 the insn list.  */
6239 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6240 {
6241 if (bb_info[INSN_UID (x)] != NULL)
6242 {
6243 error ("Insn %d is in multiple basic blocks (%d and %d)",
6244 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6245 err = 1;
6246 }
6247 bb_info[INSN_UID (x)] = bb;
6248
6249 if (x == bb->end)
6250 break;
6251 }
6252 if (!x)
6253 {
6254 error ("End insn %d for block %d not found in the insn stream.",
6255 INSN_UID (bb->end), bb->index);
6256 err = 1;
6257 }
6258 }
6259
6260 /* Now check the basic blocks (boundaries etc.) */
6261 for (i = n_basic_blocks - 1; i >= 0; i--)
6262 {
6263 basic_block bb = BASIC_BLOCK (i);
6264       /* Check correctness of edge lists.  */
6265 edge e;
6266
6267 e = bb->succ;
6268 while (e)
6269 {
6270 if (e->src != bb)
6271 {
6272 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6273 bb->index);
6274 fprintf (stderr, "Predecessor: ");
6275 dump_edge_info (stderr, e, 0);
6276 fprintf (stderr, "\nSuccessor: ");
6277 dump_edge_info (stderr, e, 1);
6278 fflush (stderr);
6279 err = 1;
6280 }
6281 if (e->dest != EXIT_BLOCK_PTR)
6282 {
6283 edge e2 = e->dest->pred;
6284 while (e2 && e2 != e)
6285 e2 = e2->pred_next;
6286 if (!e2)
6287 {
6288 error ("Basic block %i edge lists are corrupted", bb->index);
6289 err = 1;
6290 }
6291 }
6292 e = e->succ_next;
6293 }
6294
6295 e = bb->pred;
6296 while (e)
6297 {
6298 if (e->dest != bb)
6299 {
6300 error ("Basic block %d pred edge is corrupted", bb->index);
6301 fputs ("Predecessor: ", stderr);
6302 dump_edge_info (stderr, e, 0);
6303 fputs ("\nSuccessor: ", stderr);
6304 dump_edge_info (stderr, e, 1);
6305 fputc ('\n', stderr);
6306 err = 1;
6307 }
6308 if (e->src != ENTRY_BLOCK_PTR)
6309 {
6310 edge e2 = e->src->succ;
6311 while (e2 && e2 != e)
6312 e2 = e2->succ_next;
6313 if (!e2)
6314 {
6315 error ("Basic block %i edge lists are corrupted", bb->index);
6316 err = 1;
6317 }
6318 }
6319 e = e->pred_next;
6320 }
6321
6322       /* OK, pointers are correct.  Now check the header of the basic
6323 	 block.  It ought to contain an optional CODE_LABEL followed
6324 	 by a NOTE_INSN_BASIC_BLOCK note.  */
6325 x = bb->head;
6326 if (GET_CODE (x) == CODE_LABEL)
6327 {
6328 if (bb->end == x)
6329 {
6330 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6331 bb->index);
6332 err = 1;
6333 }
6334 x = NEXT_INSN (x);
6335 }
6336 if (GET_CODE (x) != NOTE
6337 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6338 || NOTE_BASIC_BLOCK (x) != bb)
6339 {
6340 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6341 bb->index);
6342 err = 1;
6343 }
6344
6345 if (bb->end == x)
6346 {
6347 /* Do checks for empty blocks here */
6348 }
6349 else
6350 {
6351 x = NEXT_INSN (x);
6352 while (x)
6353 {
6354 if (GET_CODE (x) == NOTE
6355 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6356 {
6357 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6358 INSN_UID (x), bb->index);
6359 err = 1;
6360 }
6361
6362 if (x == bb->end)
6363 break;
6364
6365 if (GET_CODE (x) == JUMP_INSN
6366 || GET_CODE (x) == CODE_LABEL
6367 || GET_CODE (x) == BARRIER)
6368 {
6369 error ("In basic block %d:", bb->index);
6370 fatal_insn ("Flow control insn inside a basic block", x);
6371 }
6372
6373 x = NEXT_INSN (x);
6374 }
6375 }
6376 }
6377
6378 last_bb_num_seen = -1;
6379 num_bb_notes = 0;
6380 x = rtx_first;
6381 while (x)
6382 {
6383 if (GET_CODE (x) == NOTE
6384 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6385 {
6386 basic_block bb = NOTE_BASIC_BLOCK (x);
6387 num_bb_notes++;
6388 if (bb->index != last_bb_num_seen + 1)
6389 fatal ("Basic blocks not numbered consecutively");
6390 last_bb_num_seen = bb->index;
6391 }
6392
6393 if (!bb_info[INSN_UID (x)])
6394 {
6395 switch (GET_CODE (x))
6396 {
6397 case BARRIER:
6398 case NOTE:
6399 break;
6400
6401 case CODE_LABEL:
6402 	    /* An addr_vec is placed outside any basic block.  */
6403 if (NEXT_INSN (x)
6404 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6405 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6406 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6407 {
6408 x = NEXT_INSN (x);
6409 }
6410
6411 /* But in any case, non-deletable labels can appear anywhere. */
6412 break;
6413
6414 default:
6415 fatal_insn ("Insn outside basic block", x);
6416 }
6417 }
6418
6419 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6420 && GET_CODE (x) == JUMP_INSN
6421 && returnjump_p (x) && ! condjump_p (x)
6422 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6423 fatal_insn ("Return not followed by barrier", x);
6424
6425 x = NEXT_INSN (x);
6426 }
6427
6428 if (num_bb_notes != n_basic_blocks)
6429 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6430 num_bb_notes, n_basic_blocks);
6431
6432 if (err)
6433 abort ();
6434
6435 /* Clean up. */
6436 free (bb_info);
6437 }
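
/* Editorial usage sketch (not part of the original source): a pass that
   rewrites the CFG can call verify_flow_info afterwards to catch
   corruption early.  Since the checks walk the whole insn chain they
   are expensive, so a guard such as the configure-time ENABLE_CHECKING
   macro (an assumption here) is advisable.  */
#if 0
{
  /* ... remove_edge, merge blocks, move insns, etc. ... */

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
#endif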
6438 \f
6439 /* Functions to access an edge list with a vector representation.
6440 Enough data is kept such that given an index number, the
6441    pred and succ that the edge represents can be determined, or
6442    given a pred and a succ, its index number can be returned.
6443    This allows algorithms which consume a lot of memory to
6444 represent the normally full matrix of edge (pred,succ) with a
6445 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6446 wasted space in the client code due to sparse flow graphs. */
6447
6448 /* This function initializes the edge list.  Basically the entire
6449    flowgraph is processed, and all edges are assigned a number,
6450    and the data structure is filled in.  */
6451 struct edge_list *
6452 create_edge_list ()
6453 {
6454 struct edge_list *elist;
6455 edge e;
6456 int num_edges;
6457 int x;
6458 int block_count;
6459
6460 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6461
6462 num_edges = 0;
6463
6464 /* Determine the number of edges in the flow graph by counting successor
6465 edges on each basic block. */
6466 for (x = 0; x < n_basic_blocks; x++)
6467 {
6468 basic_block bb = BASIC_BLOCK (x);
6469
6470 for (e = bb->succ; e; e = e->succ_next)
6471 num_edges++;
6472 }
6473 /* Don't forget successors of the entry block. */
6474 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6475 num_edges++;
6476
6477 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6478 elist->num_blocks = block_count;
6479 elist->num_edges = num_edges;
6480 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6481
6482 num_edges = 0;
6483
6484 /* Follow successors of the entry block, and register these edges. */
6485 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6486 {
6487 elist->index_to_edge[num_edges] = e;
6488 num_edges++;
6489 }
6490
6491 for (x = 0; x < n_basic_blocks; x++)
6492 {
6493 basic_block bb = BASIC_BLOCK (x);
6494
6495 /* Follow all successors of blocks, and register these edges. */
6496 for (e = bb->succ; e; e = e->succ_next)
6497 {
6498 elist->index_to_edge[num_edges] = e;
6499 num_edges++;
6500 }
6501 }
6502 return elist;
6503 }
6504
6505 /* This function frees the memory associated with an edge list.  */
6506 void
6507 free_edge_list (elist)
6508 struct edge_list *elist;
6509 {
6510 if (elist)
6511 {
6512 free (elist->index_to_edge);
6513 free (elist);
6514 }
6515 }
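
/* Editorial usage sketch (not part of the original source): the life
   cycle of an edge list.  Per-edge data can be kept in a plain vector
   indexed by the edge number.  */
#if 0
{
  struct edge_list *elist = create_edge_list ();
  int i;

  for (i = 0; i < NUM_EDGES (elist); i++)
    {
      basic_block pred = INDEX_EDGE_PRED_BB (elist, i);
      basic_block succ = INDEX_EDGE_SUCC_BB (elist, i);
      /* ... record data for edge I ... */
    }

  /* The reverse mapping: EDGE_INDEX returns EDGE_INDEX_NO_EDGE when
     no such edge exists.  */
  if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (0))
      != EDGE_INDEX_NO_EDGE)
    ;	/* The entry block has an edge to block 0.  */

  free_edge_list (elist);
}
#endif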
6516
6517 /* This function provides debug output showing an edge list. */
6518 void
6519 print_edge_list (f, elist)
6520 FILE *f;
6521 struct edge_list *elist;
6522 {
6523 int x;
6524   fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6525 	   elist->num_blocks - 2, elist->num_edges);
6526 
6527   for (x = 0; x < elist->num_edges; x++)
6528     {
6529       fprintf (f, " %-4d - edge(", x);
6530       if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6531 	fprintf (f, "entry,");
6532       else
6533 	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6534 
6535       if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6536 	fprintf (f, "exit)\n");
6537       else
6538 	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6539 }
6540 }
6541
6542 /* This function provides an internal consistency check of an edge list,
6543 verifying that all edges are present, and that there are no
6544 extra edges. */
6545 void
6546 verify_edge_list (f, elist)
6547 FILE *f;
6548 struct edge_list *elist;
6549 {
6550 int x, pred, succ, index;
6551 edge e;
6552
6553 for (x = 0; x < n_basic_blocks; x++)
6554 {
6555 basic_block bb = BASIC_BLOCK (x);
6556
6557 for (e = bb->succ; e; e = e->succ_next)
6558 {
6559 pred = e->src->index;
6560 succ = e->dest->index;
6561 index = EDGE_INDEX (elist, e->src, e->dest);
6562 if (index == EDGE_INDEX_NO_EDGE)
6563 {
6564 	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6565 continue;
6566 }
6567 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6568 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6569 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6570 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6571 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6572 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6573 }
6574 }
6575 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6576 {
6577 pred = e->src->index;
6578 succ = e->dest->index;
6579 index = EDGE_INDEX (elist, e->src, e->dest);
6580 if (index == EDGE_INDEX_NO_EDGE)
6581 {
6582 	  fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6583 continue;
6584 }
6585 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6586 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6587 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6588 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6589 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6590 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6591 }
6592   /* We've verified that all the edges are in the list, now let's make sure
6593 there are no spurious edges in the list. */
6594
6595 for (pred = 0 ; pred < n_basic_blocks; pred++)
6596 for (succ = 0 ; succ < n_basic_blocks; succ++)
6597 {
6598 basic_block p = BASIC_BLOCK (pred);
6599 basic_block s = BASIC_BLOCK (succ);
6600
6601 int found_edge = 0;
6602
6603 for (e = p->succ; e; e = e->succ_next)
6604 if (e->dest == s)
6605 {
6606 found_edge = 1;
6607 break;
6608 }
6609 for (e = s->pred; e; e = e->pred_next)
6610 if (e->src == p)
6611 {
6612 found_edge = 1;
6613 break;
6614 }
6615 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6616 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6617 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6618 pred, succ);
6619 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6620 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6621 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6622 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6623 BASIC_BLOCK (succ)));
6624 }
6625 for (succ = 0 ; succ < n_basic_blocks; succ++)
6626 {
6627 basic_block p = ENTRY_BLOCK_PTR;
6628 basic_block s = BASIC_BLOCK (succ);
6629
6630 int found_edge = 0;
6631
6632 for (e = p->succ; e; e = e->succ_next)
6633 if (e->dest == s)
6634 {
6635 found_edge = 1;
6636 break;
6637 }
6638 for (e = s->pred; e; e = e->pred_next)
6639 if (e->src == p)
6640 {
6641 found_edge = 1;
6642 break;
6643 }
6644 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6645 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6646 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6647 succ);
6648 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6649 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6650 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6651 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6652 BASIC_BLOCK (succ)));
6653 }
6654 for (pred = 0 ; pred < n_basic_blocks; pred++)
6655 {
6656 basic_block p = BASIC_BLOCK (pred);
6657 basic_block s = EXIT_BLOCK_PTR;
6658
6659 int found_edge = 0;
6660
6661 for (e = p->succ; e; e = e->succ_next)
6662 if (e->dest == s)
6663 {
6664 found_edge = 1;
6665 break;
6666 }
6667 for (e = s->pred; e; e = e->pred_next)
6668 if (e->src == p)
6669 {
6670 found_edge = 1;
6671 break;
6672 }
6673 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6674 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6675 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6676 pred);
6677 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6678 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6679 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6680 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6681 EXIT_BLOCK_PTR));
6682 }
6683 }
6684
6685 /* This routine will determine what, if any, edge there is between
6686 a specified predecessor and successor. */
6687
6688 int
6689 find_edge_index (edge_list, pred, succ)
6690 struct edge_list *edge_list;
6691 basic_block pred, succ;
6692 {
6693 int x;
6694 for (x = 0; x < NUM_EDGES (edge_list); x++)
6695 {
6696 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6697 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6698 return x;
6699 }
6700 return (EDGE_INDEX_NO_EDGE);
6701 }
6702
6703 /* This function will remove an edge from the flow graph. */
6704 void
6705 remove_edge (e)
6706 edge e;
6707 {
6708 edge last_pred = NULL;
6709 edge last_succ = NULL;
6710 edge tmp;
6711 basic_block src, dest;
6712 src = e->src;
6713 dest = e->dest;
6714 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6715 last_succ = tmp;
6716
6717 if (!tmp)
6718 abort ();
6719 if (last_succ)
6720 last_succ->succ_next = e->succ_next;
6721 else
6722 src->succ = e->succ_next;
6723
6724 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6725 last_pred = tmp;
6726
6727 if (!tmp)
6728 abort ();
6729 if (last_pred)
6730 last_pred->pred_next = e->pred_next;
6731 else
6732 dest->pred = e->pred_next;
6733
6734 n_edges--;
6735 free (e);
6736 }
6737
6738 /* This routine will remove any fake successor edges for a basic block.
6739 When the edge is removed, it is also removed from whatever predecessor
6740 list it is in. */
6741 static void
6742 remove_fake_successors (bb)
6743 basic_block bb;
6744 {
6745 edge e;
6746 for (e = bb->succ; e ; )
6747 {
6748 edge tmp = e;
6749 e = e->succ_next;
6750 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6751 remove_edge (tmp);
6752 }
6753 }
6754
6755 /* This routine will remove all fake edges from the flow graph. If
6756 we remove all fake successors, it will automatically remove all
6757 fake predecessors. */
6758 void
6759 remove_fake_edges ()
6760 {
6761 int x;
6762
6763 for (x = 0; x < n_basic_blocks; x++)
6764 remove_fake_successors (BASIC_BLOCK (x));
6765
6766 /* We've handled all successors except the entry block's. */
6767 remove_fake_successors (ENTRY_BLOCK_PTR);
6768 }
6769
6770 /* This function will add a fake edge between any block which has no
6771    successors and the exit block.  Some data flow equations require these
6772 edges to exist. */
6773 void
6774 add_noreturn_fake_exit_edges ()
6775 {
6776 int x;
6777
6778 for (x = 0; x < n_basic_blocks; x++)
6779 if (BASIC_BLOCK (x)->succ == NULL)
6780 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6781 }
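
/* Editorial usage sketch (not part of the original source): the usual
   bracketing of a dataflow problem with fake edges, so that blocks
   ending in noreturn calls still participate in the exit block's
   equations (e.g. the post-dominator computation above).  */
#if 0
{
  add_noreturn_fake_exit_edges ();

  /* ... solve the dataflow problem ... */

  remove_fake_edges ();
}
#endif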
6782 \f
6783 /* Dump the list of basic blocks in the bitmap NODES. */
6784 static void
6785 flow_nodes_print (str, nodes, file)
6786 const char *str;
6787 const sbitmap nodes;
6788 FILE *file;
6789 {
6790 int node;
6791
6792 fprintf (file, "%s { ", str);
6793 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6794 fputs ("}\n", file);
6795 }
6796
6797
6798 /* Dump the list of exiting edges in the array EDGES. */
6799 static void
6800 flow_exits_print (str, edges, num_edges, file)
6801 const char *str;
6802 const edge *edges;
6803 int num_edges;
6804 FILE *file;
6805 {
6806 int i;
6807
6808 fprintf (file, "%s { ", str);
6809 for (i = 0; i < num_edges; i++)
6810 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6811 fputs ("}\n", file);
6812 }
6813
6814
6815 /* Dump loop related CFG information. */
6816 static void
6817 flow_loops_cfg_dump (loops, file)
6818 const struct loops *loops;
6819 FILE *file;
6820 {
6821 int i;
6822
6823 if (! loops->num || ! file || ! loops->cfg.dom)
6824 return;
6825
6826 for (i = 0; i < n_basic_blocks; i++)
6827 {
6828 edge succ;
6829
6830 fprintf (file, ";; %d succs { ", i);
6831 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6832 fprintf (file, "%d ", succ->dest->index);
6833 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6834 }
6835
6836
6837 /* Dump the DFS node order. */
6838 if (loops->cfg.dfs_order)
6839 {
6840 fputs (";; DFS order: ", file);
6841 for (i = 0; i < n_basic_blocks; i++)
6842 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6843 fputs ("\n", file);
6844 }
6845 }
6846
6847
6848 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6849 static int
6850 flow_loop_nested_p (outer, loop)
6851 struct loop *outer;
6852 struct loop *loop;
6853 {
6854 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6855 }
6856
6857
6858 /* Dump the loop information specified by LOOPS to the stream FILE. */
6859 void
6860 flow_loops_dump (loops, file, verbose)
6861 const struct loops *loops;
6862 FILE *file;
6863 int verbose;
6864 {
6865 int i;
6866 int num_loops;
6867
6868 num_loops = loops->num;
6869 if (! num_loops || ! file)
6870 return;
6871
6872 fprintf (file, ";; %d loops found, %d levels\n",
6873 num_loops, loops->levels);
6874
6875 for (i = 0; i < num_loops; i++)
6876 {
6877 struct loop *loop = &loops->array[i];
6878
6879 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6880 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6881 loop->header->index, loop->latch->index,
6882 loop->pre_header ? loop->pre_header->index : -1,
6883 loop->depth, loop->level,
6884 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6885 fprintf (file, ";; %d", loop->num_nodes);
6886 flow_nodes_print (" nodes", loop->nodes, file);
6887 fprintf (file, ";; %d", loop->num_exits);
6888 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6889
6890 if (loop->shared)
6891 {
6892 int j;
6893
6894 for (j = 0; j < i; j++)
6895 {
6896 struct loop *oloop = &loops->array[j];
6897
6898 if (loop->header == oloop->header)
6899 {
6900 int disjoint;
6901 int smaller;
6902
6903 smaller = loop->num_nodes < oloop->num_nodes;
6904
6905 		  /* If the union of LOOP and OLOOP is different from
6906 the larger of LOOP and OLOOP then LOOP and OLOOP
6907 must be disjoint. */
6908 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6909 smaller ? oloop : loop);
6910 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6911 loop->header->index, i, j,
6912 disjoint ? "disjoint" : "nested");
6913 }
6914 }
6915 }
6916
6917 if (verbose)
6918 {
6919 /* Print diagnostics to compare our concept of a loop with
6920 what the loop notes say. */
6921 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6922 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6923 != NOTE_INSN_LOOP_BEG)
6924 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6925 INSN_UID (PREV_INSN (loop->first->head)));
6926 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6927 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6928 != NOTE_INSN_LOOP_END)
6929 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6930 INSN_UID (NEXT_INSN (loop->last->end)));
6931 }
6932 }
6933
6934 if (verbose)
6935 flow_loops_cfg_dump (loops, file);
6936 }
6937
6938
6939 /* Free all the memory allocated for LOOPS. */
6940 void
6941 flow_loops_free (loops)
6942 struct loops *loops;
6943 {
6944 if (loops->array)
6945 {
6946 int i;
6947
6948 if (! loops->num)
6949 abort ();
6950
6951 /* Free the loop descriptors. */
6952 for (i = 0; i < loops->num; i++)
6953 {
6954 struct loop *loop = &loops->array[i];
6955
6956 if (loop->nodes)
6957 sbitmap_free (loop->nodes);
6958 if (loop->exits)
6959 free (loop->exits);
6960 }
6961 free (loops->array);
6962 loops->array = NULL;
6963
6964 if (loops->cfg.dom)
6965 sbitmap_vector_free (loops->cfg.dom);
6966 if (loops->cfg.dfs_order)
6967 free (loops->cfg.dfs_order);
6968
6969 sbitmap_free (loops->shared_headers);
6970 }
6971 }
6972
6973
6974 /* Find the exits from the loop using the bitmap of loop nodes NODES
6975    and store them in the EXITS array.  Return the number of exits from the
6976 loop. */
6977 static int
6978 flow_loop_exits_find (nodes, exits)
6979 const sbitmap nodes;
6980 edge **exits;
6981 {
6982 edge e;
6983 int node;
6984 int num_exits;
6985
6986 *exits = NULL;
6987
6988 /* Check all nodes within the loop to see if there are any
6989 successors not in the loop. Note that a node may have multiple
6990 exiting edges. */
6991 num_exits = 0;
6992 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6993 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6994 {
6995 basic_block dest = e->dest;
6996
6997 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6998 num_exits++;
6999 }
7000 });
7001
7002 if (! num_exits)
7003 return 0;
7004
7005   *exits = (edge *) xmalloc (num_exits * sizeof (edge));
7006
7007 /* Store all exiting edges into an array. */
7008 num_exits = 0;
7009 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7010 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7011 {
7012 basic_block dest = e->dest;
7013
7014 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7015 (*exits)[num_exits++] = e;
7016 }
7017 });
7018
7019 return num_exits;
7020 }
7021
7022
7023 /* Find the nodes contained within the loop with header HEADER and
7024    latch LATCH and store them in NODES.  Return the number of nodes within
7025 the loop. */
7026 static int
7027 flow_loop_nodes_find (header, latch, nodes)
7028 basic_block header;
7029 basic_block latch;
7030 sbitmap nodes;
7031 {
7032 basic_block *stack;
7033 int sp;
7034 int num_nodes = 0;
7035
7036 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7037 sp = 0;
7038
7039 /* Start with only the loop header in the set of loop nodes. */
7040 sbitmap_zero (nodes);
7041 SET_BIT (nodes, header->index);
7042 num_nodes++;
7043 header->loop_depth++;
7044
7045 /* Push the loop latch on to the stack. */
7046 if (! TEST_BIT (nodes, latch->index))
7047 {
7048 SET_BIT (nodes, latch->index);
7049 latch->loop_depth++;
7050 num_nodes++;
7051 stack[sp++] = latch;
7052 }
7053
7054 while (sp)
7055 {
7056 basic_block node;
7057 edge e;
7058
7059 node = stack[--sp];
7060 for (e = node->pred; e; e = e->pred_next)
7061 {
7062 basic_block ancestor = e->src;
7063
7064 	  /* If the ancestor is not marked as part of the loop, add it to
7065 	     the set of loop nodes and push it onto the stack.  */
7066 if (ancestor != ENTRY_BLOCK_PTR
7067 && ! TEST_BIT (nodes, ancestor->index))
7068 {
7069 SET_BIT (nodes, ancestor->index);
7070 ancestor->loop_depth++;
7071 num_nodes++;
7072 stack[sp++] = ancestor;
7073 }
7074 }
7075 }
7076 free (stack);
7077 return num_nodes;
7078 }
7079
7080
7081 /* Compute the depth first search order of the CFG and store it in the
7082    array DFS_ORDER.  Returns the number of nodes visited.  */
7084 static int
7085 flow_depth_first_order_compute (dfs_order)
7086 int *dfs_order;
7087 {
7088 edge e;
7089 edge *stack;
7090 int sp;
7091 int dfsnum = 0;
7092 sbitmap visited;
7093
7094 /* Allocate stack for back-tracking up CFG. */
7095 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
7096 sp = 0;
7097
7098 /* Allocate bitmap to track nodes that have been visited. */
7099 visited = sbitmap_alloc (n_basic_blocks);
7100
7101 /* None of the nodes in the CFG have been visited yet. */
7102 sbitmap_zero (visited);
7103
7104 /* Start with the first successor edge from the entry block. */
7105 e = ENTRY_BLOCK_PTR->succ;
7106 while (e)
7107 {
7108 basic_block src = e->src;
7109 basic_block dest = e->dest;
7110
7111 /* Mark that we have visited this node. */
7112 if (src != ENTRY_BLOCK_PTR)
7113 SET_BIT (visited, src->index);
7114
7115 /* If this node has not been visited before, push the current
7116 edge on to the stack and proceed with the first successor
7117 edge of this node. */
7118 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7119 && dest->succ)
7120 {
7121 stack[sp++] = e;
7122 e = dest->succ;
7123 }
7124 else
7125 {
7126 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7127 && ! dest->succ)
7128 {
7129 /* DEST has no successors (for example, a non-returning
7130 function is called) so do not push the current edge
7131 but carry on with its next successor. */
7132 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
7133 SET_BIT (visited, dest->index);
7134 }
7135
7136 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
7137 {
7138 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
7139
7140 /* Pop edge off stack. */
7141 e = stack[--sp];
7142 src = e->src;
7143 }
7144 e = e->succ_next;
7145 }
7146 }
7147 free (stack);
7148 sbitmap_free (visited);
7149
7150 /* The number of nodes visited should not be greater than
7151 n_basic_blocks. */
7152 if (dfsnum > n_basic_blocks)
7153 abort ();
7154
7155   /* If fewer nodes were visited, some blocks are unreachable from the entry block.  */
7156 if (dfsnum < n_basic_blocks)
7157 abort ();
7158 return dfsnum;
7159 }
7160
7161
7162 /* Return the block for the pre-header of the loop with header
7163 HEADER where DOM specifies the dominator information. Return NULL if
7164 there is no pre-header. */
7165 static basic_block
7166 flow_loop_pre_header_find (header, dom)
7167 basic_block header;
7168 const sbitmap *dom;
7169 {
7170 basic_block pre_header;
7171 edge e;
7172
7173   /* If block P is a predecessor of the header that the header does not
7174      dominate, and P is the only such predecessor, then P is the pre-header.  */
7175 pre_header = NULL;
7176 for (e = header->pred; e; e = e->pred_next)
7177 {
7178 basic_block node = e->src;
7179
7180 if (node != ENTRY_BLOCK_PTR
7181 && ! TEST_BIT (dom[node->index], header->index))
7182 {
7183 if (pre_header == NULL)
7184 pre_header = node;
7185 else
7186 {
7187 /* There are multiple edges into the header from outside
7188 the loop so there is no pre-header block. */
7189 pre_header = NULL;
7190 break;
7191 }
7192 }
7193 }
7194 return pre_header;
7195 }
7196
7197
7198 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7199 previously added. The insertion algorithm assumes that the loops
7200 are added in the order found by a depth first search of the CFG. */
7201 static void
7202 flow_loop_tree_node_add (prevloop, loop)
7203 struct loop *prevloop;
7204 struct loop *loop;
7205 {
7206
7207 if (flow_loop_nested_p (prevloop, loop))
7208 {
7209 prevloop->inner = loop;
7210 loop->outer = prevloop;
7211 return;
7212 }
7213
7214 while (prevloop->outer)
7215 {
7216 if (flow_loop_nested_p (prevloop->outer, loop))
7217 {
7218 prevloop->next = loop;
7219 loop->outer = prevloop->outer;
7220 return;
7221 }
7222 prevloop = prevloop->outer;
7223 }
7224
7225 prevloop->next = loop;
7226 loop->outer = NULL;
7227 }
7228
7229
7230 /* Build the loop hierarchy tree for LOOPS. */
7231 static void
7232 flow_loops_tree_build (loops)
7233 struct loops *loops;
7234 {
7235 int i;
7236 int num_loops;
7237
7238 num_loops = loops->num;
7239 if (! num_loops)
7240 return;
7241
7242 /* Root the loop hierarchy tree with the first loop found.
7243 Since we used a depth first search this should be the
7244 outermost loop. */
7245 loops->tree = &loops->array[0];
7246 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7247
7248 /* Add the remaining loops to the tree. */
7249 for (i = 1; i < num_loops; i++)
7250 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7251 }
7252
7253
7254 /* Helper function to compute loop nesting depth and enclosed loop level
7255 for the natural loop specified by LOOP at the loop depth DEPTH.
7256 Returns the loop level. */
7257 static int
7258 flow_loop_level_compute (loop, depth)
7259 struct loop *loop;
7260 int depth;
7261 {
7262 struct loop *inner;
7263 int level = 1;
7264
7265 if (! loop)
7266 return 0;
7267
7268 /* Traverse loop tree assigning depth and computing level as the
7269 maximum level of all the inner loops of this loop. The loop
7270 level is equivalent to the height of the loop in the loop tree
7271 and corresponds to the number of enclosed loop levels (including
7272 itself). */
7273 for (inner = loop->inner; inner; inner = inner->next)
7274 {
7275 int ilevel;
7276
7277 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7278
7279 if (ilevel > level)
7280 level = ilevel;
7281 }
7282 loop->level = level;
7283 loop->depth = depth;
7284 return level;
7285 }
7286
7287
7288 /* Compute the loop nesting depth and enclosed loop level for the loop
7289    hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
7290 level. */
7291
7292 static int
7293 flow_loops_level_compute (loops)
7294 struct loops *loops;
7295 {
7296 struct loop *loop;
7297 int level;
7298 int levels = 0;
7299
7300 /* Traverse all the outer level loops. */
7301 for (loop = loops->tree; loop; loop = loop->next)
7302 {
7303 level = flow_loop_level_compute (loop, 1);
7304 if (level > levels)
7305 levels = level;
7306 }
7307 return levels;
7308 }
7309
7310
7311 /* Find all the natural loops in the function and save in LOOPS structure
7312 and recalculate loop_depth information in basic block structures.
7313 Return the number of natural loops found. */
7314
7315 int
7316 flow_loops_find (loops)
7317 struct loops *loops;
7318 {
7319 int i;
7320 int b;
7321 int num_loops;
7322 edge e;
7323 sbitmap headers;
7324 sbitmap *dom;
7325 int *dfs_order;
7326
7327 loops->num = 0;
7328 loops->array = NULL;
7329 loops->tree = NULL;
7330 dfs_order = NULL;
7331
7332 /* Taking care of this degenerate case makes the rest of
7333 this code simpler. */
7334 if (n_basic_blocks == 0)
7335 return 0;
7336
7337 /* Compute the dominators. */
7338 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7339 compute_flow_dominators (dom, NULL);
7340
7341 /* Count the number of loop edges (back edges). This should be the
7342      same as the number of natural loops.  Also clear each block's
7343      loop_depth; as loops are processed, flow_loop_nodes_find will
7344      increment loop_depth for the nodes within each loop, so every
7345      node ends up incremented once per enclosing loop.  */
7346
7347 num_loops = 0;
7348 for (b = 0; b < n_basic_blocks; b++)
7349 {
7350 BASIC_BLOCK (b)->loop_depth = 0;
7351 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7352 {
7353 basic_block latch = e->src;
7354
7355 /* Look for back edges where a predecessor is dominated
7356 by this block. A natural loop has a single entry
7357 node (header) that dominates all the nodes in the
7358 	     loop.  It also has a single back edge to the header
7359 from a latch node. Note that multiple natural loops
7360 may share the same header. */
7361 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7362 num_loops++;
7363 }
7364 }
7365
7366 if (num_loops)
7367 {
7368 /* Compute depth first search order of the CFG so that outer
7369 natural loops will be found before inner natural loops. */
7370 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7371 flow_depth_first_order_compute (dfs_order);
7372
7373 /* Allocate loop structures. */
7374 loops->array
7375 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7376
7377 headers = sbitmap_alloc (n_basic_blocks);
7378 sbitmap_zero (headers);
7379
7380 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7381 sbitmap_zero (loops->shared_headers);
7382
7383 /* Find and record information about all the natural loops
7384 in the CFG. */
7385 num_loops = 0;
7386 for (b = 0; b < n_basic_blocks; b++)
7387 {
7388 basic_block header;
7389
7390 	  /* Search the nodes of the CFG in DFS order so that we can
7391 	     find outer loops first.  */
7392 header = BASIC_BLOCK (dfs_order[b]);
7393
7394 /* Look for all the possible latch blocks for this header. */
7395 for (e = header->pred; e; e = e->pred_next)
7396 {
7397 basic_block latch = e->src;
7398
7399 /* Look for back edges where a predecessor is dominated
7400 by this block. A natural loop has a single entry
7401 node (header) that dominates all the nodes in the
7402 		 loop.  It also has a single back edge to the header
7403 from a latch node. Note that multiple natural loops
7404 may share the same header. */
7405 if (latch != ENTRY_BLOCK_PTR
7406 && TEST_BIT (dom[latch->index], header->index))
7407 {
7408 struct loop *loop;
7409
7410 loop = loops->array + num_loops;
7411
7412 loop->header = header;
7413 loop->latch = latch;
7414
7415 /* Keep track of blocks that are loop headers so
7416 that we can tell which loops should be merged. */
7417 if (TEST_BIT (headers, header->index))
7418 SET_BIT (loops->shared_headers, header->index);
7419 SET_BIT (headers, header->index);
7420
7421 /* Find nodes contained within the loop. */
7422 loop->nodes = sbitmap_alloc (n_basic_blocks);
7423 loop->num_nodes
7424 = flow_loop_nodes_find (header, latch, loop->nodes);
7425
7426 /* Compute first and last blocks within the loop.
7427 These are often the same as the loop header and
7428 loop latch respectively, but this is not always
7429 the case. */
7430 loop->first
7431 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7432 loop->last
7433 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7434
7435 /* Find edges which exit the loop. Note that a node
7436 may have several exit edges. */
7437 loop->num_exits
7438 = flow_loop_exits_find (loop->nodes, &loop->exits);
7439
7440 /* Look to see if the loop has a pre-header node. */
7441 loop->pre_header
7442 = flow_loop_pre_header_find (header, dom);
7443
7444 num_loops++;
7445 }
7446 }
7447 }
7448
7449 /* Natural loops with shared headers may either be disjoint or
7450 nested. Disjoint loops with shared headers cannot be inner
7451 loops and should be merged. For now just mark loops that share
7452 headers. */
7453 for (i = 0; i < num_loops; i++)
7454 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7455 loops->array[i].shared = 1;
7456
7457 sbitmap_free (headers);
7458 }
7459
7460 loops->num = num_loops;
7461
7462 /* Save CFG derived information to avoid recomputing it. */
7463 loops->cfg.dom = dom;
7464 loops->cfg.dfs_order = dfs_order;
7465
7466 /* Build the loop hierarchy tree. */
7467 flow_loops_tree_build (loops);
7468
7469 /* Assign the loop nesting depth and enclosed loop level for each
7470 loop. */
7471 loops->levels = flow_loops_level_compute (loops);
7472
7473 return num_loops;
7474 }
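
/* Editorial usage sketch (not part of the original source): discovering,
   dumping and releasing the natural loop information.  */
#if 0
{
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    flow_loops_dump (&loops, stderr, 1);

  flow_loops_free (&loops);
}
#endif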
7475
7476
7477 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP.  */
7478 int
7479 flow_loop_outside_edge_p (loop, e)
7480 const struct loop *loop;
7481 edge e;
7482 {
7483 if (e->dest != loop->header)
7484 abort ();
7485 return (e->src == ENTRY_BLOCK_PTR)
7486 || ! TEST_BIT (loop->nodes, e->src->index);
7487 }
7488
7489
7490 /* Clear LOG_LINKS fields of insns in a chain. */
7491 void
7492 clear_log_links (insns)
7493 rtx insns;
7494 {
7495 rtx i;
7496 for (i = insns; i; i = NEXT_INSN (i))
7497 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7498 LOG_LINKS (i) = 0;
7499 }