[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
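
   As a minimal sketch (REGNO and INSN here are hypothetical, not part of
   this file), a consumer of this information might test

	REGNO_REG_SET_P (bb->global_live_at_start, REGNO)

   to see whether REGNO is live on entry to BB, and

	find_reg_note (INSN, REG_DEAD, regno_reg_rtx[REGNO])

   to see whether the value in REGNO is no longer needed after INSN.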
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
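
   For example (an illustrative sketch, assuming the target supports
   POST_INC addressing), the pair of insns

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   has its memory reference rewritten as

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   and the separate increment insn is deleted.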
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
176
177 /* The obstack on which the flow graph components are allocated. */
178
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
181
182 /* Number of basic blocks in the current function. */
183
184 int n_basic_blocks;
185
186 /* Number of edges in the current function. */
187
188 int n_edges;
189
190 /* The basic block array. */
191
192 varray_type basic_block_info;
193
194 /* The special entry and exit blocks. */
195
196 struct basic_block_def entry_exit_blocks[2]
197 = {{NULL, /* head */
198 NULL, /* end */
199 NULL, /* head_tree */
200 NULL, /* end_tree */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 ENTRY_BLOCK, /* index */
209 0, /* loop_depth */
210 0, /* count */
211 0 /* frequency */
212 },
213 {
214 NULL, /* head */
215 NULL, /* end */
216 NULL, /* head_tree */
217 NULL, /* end_tree */
218 NULL, /* pred */
219 NULL, /* succ */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
224 NULL, /* aux */
225 EXIT_BLOCK, /* index */
226 0, /* loop_depth */
227 0, /* count */
228 0 /* frequency */
229 }
230 };
231
232 /* Nonzero if the second flow pass has completed. */
233 int flow2_completed;
234
235 /* Maximum register number used in this function, plus one. */
236
237 int max_regno;
238
239 /* Indexed by n, giving various register information */
240
241 varray_type reg_n_info;
242
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
245
246 int regset_bytes;
247 int regset_size;
248
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
251
252 regset regs_live_at_setjmp;
253
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
258 rtx regs_may_share;
259
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
263
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
266
267 static HARD_REG_SET elim_reg_set;
268
269 /* The basic block structure for every insn, indexed by uid. */
270
271 varray_type basic_block_for_insn;
272
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
276
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
279
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
282 {
283 /* A boolean expression of conditions under which a register is dead. */
284 rtx condition;
285 /* Conditions under which a register is dead at the basic block end. */
286 rtx orig_condition;
287
288 /* A boolean expression of conditions under which a register has been
289 stored into. */
290 rtx stores;
291
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
294 };
295
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
298
299 struct propagate_block_info
300 {
301 /* The basic block we're considering. */
302 basic_block bb;
303
304 /* Bit N is set if register N is conditionally or unconditionally live. */
305 regset reg_live;
306
307 /* Bit N is set if register N is set this insn. */
308 regset new_set;
309
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
312 rtx *reg_next_use;
313
314 /* Contains a list of all the MEMs we are tracking for dead store
315 elimination. */
316 rtx mem_set_list;
317
318 /* If non-null, record the set of registers set unconditionally in the
319 basic block. */
320 regset local_set;
321
322 /* If non-null, record the set of registers set conditionally in the
323 basic block. */
324 regset cond_local_set;
325
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
330
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
332 regset reg_cond_reg;
333 #endif
334
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
337
338 /* Non-zero if the value of CC0 is live. */
339 int cc0_live;
340
341 /* Flags controlling the set of information propagate_block collects. */
342 int flags;
343 };
344
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
348
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
352 basic_block *stack;
353
354 /* number of edges in the stack. That is, positions 0, ..., sp-1
355 have edges. */
356 unsigned int sp;
357
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
360 };
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
362
363 /* Have print_rtl_and_abort give the same information that fancy_abort
364 does. */
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
367
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
373 rtx *, rtx *));
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx, int, int, int));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
379 rtx, int));
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
381
382 static void commit_one_edge_insertion PARAMS ((edge));
383
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
390 basic_block));
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
392 basic_block));
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
394 int));
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool can_fallthru PARAMS ((basic_block, basic_block));
397 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
398 static bool try_simplify_condjump PARAMS ((basic_block));
399 static bool try_forward_edges PARAMS ((int, basic_block));
400 static void tidy_fallthru_edges PARAMS ((void));
401 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
402 static void verify_wide_reg PARAMS ((int, rtx, rtx));
403 static void verify_local_live_at_start PARAMS ((regset, basic_block));
404 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
405 static void notice_stack_pointer_modification PARAMS ((rtx));
406 static void mark_reg PARAMS ((rtx, void *));
407 static void mark_regs_live_at_end PARAMS ((regset));
408 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
409 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
410 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
411 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
412 static int insn_dead_p PARAMS ((struct propagate_block_info *,
413 rtx, int, rtx));
414 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
415 rtx, rtx));
416 static void mark_set_regs PARAMS ((struct propagate_block_info *,
417 rtx, rtx));
418 static void mark_set_1 PARAMS ((struct propagate_block_info *,
419 enum rtx_code, rtx, rtx,
420 rtx, int));
421 #ifdef HAVE_conditional_execution
422 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
423 int, rtx));
424 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
425 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
426 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
427 int));
428 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
429 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
430 static rtx not_reg_cond PARAMS ((rtx));
431 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
432 #endif
433 #ifdef AUTO_INC_DEC
434 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
435 rtx, rtx, rtx, rtx, rtx));
436 static void find_auto_inc PARAMS ((struct propagate_block_info *,
437 rtx, rtx));
438 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
439 rtx));
440 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
441 #endif
442 static void mark_used_reg PARAMS ((struct propagate_block_info *,
443 rtx, rtx, rtx));
444 static void mark_used_regs PARAMS ((struct propagate_block_info *,
445 rtx, rtx, rtx));
446 void dump_flow_info PARAMS ((FILE *));
447 void debug_flow_info PARAMS ((void));
448 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
449 const char *))
450 ATTRIBUTE_NORETURN;
451
452 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
453 rtx));
454 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
455 rtx));
456 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
457 rtx));
458 static void remove_fake_successors PARAMS ((basic_block));
459 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
460 FILE *));
461 static void flow_edge_list_print PARAMS ((const char *, const edge *,
462 int, FILE *));
463 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
464 FILE *));
465 static int flow_loop_nested_p PARAMS ((struct loop *,
466 struct loop *));
467 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
468 edge **));
469 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
470 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
471 static void flow_dfs_compute_reverse_init
472 PARAMS ((depth_first_search_ds));
473 static void flow_dfs_compute_reverse_add_bb
474 PARAMS ((depth_first_search_ds, basic_block));
475 static basic_block flow_dfs_compute_reverse_execute
476 PARAMS ((depth_first_search_ds));
477 static void flow_dfs_compute_reverse_finish
478 PARAMS ((depth_first_search_ds));
479 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
480 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
481 const sbitmap *));
482 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
483 static void flow_loops_tree_build PARAMS ((struct loops *));
484 static int flow_loop_level_compute PARAMS ((struct loop *, int));
485 static int flow_loops_level_compute PARAMS ((struct loops *));
486 static void delete_dead_jumptables PARAMS ((void));
487 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
488 static bool need_fake_edge_p PARAMS ((rtx));
489 \f
490 /* Find basic blocks of the current function.
491 F is the first insn of the function and NREGS the number of register
492 numbers in use. */
493
494 void
495 find_basic_blocks (f, nregs, file)
496 rtx f;
497 int nregs ATTRIBUTE_UNUSED;
498 FILE *file ATTRIBUTE_UNUSED;
499 {
500 int max_uid;
501 timevar_push (TV_CFG);
502
503 /* Flush out existing data. */
504 if (basic_block_info != NULL)
505 {
506 int i;
507
508 clear_edges ();
509
510 /* Clear bb->aux on all extant basic blocks. We'll use this as a
511 tag for reuse during create_basic_block, just in case some pass
512 copies around basic block notes improperly. */
513 for (i = 0; i < n_basic_blocks; ++i)
514 BASIC_BLOCK (i)->aux = NULL;
515
516 VARRAY_FREE (basic_block_info);
517 }
518
519 n_basic_blocks = count_basic_blocks (f);
520
521 /* Size the basic block table. The actual structures will be allocated
522 by find_basic_blocks_1, since we want to keep the structure pointers
523 stable across calls to find_basic_blocks. */
524 /* ??? This whole issue would be much simpler if we called find_basic_blocks
525 exactly once, and thereafter we don't have a single long chain of
526 instructions at all until close to the end of compilation when we
527 actually lay them out. */
528
529 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
530
531 find_basic_blocks_1 (f);
532
533 /* Record the block to which an insn belongs. */
534 /* ??? This should be done another way, by which (perhaps) a label is
535 tagged directly with the basic block that it starts. It is used for
536 more than that currently, but IMO that is the only valid use. */
537
538 max_uid = get_max_uid ();
539 #ifdef AUTO_INC_DEC
540 /* Leave space for insns life_analysis makes in some cases for auto-inc.
541 These cases are rare, so we don't need too much space. */
542 max_uid += max_uid / 10;
543 #endif
544
545 compute_bb_for_insn (max_uid);
546
547 /* Discover the edges of our cfg. */
548 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
549
550 /* Do very simple cleanup now, for the benefit of code that runs between
551 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
552 tidy_fallthru_edges ();
553
554 mark_critical_edges ();
555
556 #ifdef ENABLE_CHECKING
557 verify_flow_info ();
558 #endif
559 timevar_pop (TV_CFG);
560 }
561
562 void
563 check_function_return_warnings ()
564 {
565 if (warn_missing_noreturn
566 && !TREE_THIS_VOLATILE (cfun->decl)
567 && EXIT_BLOCK_PTR->pred == NULL
568 && (lang_missing_noreturn_ok_p
569 && !lang_missing_noreturn_ok_p (cfun->decl)))
570 warning ("function might be possible candidate for attribute `noreturn'");
571
572 /* If we have a path to EXIT, then we do return. */
573 if (TREE_THIS_VOLATILE (cfun->decl)
574 && EXIT_BLOCK_PTR->pred != NULL)
575 warning ("`noreturn' function does return");
576
577 /* If the clobber_return_insn appears in some basic block, then we
578 do reach the end without returning a value. */
579 else if (warn_return_type
580 && cfun->x_clobber_return_insn != NULL
581 && EXIT_BLOCK_PTR->pred != NULL)
582 {
583 int max_uid = get_max_uid ();
584
585 /* If clobber_return_insn was excised by jump1, then renumber_insns
586 can make max_uid smaller than the number still recorded in our rtx.
587 That's fine, since this is a quick way of verifying that the insn
588 is no longer in the chain. */
589 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
590 {
591 /* Recompute insn->block mapping, since the initial mapping is
592 set before we delete unreachable blocks. */
593 compute_bb_for_insn (max_uid);
594
595 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
596 warning ("control reaches end of non-void function");
597 }
598 }
599 }
600
601 /* Count the basic blocks of the function. */
602
603 static int
604 count_basic_blocks (f)
605 rtx f;
606 {
607 register rtx insn;
608 register RTX_CODE prev_code;
609 register int count = 0;
610 int saw_abnormal_edge = 0;
611
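  /* Pretend the previous insn was a jump, so that the first label or real
     insn of the function begins a new basic block.  */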
612 prev_code = JUMP_INSN;
613 for (insn = f; insn; insn = NEXT_INSN (insn))
614 {
615 enum rtx_code code = GET_CODE (insn);
616
617 if (code == CODE_LABEL
618 || (GET_RTX_CLASS (code) == 'i'
619 && (prev_code == JUMP_INSN
620 || prev_code == BARRIER
621 || saw_abnormal_edge)))
622 {
623 saw_abnormal_edge = 0;
624 count++;
625 }
626
627 /* Record whether this insn created an edge. */
628 if (code == CALL_INSN)
629 {
630 rtx note;
631
632 /* If there is a nonlocal goto label and the specified
633 region number isn't -1, we have an edge. */
634 if (nonlocal_goto_handler_labels
635 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
636 || INTVAL (XEXP (note, 0)) >= 0))
637 saw_abnormal_edge = 1;
638
639 else if (can_throw_internal (insn))
640 saw_abnormal_edge = 1;
641 }
642 else if (flag_non_call_exceptions
643 && code == INSN
644 && can_throw_internal (insn))
645 saw_abnormal_edge = 1;
646
647 if (code != NOTE)
648 prev_code = code;
649 }
650
651 /* The rest of the compiler works a bit more smoothly when we don't have to
652 check for the edge case of do-nothing functions with no basic blocks. */
653 if (count == 0)
654 {
655 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
656 count = 1;
657 }
658
659 return count;
660 }
661
662 /* Scan a list of insns for labels referred to other than by jumps.
663 This is used to scan the alternatives of a call placeholder. */
664 static rtx
665 find_label_refs (f, lvl)
666 rtx f;
667 rtx lvl;
668 {
669 rtx insn;
670
671 for (insn = f; insn; insn = NEXT_INSN (insn))
672 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
673 {
674 rtx note;
675
676 /* Make a list of all labels referred to other than by jumps
677 (which just don't have the REG_LABEL notes).
678
679 Make a special exception for labels followed by an ADDR*VEC,
680 as this would be a part of the tablejump setup code.
681
682 Make a special exception to registers loaded with label
683 values just before jump insns that use them. */
684
685 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
686 if (REG_NOTE_KIND (note) == REG_LABEL)
687 {
688 rtx lab = XEXP (note, 0), next;
689
690 if ((next = next_nonnote_insn (lab)) != NULL
691 && GET_CODE (next) == JUMP_INSN
692 && (GET_CODE (PATTERN (next)) == ADDR_VEC
693 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
694 ;
695 else if (GET_CODE (lab) == NOTE)
696 ;
697 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
698 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
699 ;
700 else
701 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
702 }
703 }
704
705 return lvl;
706 }
707
708 /* Assume that someone emitted code with control flow instructions into the
709 basic block. Update the CFG data structures accordingly. */
710 void
711 find_sub_basic_blocks (bb)
712 basic_block bb;
713 {
714 rtx insn = bb->head;
715 rtx end = bb->end;
716 rtx jump_insn = NULL_RTX;
717 edge falltru = 0;
718 basic_block first_bb = bb;
719 int i;
720
721 if (insn == bb->end)
722 return;
723
724 if (GET_CODE (insn) == CODE_LABEL)
725 insn = NEXT_INSN (insn);
726
727 /* Scan insn chain and try to find new basic block boundaries. */
728 while (1)
729 {
730 enum rtx_code code = GET_CODE (insn);
731 switch (code)
732 {
733 case BARRIER:
734 if (!jump_insn)
735 abort ();
736 break;
737 /* On code label, split current basic block. */
738 case CODE_LABEL:
739 falltru = split_block (bb, PREV_INSN (insn));
740 if (jump_insn)
741 bb->end = jump_insn;
742 bb = falltru->dest;
743 remove_edge (falltru);
744 jump_insn = 0;
745 if (LABEL_ALTERNATE_NAME (insn))
746 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
747 break;
748 case INSN:
749 case JUMP_INSN:
750 /* If we've previously seen a JUMP_INSN in this block, split the block
751 here and move the end of the previous block back to that jump. */
752 if (jump_insn)
753 {
754 falltru = split_block (bb, PREV_INSN (insn));
755 bb->end = jump_insn;
756 bb = falltru->dest;
757 remove_edge (falltru);
758 jump_insn = 0;
759 }
760 /* We need some special care for those expressions. */
761 if (GET_CODE (insn) == JUMP_INSN)
762 {
763 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
764 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
765 abort();
766 jump_insn = insn;
767 }
768 break;
769 default:
770 break;
771 }
772 if (insn == end)
773 break;
774 insn = NEXT_INSN (insn);
775 }
776
777 /* In case the expander replaced a normal insn by a sequence terminated by
778 a return and a barrier, or possibly some other sequence not behaving like
779 an ordinary jump, we need to take care to move the basic block boundary. */
780 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
781 bb->end = jump_insn;
782
783 /* We've possibly replaced the conditional jump by a conditional jump
784 followed by a cleanup at the fallthru edge, so the outgoing edges may
785 be dead. */
786 purge_dead_edges (bb);
787
788 /* Now re-scan and wire in all edges. This expects simple (conditional)
789 jumps at the end of each new basic block. */
790 make_edges (NULL, first_bb->index, bb->index, 1);
791
792 /* Update branch probabilities. Expect only (un)conditional jumps
793 to be created with only the forward edges. */
794 for (i = first_bb->index; i <= bb->index; i++)
795 {
796 edge e,f;
797 basic_block b = BASIC_BLOCK (i);
798 if (b != first_bb)
799 {
800 b->count = 0;
801 b->frequency = 0;
802 for (e = b->pred; e; e=e->pred_next)
803 {
804 b->count += e->count;
805 b->frequency += EDGE_FREQUENCY (e);
806 }
807 }
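      /* A conditional jump at the end of the block has exactly two
	 successor edges; distribute the block's count between them
	 according to the REG_BR_PROB note, if any.  */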
808 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
809 {
810 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
811 int probability;
812
813 if (!note)
814 continue;
815 probability = INTVAL (XEXP (find_reg_note (b->end,
816 REG_BR_PROB,
817 NULL), 0));
818 e = BRANCH_EDGE (b);
819 e->probability = probability;
820 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
821 / REG_BR_PROB_BASE);
822 f = FALLTHRU_EDGE (b);
823 f->probability = REG_BR_PROB_BASE - probability;
824 f->count = b->count - e->count;
825 }
826 if (b->succ && !b->succ->succ_next)
827 {
828 e = b->succ;
829 e->probability = REG_BR_PROB_BASE;
830 e->count = b->count;
831 }
832 }
833 }
834
835 /* Find all basic blocks of the function whose first insn is F.
836
837 Collect and return a list of labels whose addresses are taken. This
838 will be used in make_edges for use with computed gotos. */
839
840 static void
841 find_basic_blocks_1 (f)
842 rtx f;
843 {
844 register rtx insn, next;
845 int i = 0;
846 rtx bb_note = NULL_RTX;
847 rtx lvl = NULL_RTX;
848 rtx trll = NULL_RTX;
849 rtx head = NULL_RTX;
850 rtx end = NULL_RTX;
851
852 /* We process the instructions in a slightly different way than we did
853 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
854 closed out the previous block, so that it gets attached at the proper
855 place. Since this form should be equivalent to the previous,
856 count_basic_blocks continues to use the old form as a check. */
857
858 for (insn = f; insn; insn = next)
859 {
860 enum rtx_code code = GET_CODE (insn);
861
862 next = NEXT_INSN (insn);
863
864 switch (code)
865 {
866 case NOTE:
867 {
868 int kind = NOTE_LINE_NUMBER (insn);
869
870 /* Look for basic block notes with which to keep the
871 basic_block_info pointers stable. Unthread the note now;
872 we'll put it back at the right place in create_basic_block.
873 Or not at all if we've already found a note in this block. */
874 if (kind == NOTE_INSN_BASIC_BLOCK)
875 {
876 if (bb_note == NULL_RTX)
877 bb_note = insn;
878 else
879 next = flow_delete_insn (insn);
880 }
881 break;
882 }
883
884 case CODE_LABEL:
885 /* A basic block starts at a label. If we've closed one off due
886 to a barrier or some such, no need to do it again. */
887 if (head != NULL_RTX)
888 {
889 create_basic_block (i++, head, end, bb_note);
890 bb_note = NULL_RTX;
891 }
892
893 head = end = insn;
894 break;
895
896 case JUMP_INSN:
897 /* A basic block ends at a jump. */
898 if (head == NULL_RTX)
899 head = insn;
900 else
901 {
902 /* ??? Make a special check for table jumps. The way this
903 happens is truly and amazingly gross. We are about to
904 create a basic block that contains just a code label and
905 an addr*vec jump insn. Worse, an addr_diff_vec creates
906 its own natural loop.
907
908 Prevent this bit of brain damage, pasting things together
909 correctly in make_edges.
910
911 The correct solution involves emitting the table directly
912 on the tablejump instruction as a note, or JUMP_LABEL. */
913
914 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
915 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
916 {
917 head = end = NULL;
918 n_basic_blocks--;
919 break;
920 }
921 }
922 end = insn;
923 goto new_bb_inclusive;
924
925 case BARRIER:
926 /* A basic block ends at a barrier. It may be that an unconditional
927 jump already closed the basic block -- no need to do it again. */
928 if (head == NULL_RTX)
929 break;
930 goto new_bb_exclusive;
931
932 case CALL_INSN:
933 {
934 /* Record whether this call created an edge. */
935 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
936 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
937
938 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
939 {
940 /* Scan each of the alternatives for label refs. */
941 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
942 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
943 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
944 /* Record its tail recursion label, if any. */
945 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
946 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
947 }
948
949 /* A basic block ends at a call that can either throw or
950 do a non-local goto. */
951 if ((nonlocal_goto_handler_labels && region >= 0)
952 || can_throw_internal (insn))
953 {
954 new_bb_inclusive:
955 if (head == NULL_RTX)
956 head = insn;
957 end = insn;
958
959 new_bb_exclusive:
960 create_basic_block (i++, head, end, bb_note);
961 head = end = NULL_RTX;
962 bb_note = NULL_RTX;
963 break;
964 }
965 }
966 /* Fall through. */
967
968 case INSN:
969 /* Non-call exceptions generate new blocks just like calls. */
970 if (flag_non_call_exceptions && can_throw_internal (insn))
971 goto new_bb_inclusive;
972
973 if (head == NULL_RTX)
974 head = insn;
975 end = insn;
976 break;
977
978 default:
979 abort ();
980 }
981
982 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
983 {
984 rtx note;
985
986 /* Make a list of all labels referred to other than by jumps.
987
988 Make a special exception for labels followed by an ADDR*VEC,
989 as this would be a part of the tablejump setup code.
990
991 Make a special exception to registers loaded with label
992 values just before jump insns that use them. */
993
994 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
995 if (REG_NOTE_KIND (note) == REG_LABEL)
996 {
997 rtx lab = XEXP (note, 0), next;
998
999 if ((next = next_nonnote_insn (lab)) != NULL
1000 && GET_CODE (next) == JUMP_INSN
1001 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1002 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1003 ;
1004 else if (GET_CODE (lab) == NOTE)
1005 ;
1006 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1007 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
1008 ;
1009 else
1010 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1011 }
1012 }
1013 }
1014
1015 if (head != NULL_RTX)
1016 create_basic_block (i++, head, end, bb_note);
1017 else if (bb_note)
1018 flow_delete_insn (bb_note);
1019
1020 if (i != n_basic_blocks)
1021 abort ();
1022
1023 label_value_list = lvl;
1024 tail_recursion_label_list = trll;
1025 }
1026
1027 /* Tidy the CFG by deleting unreachable code and whatnot. */
1028
1029 void
1030 cleanup_cfg (mode)
1031 int mode;
1032 {
1033 timevar_push (TV_CLEANUP_CFG);
1034 delete_unreachable_blocks ();
1035 if (try_optimize_cfg (mode))
1036 delete_unreachable_blocks ();
1037 mark_critical_edges ();
1038
1039 /* Kill the data we won't maintain. */
1040 free_EXPR_LIST_list (&label_value_list);
1041 free_EXPR_LIST_list (&tail_recursion_label_list);
1042 timevar_pop (TV_CLEANUP_CFG);
1043 }
1044
1045 /* Create a new basic block consisting of the instructions between
1046 HEAD and END inclusive. Reuses the note and basic block struct
1047 in BB_NOTE, if any. */
1048
1049 void
1050 create_basic_block (index, head, end, bb_note)
1051 int index;
1052 rtx head, end, bb_note;
1053 {
1054 basic_block bb;
1055
1056 if (bb_note
1057 && ! RTX_INTEGRATED_P (bb_note)
1058 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1059 && bb->aux == NULL)
1060 {
1061 /* If we found an existing note, thread it back onto the chain. */
1062
1063 rtx after;
1064
1065 if (GET_CODE (head) == CODE_LABEL)
1066 after = head;
1067 else
1068 {
1069 after = PREV_INSN (head);
1070 head = bb_note;
1071 }
1072
1073 if (after != bb_note && NEXT_INSN (after) != bb_note)
1074 reorder_insns (bb_note, bb_note, after);
1075 }
1076 else
1077 {
1078 /* Otherwise we must create a note and a basic block structure.
1079 Since we allow basic block structs in rtl, give the struct
1080 the same lifetime by allocating it off the function obstack
1081 rather than using malloc. */
1082
1083 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1084 memset (bb, 0, sizeof (*bb));
1085
1086 if (GET_CODE (head) == CODE_LABEL)
1087 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1088 else
1089 {
1090 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1091 head = bb_note;
1092 }
1093 NOTE_BASIC_BLOCK (bb_note) = bb;
1094 }
1095
1096 /* Always include the bb note in the block. */
1097 if (NEXT_INSN (end) == bb_note)
1098 end = bb_note;
1099
1100 bb->head = head;
1101 bb->end = end;
1102 bb->index = index;
1103 BASIC_BLOCK (index) = bb;
1104
1105 /* Tag the block so that we know it has been used when considering
1106 other basic block notes. */
1107 bb->aux = bb;
1108 }
1109 \f
1110 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1111 note associated with the BLOCK. */
1112
1113 rtx
1114 first_insn_after_basic_block_note (block)
1115 basic_block block;
1116 {
1117 rtx insn;
1118
1119 /* Get the first instruction in the block. */
1120 insn = block->head;
1121
1122 if (insn == NULL_RTX)
1123 return NULL_RTX;
1124 if (GET_CODE (insn) == CODE_LABEL)
1125 insn = NEXT_INSN (insn);
1126 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1127 abort ();
1128
1129 return NEXT_INSN (insn);
1130 }
1131
1132 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1133 indexed by INSN_UID. MAX is the size of the array. */
1134
1135 void
1136 compute_bb_for_insn (max)
1137 int max;
1138 {
1139 int i;
1140
1141 if (basic_block_for_insn)
1142 VARRAY_FREE (basic_block_for_insn);
1143 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1144
1145 for (i = 0; i < n_basic_blocks; ++i)
1146 {
1147 basic_block bb = BASIC_BLOCK (i);
1148 rtx insn, end;
1149
1150 end = bb->end;
1151 insn = bb->head;
1152 while (1)
1153 {
1154 int uid = INSN_UID (insn);
1155 if (uid < max)
1156 VARRAY_BB (basic_block_for_insn, uid) = bb;
1157 if (insn == end)
1158 break;
1159 insn = NEXT_INSN (insn);
1160 }
1161 }
1162 }
1163
1164 /* Free the memory associated with the edge structures. */
1165
1166 void
1167 clear_edges ()
1168 {
1169 int i;
1170 edge n, e;
1171
1172 for (i = 0; i < n_basic_blocks; ++i)
1173 {
1174 basic_block bb = BASIC_BLOCK (i);
1175
1176 for (e = bb->succ; e; e = n)
1177 {
1178 n = e->succ_next;
1179 free (e);
1180 }
1181
1182 bb->succ = 0;
1183 bb->pred = 0;
1184 }
1185
1186 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1187 {
1188 n = e->succ_next;
1189 free (e);
1190 }
1191
1192 ENTRY_BLOCK_PTR->succ = 0;
1193 EXIT_BLOCK_PTR->pred = 0;
1194
1195 n_edges = 0;
1196 }
1197
1198 /* Identify the edges between basic blocks MIN to MAX.
1199
1200 LABEL_VALUE_LIST is a list of labels referred to other than by jumps;
1201 blocks that are otherwise unreachable may be reachable via a computed
1202 jump to one of these labels.
1203
1204 If UPDATE_P is nonzero, the blocks are assumed to already have some edges, which are primed into the edge cache so that duplicates are not created. */
1205
1206 static void
1207 make_edges (label_value_list, min, max, update_p)
1208 rtx label_value_list;
1209 int min, max, update_p;
1210 {
1211 int i;
1212 sbitmap *edge_cache = NULL;
1213
1214 /* Assume no computed jump; revise as we create edges. */
1215 current_function_has_computed_jump = 0;
1216
1217 /* Heavy use of computed goto in machine-generated code can lead to
1218 nearly fully-connected CFGs. In that case we spend a significant
1219 amount of time searching the edge lists for duplicates. */
1220 if (forced_labels || label_value_list)
1221 {
1222 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1223 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1224
1225 if (update_p)
1226 for (i = min; i <= max; ++i)
1227 {
1228 edge e;
1229 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1230 if (e->dest != EXIT_BLOCK_PTR)
1231 SET_BIT (edge_cache[i], e->dest->index);
1232 }
1233 }
1234
1235 /* By nature of the way these get numbered, block 0 is always the entry. */
1236 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1237
1238 for (i = min; i <= max; ++i)
1239 {
1240 basic_block bb = BASIC_BLOCK (i);
1241 rtx insn, x;
1242 enum rtx_code code;
1243 int force_fallthru = 0;
1244
1245 if (GET_CODE (bb->head) == CODE_LABEL
1246 && LABEL_ALTERNATE_NAME (bb->head))
1247 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1248
1249 /* Examine the last instruction of the block, and discover the
1250 ways we can leave the block. */
1251
1252 insn = bb->end;
1253 code = GET_CODE (insn);
1254
1255 /* A branch. */
1256 if (code == JUMP_INSN)
1257 {
1258 rtx tmp;
1259
1260 /* Recognize exception handling placeholders. */
1261 if (GET_CODE (PATTERN (insn)) == RESX)
1262 make_eh_edge (edge_cache, bb, insn);
1263
1264 /* Recognize a non-local goto as a branch outside the
1265 current function. */
1266 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1267 ;
1268
1269 /* ??? Recognize a tablejump and do the right thing. */
1270 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1271 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1272 && GET_CODE (tmp) == JUMP_INSN
1273 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1274 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1275 {
1276 rtvec vec;
1277 int j;
1278
1279 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1280 vec = XVEC (PATTERN (tmp), 0);
1281 else
1282 vec = XVEC (PATTERN (tmp), 1);
1283
1284 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1285 make_label_edge (edge_cache, bb,
1286 XEXP (RTVEC_ELT (vec, j), 0), 0);
1287
1288 /* Some targets (eg, ARM) emit a conditional jump that also
1289 contains the out-of-range target. Scan for these and
1290 add an edge if necessary. */
1291 if ((tmp = single_set (insn)) != NULL
1292 && SET_DEST (tmp) == pc_rtx
1293 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1294 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1295 make_label_edge (edge_cache, bb,
1296 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1297
1298 #ifdef CASE_DROPS_THROUGH
1299 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1300 us naturally detecting fallthru into the next block. */
1301 force_fallthru = 1;
1302 #endif
1303 }
1304
1305 /* If this is a computed jump, then mark it as reaching
1306 everything on the label_value_list and forced_labels list. */
1307 else if (computed_jump_p (insn))
1308 {
1309 current_function_has_computed_jump = 1;
1310
1311 for (x = label_value_list; x; x = XEXP (x, 1))
1312 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1313
1314 for (x = forced_labels; x; x = XEXP (x, 1))
1315 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1316 }
1317
1318 /* A return insn creates an edge out to the exit block. */
1319 else if (returnjump_p (insn))
1320 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1321
1322 /* Otherwise, we have a plain conditional or unconditional jump. */
1323 else
1324 {
1325 if (! JUMP_LABEL (insn))
1326 abort ();
1327 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1328 }
1329 }
1330
1331 /* If this is a sibling call insn, then this is in effect a
1332 combined call and return, and so we need an edge to the
1333 exit block. No need to worry about EH edges, since we
1334 wouldn't have created the sibling call in the first place. */
1335
1336 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1337 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1338 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1339
1340 /* If this is a CALL_INSN, then mark it as reaching the active EH
1341 handler for this CALL_INSN. If we're handling non-call
1342 exceptions then any insn can reach any of the active handlers.
1343
1344 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1345
1346 else if (code == CALL_INSN || flag_non_call_exceptions)
1347 {
1348 /* Add any appropriate EH edges. */
1349 make_eh_edge (edge_cache, bb, insn);
1350
1351 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1352 {
1353 /* ??? This could be made smarter: in some cases it's possible
1354 to tell that certain calls will not do a nonlocal goto.
1355
1356 For example, if the nested functions that do the nonlocal
1357 gotos do not have their addresses taken, then only calls to
1358 those functions or to other nested functions that use them
1359 could possibly do nonlocal gotos. */
1360 /* We do know that a REG_EH_REGION note with a value less
1361 than 0 is guaranteed not to perform a non-local goto. */
1362 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1363 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1364 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1365 make_label_edge (edge_cache, bb, XEXP (x, 0),
1366 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1367 }
1368 }
1369
1370 /* Find out if we can drop through to the next block. */
1371 insn = next_nonnote_insn (insn);
1372 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1373 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1374 else if (i + 1 < n_basic_blocks)
1375 {
1376 rtx tmp = BLOCK_HEAD (i + 1);
1377 if (GET_CODE (tmp) == NOTE)
1378 tmp = next_nonnote_insn (tmp);
1379 if (force_fallthru || insn == tmp)
1380 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1381 }
1382 }
1383
1384 if (edge_cache)
1385 sbitmap_vector_free (edge_cache);
1386 }
1387
1388 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1389 about the edge that is accumulated between calls. */
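/* When EDGE_CACHE is null (as for callers outside of make_edges), possible
   duplicate edges are detected by scanning SRC's existing successor list
   instead of the cache.  */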
1390
1391 void
1392 make_edge (edge_cache, src, dst, flags)
1393 sbitmap *edge_cache;
1394 basic_block src, dst;
1395 int flags;
1396 {
1397 int use_edge_cache;
1398 edge e;
1399
1400 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1401 many edges to them, and we didn't allocate memory for it. */
1402 use_edge_cache = (edge_cache
1403 && src != ENTRY_BLOCK_PTR
1404 && dst != EXIT_BLOCK_PTR);
1405
1406 /* Make sure we don't add duplicate edges. */
1407 switch (use_edge_cache)
1408 {
1409 default:
1410 /* Quick test for non-existence of the edge. */
1411 if (! TEST_BIT (edge_cache[src->index], dst->index))
1412 break;
1413
1414 /* The edge exists; early exit if no work to do. */
1415 if (flags == 0)
1416 return;
1417
1418 /* FALLTHRU */
1419 case 0:
1420 for (e = src->succ; e; e = e->succ_next)
1421 if (e->dest == dst)
1422 {
1423 e->flags |= flags;
1424 return;
1425 }
1426 break;
1427 }
1428
1429 e = (edge) xcalloc (1, sizeof (*e));
1430 n_edges++;
1431
1432 e->succ_next = src->succ;
1433 e->pred_next = dst->pred;
1434 e->src = src;
1435 e->dest = dst;
1436 e->flags = flags;
1437
1438 src->succ = e;
1439 dst->pred = e;
1440
1441 if (use_edge_cache)
1442 SET_BIT (edge_cache[src->index], dst->index);
1443 }
1444
1445 /* Create an edge from a basic block to a label. */
1446
1447 static void
1448 make_label_edge (edge_cache, src, label, flags)
1449 sbitmap *edge_cache;
1450 basic_block src;
1451 rtx label;
1452 int flags;
1453 {
1454 if (GET_CODE (label) != CODE_LABEL)
1455 abort ();
1456
1457 /* If the label was never emitted, this insn is junk, but avoid a
1458 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1459 as a result of a syntax error and a diagnostic has already been
1460 printed. */
1461
1462 if (INSN_UID (label) == 0)
1463 return;
1464
1465 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1466 }
1467
1468 /* Create the exception handling edges generated by INSN. */
1469
1470 static void
1471 make_eh_edge (edge_cache, src, insn)
1472 sbitmap *edge_cache;
1473 basic_block src;
1474 rtx insn;
1475 {
1476 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1477 rtx handlers, i;
1478
1479 handlers = reachable_handlers (insn);
1480
1481 for (i = handlers; i; i = XEXP (i, 1))
1482 make_label_edge (edge_cache, src, XEXP (i, 0),
1483 EDGE_ABNORMAL | EDGE_EH | is_call);
1484
1485 free_INSN_LIST_list (&handlers);
1486 }
1487
1488 /* Identify critical edges and set the bits appropriately. */
1489
1490 void
1491 mark_critical_edges ()
1492 {
1493 int i, n = n_basic_blocks;
1494 basic_block bb;
1495
1496 /* We begin with the entry block. This is not terribly important now,
1497 but could be if a front end (Fortran) implemented alternate entry
1498 points. */
1499 bb = ENTRY_BLOCK_PTR;
1500 i = -1;
1501
1502 while (1)
1503 {
1504 edge e;
1505
1506 /* (1) Critical edges must have a source with multiple successors. */
1507 if (bb->succ && bb->succ->succ_next)
1508 {
1509 for (e = bb->succ; e; e = e->succ_next)
1510 {
1511 /* (2) Critical edges must have a destination with multiple
1512 predecessors. Note that we know there is at least one
1513 predecessor -- the edge we followed to get here. */
1514 if (e->dest->pred->pred_next)
1515 e->flags |= EDGE_CRITICAL;
1516 else
1517 e->flags &= ~EDGE_CRITICAL;
1518 }
1519 }
1520 else
1521 {
1522 for (e = bb->succ; e; e = e->succ_next)
1523 e->flags &= ~EDGE_CRITICAL;
1524 }
1525
1526 if (++i >= n)
1527 break;
1528 bb = BASIC_BLOCK (i);
1529 }
1530 }
1531 \f
1532 /* Mark the back edges in DFS traversal.
1533 Return non-zero if a loop (natural or otherwise) is present.
1534 Inspired by Depth_First_Search_PP described in:
1535
1536 Advanced Compiler Design and Implementation
1537 Steven Muchnick
1538 Morgan Kaufmann, 1997
1539
1540 and heavily borrowed from flow_depth_first_order_compute. */
1541
1542 bool
1543 mark_dfs_back_edges ()
1544 {
1545 edge *stack;
1546 int *pre;
1547 int *post;
1548 int sp;
1549 int prenum = 1;
1550 int postnum = 1;
1551 sbitmap visited;
1552 bool found = false;
1553
1554 /* Allocate the preorder and postorder number arrays. */
1555 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
1556 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
1557
1558 /* Allocate stack for back-tracking up CFG. */
1559 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
1560 sp = 0;
1561
1562 /* Allocate bitmap to track nodes that have been visited. */
1563 visited = sbitmap_alloc (n_basic_blocks);
1564
1565 /* None of the nodes in the CFG have been visited yet. */
1566 sbitmap_zero (visited);
1567
1568 /* Push the first edge on to the stack. */
1569 stack[sp++] = ENTRY_BLOCK_PTR->succ;
1570
1571 while (sp)
1572 {
1573 edge e;
1574 basic_block src;
1575 basic_block dest;
1576
1577 /* Look at the edge on the top of the stack. */
1578 e = stack[sp - 1];
1579 src = e->src;
1580 dest = e->dest;
1581 e->flags &= ~EDGE_DFS_BACK;
1582
1583 /* Check if the edge destination has been visited yet. */
1584 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1585 {
1586 /* Mark that we have visited the destination. */
1587 SET_BIT (visited, dest->index);
1588
1589 pre[dest->index] = prenum++;
1590
1591 if (dest->succ)
1592 {
1593 /* Since the DEST node has been visited for the first
1594 time, check its successors. */
1595 stack[sp++] = dest->succ;
1596 }
1597 else
1598 post[dest->index] = postnum++;
1599 }
1600 else
1601 {
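	  /* DEST has already been visited.  The edge is a DFS back edge
	     exactly when DEST is still on the search stack, i.e. it was
	     entered before SRC (the pre[] comparison) and has not been
	     finished yet (its post[] number is still zero).  */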
1602 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
1603 && pre[src->index] >= pre[dest->index]
1604 && post[dest->index] == 0)
1605 e->flags |= EDGE_DFS_BACK, found = true;
1606
1607 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
1608 post[src->index] = postnum++;
1609
1610 if (e->succ_next)
1611 stack[sp - 1] = e->succ_next;
1612 else
1613 sp--;
1614 }
1615 }
1616
1617 free (pre);
1618 free (post);
1619 free (stack);
1620 sbitmap_free (visited);
1621
1622 return found;
1623 }
1624 \f
1625 /* Split a block BB after insn INSN creating a new fallthru edge.
1626 Return the new edge. Note that to keep other parts of the compiler happy,
1627 this function renumbers all the basic blocks so that the new
1628 one has a number one greater than the block split. */
1629
1630 edge
1631 split_block (bb, insn)
1632 basic_block bb;
1633 rtx insn;
1634 {
1635 basic_block new_bb;
1636 edge new_edge;
1637 edge e;
1638 rtx bb_note;
1639 int i, j;
1640
1641 /* There is no point splitting the block after its end. */
1642 if (bb->end == insn)
1643 return 0;
1644
1645 /* Create the new structures. */
1646 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1647 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1648 n_edges++;
1649
1650 memset (new_bb, 0, sizeof (*new_bb));
1651
1652 new_bb->head = NEXT_INSN (insn);
1653 new_bb->end = bb->end;
1654 bb->end = insn;
1655
1656 new_bb->succ = bb->succ;
1657 bb->succ = new_edge;
1658 new_bb->pred = new_edge;
1659 new_bb->count = bb->count;
1660 new_bb->frequency = bb->frequency;
1661 new_bb->loop_depth = bb->loop_depth;
1662
1663 new_edge->src = bb;
1664 new_edge->dest = new_bb;
1665 new_edge->flags = EDGE_FALLTHRU;
1666 new_edge->probability = REG_BR_PROB_BASE;
1667 new_edge->count = bb->count;
1668
1669 /* Redirect the src of the successor edges of bb to point to new_bb. */
1670 for (e = new_bb->succ; e; e = e->succ_next)
1671 e->src = new_bb;
1672
1673 /* Place the new block just after the block being split. */
1674 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1675
1676 /* Some parts of the compiler expect blocks to be numbered in
1677 sequential order, so insert the new block immediately after the
1678 block being split. */
1679 j = bb->index;
1680 for (i = n_basic_blocks - 1; i > j + 1; --i)
1681 {
1682 basic_block tmp = BASIC_BLOCK (i - 1);
1683 BASIC_BLOCK (i) = tmp;
1684 tmp->index = i;
1685 }
1686
1687 BASIC_BLOCK (i) = new_bb;
1688 new_bb->index = i;
1689
1690 if (GET_CODE (new_bb->head) == CODE_LABEL)
1691 {
1692 /* Create the basic block note. */
1693 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1694 new_bb->head);
1695 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1696
1697 /* If the only thing in this new block was the label, make sure
1698 the block note gets included. */
1699 if (new_bb->head == new_bb->end)
1700 new_bb->end = bb_note;
1701 }
1702 else
1703 {
1704 /* Create the basic block note. */
1705 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1706 new_bb->head);
1707 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1708 new_bb->head = bb_note;
1709 }
1710
1711 update_bb_for_insn (new_bb);
1712
1713 if (bb->global_live_at_start)
1714 {
1715 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1716 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1717 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1718
1719 /* We now have to calculate which registers are live at the end
1720 of the split basic block and at the start of the new basic
1721 block. Start with those registers that are known to be live
1722 at the end of the original basic block and get
1723 propagate_block to determine which registers are live. */
1724 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1725 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1726 COPY_REG_SET (bb->global_live_at_end,
1727 new_bb->global_live_at_start);
1728 }
1729
1730 return new_edge;
1731 }
1732
1733 /* Return the label at the head of the basic block. Create one if it doesn't exist. */
1734 rtx
1735 block_label (block)
1736 basic_block block;
1737 {
1738 if (block == EXIT_BLOCK_PTR)
1739 return NULL_RTX;
1740 if (GET_CODE (block->head) != CODE_LABEL)
1741 {
1742 block->head = emit_label_before (gen_label_rtx (), block->head);
1743 if (basic_block_for_insn)
1744 set_block_for_insn (block->head, block);
1745 }
1746 return block->head;
1747 }
1748
1749 /* Return true if the block has no effect and only forwards control flow to
1750 its single destination. */
1751 bool
1752 forwarder_block_p (bb)
1753 basic_block bb;
1754 {
1755 rtx insn = bb->head;
1756 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1757 || !bb->succ || bb->succ->succ_next)
1758 return false;
1759
1760 while (insn != bb->end)
1761 {
1762 if (active_insn_p (insn))
1763 return false;
1764 insn = NEXT_INSN (insn);
1765 }
1766 return (!active_insn_p (insn)
1767 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1768 }
1769
1770 /* Return nonzero if we can reach TARGET from SRC by falling through. */
1771 static bool
1772 can_fallthru (src, target)
1773 basic_block src, target;
1774 {
1775 rtx insn = src->end;
1776 rtx insn2 = target->head;
1777
1778 if (src->index + 1 == target->index && !active_insn_p (insn2))
1779 insn2 = next_active_insn (insn2);
1780 /* ??? Later we may add code to move jump tables offline. */
1781 return next_active_insn (insn) == insn2;
1782 }
1783
1784 /* Attempt to perform edge redirection by replacing a possibly complex jump
1785 instruction with an unconditional jump, or by removing the jump completely.
1786 This can apply only if all edges now point to the same block.
1787
1788 The parameters and return values are equivalent to redirect_edge_and_branch.
1789 */
1790 static bool
1791 try_redirect_by_replacing_jump (e, target)
1792 edge e;
1793 basic_block target;
1794 {
1795 basic_block src = e->src;
1796 rtx insn = src->end, kill_from;
1797 edge tmp;
1798 rtx set;
1799 int fallthru = 0;
1800
1801 /* Verify that all targets will be TARGET. */
1802 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1803 if (tmp->dest != target && tmp != e)
1804 break;
1805 if (tmp || !onlyjump_p (insn))
1806 return false;
1807
1808 /* Avoid removing branch with side effects. */
1809 set = single_set (insn);
1810 if (!set || side_effects_p (set))
1811 return false;
1812
1813 /* In case we zap a conditional jump, we'll need to kill
1814 the cc0 setter too. */
1815 kill_from = insn;
1816 #ifdef HAVE_cc0
1817 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1818 kill_from = PREV_INSN (insn);
1819 #endif
1820
1821 /* See if we can create the fallthru edge. */
1822 if (can_fallthru (src, target))
1823 {
1824 src->end = PREV_INSN (kill_from);
1825 if (rtl_dump_file)
1826 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1827 fallthru = 1;
1828
1829 /* Selectively unlink the whole insn chain. */
1830 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1831 }
1832 /* If this is already a simplejump, redirect it. */
1833 else if (simplejump_p (insn))
1834 {
1835 if (e->dest == target)
1836 return false;
1837 if (rtl_dump_file)
1838 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1839 INSN_UID (insn), e->dest->index, target->index);
1840 redirect_jump (insn, block_label (target), 0);
1841 }
1842 /* Otherwise, replace the possibly complicated jump insn by a simple jump insn. */
1843 else
1844 {
1845 rtx target_label = block_label (target);
1846 rtx barrier;
1847
1848 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1849 JUMP_LABEL (src->end) = target_label;
1850 LABEL_NUSES (target_label)++;
1851 if (basic_block_for_insn)
1852 set_block_for_new_insns (src->end, src);
1853 if (rtl_dump_file)
1854 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1855 INSN_UID (insn), INSN_UID (src->end));
1856
1857 flow_delete_insn_chain (kill_from, insn);
1858
1859 barrier = next_nonnote_insn (src->end);
1860 if (!barrier || GET_CODE (barrier) != BARRIER)
1861 emit_barrier_after (src->end);
1862 }
1863
1864 /* Keep only one edge out and set proper flags. */
1865 while (src->succ->succ_next)
1866 remove_edge (src->succ);
1867 e = src->succ;
1868 if (fallthru)
1869 e->flags = EDGE_FALLTHRU;
1870 else
1871 e->flags = 0;
1872 e->probability = REG_BR_PROB_BASE;
1873 e->count = src->count;
1874
1875 /* We don't want a block to end on a line-number note since that has
1876 the potential of changing the code between -g and not -g. */
1877 while (GET_CODE (e->src->end) == NOTE
1878 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1879 {
1880 rtx prev = PREV_INSN (e->src->end);
1881 flow_delete_insn (e->src->end);
1882 e->src->end = prev;
1883 }
1884
1885 if (e->dest != target)
1886 redirect_edge_succ (e, target);
1887 return true;
1888 }
1889
1890 /* Return the last LOOP_BEG note appearing after INSN, before the start of
1891 the next basic block. Return INSN if there are no such notes.
1892 
1893 When emitting a jump to redirect a fallthru edge, it should always
1894 appear after the LOOP_BEG notes, as the loop optimizer expects a loop to
1895 either start with a fallthru edge or with a jump following the LOOP_BEG
1896 note that jumps to the loop exit test. */
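/* For instance, given the insn stream

     INSN
     NOTE_INSN_LOOP_END
     NOTE_INSN_LOOP_BEG
     NOTE_INSN_BASIC_BLOCK

   the LOOP_BEG note is returned; if no LOOP_BEG note follows INSN before
   the next basic block note or non-note insn, INSN itself is returned.  */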
1897 rtx
1898 last_loop_beg_note (insn)
1899 rtx insn;
1900 {
1901 rtx last = insn;
1902 insn = NEXT_INSN (insn);
1903 while (GET_CODE (insn) == NOTE
1904 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1905 {
1906 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1907 last = insn;
1908 insn = NEXT_INSN (insn);
1909 }
1910 return last;
1911 }
1912
1913 /* Attempt to change code to redirect edge E to TARGET.
1914 Don't do that at the expense of adding new instructions or reordering
1915 basic blocks.
1916 
1917 The function can also be called with the edge destination equivalent to
1918 TARGET. Then it should try the simplifications and do nothing if
1919 none is possible.
1920 
1921 Return true if the transformation succeeded. We still return false in case
1922 E already pointed to TARGET and we didn't manage to simplify the
1923 instruction stream. */
1924 bool
1925 redirect_edge_and_branch (e, target)
1926 edge e;
1927 basic_block target;
1928 {
1929 rtx tmp;
1930 rtx old_label = e->dest->head;
1931 basic_block src = e->src;
1932 rtx insn = src->end;
1933
1934 if (e->flags & EDGE_COMPLEX)
1935 return false;
1936
1937 if (try_redirect_by_replacing_jump (e, target))
1938 return true;
1939 /* Do this fast path late, as we want above code to simplify for cases
1940 where called on single edge leaving basic block containing nontrivial
1941 jump insn. */
1942 else if (e->dest == target)
1943 return false;
1944
1945 /* We can only redirect non-fallthru edges of jump insn. */
1946 if (e->flags & EDGE_FALLTHRU)
1947 return false;
1948 if (GET_CODE (insn) != JUMP_INSN)
1949 return false;
1950
1951 /* Recognize a tablejump and adjust all matching cases. */
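  /* For a tablejump, JUMP_LABEL of the dispatch insn points to the
     CODE_LABEL heading the jump table; the insn following that label is a
     JUMP_INSN whose pattern is the ADDR_VEC or ADDR_DIFF_VEC holding the
     case labels, which is what the checks below look for.  */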
1952 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1953 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1954 && GET_CODE (tmp) == JUMP_INSN
1955 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1956 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1957 {
1958 rtvec vec;
1959 int j;
1960 rtx new_label = block_label (target);
1961
1962 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1963 vec = XVEC (PATTERN (tmp), 0);
1964 else
1965 vec = XVEC (PATTERN (tmp), 1);
1966
1967 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1968 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1969 {
1970 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1971 --LABEL_NUSES (old_label);
1972 ++LABEL_NUSES (new_label);
1973 }
1974
1975 /* Handle casesi dispatch insns. */
1976 if ((tmp = single_set (insn)) != NULL
1977 && SET_DEST (tmp) == pc_rtx
1978 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1979 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1980 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1981 {
1982 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1983 new_label);
1984 --LABEL_NUSES (old_label);
1985 ++LABEL_NUSES (new_label);
1986 }
1987 }
1988 else
1989 {
1990 /* ??? We may play games with moving the named labels from
1991 one basic block to the other in case only one computed_jump is
1992 available. */
1993 if (computed_jump_p (insn))
1994 return false;
1995
1996 /* A return instruction can't be redirected. */
1997 if (returnjump_p (insn))
1998 return false;
1999
2000 /* If the insn doesn't go where we think, we're confused. */
2001 if (JUMP_LABEL (insn) != old_label)
2002 abort ();
2003 redirect_jump (insn, block_label (target), 0);
2004 }
2005
2006 if (rtl_dump_file)
2007 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
2008 e->src->index, e->dest->index, target->index);
2009 if (e->dest != target)
2010 redirect_edge_succ_nodup (e, target);
2011 return true;
2012 }
2013
2014 /* Redirect edge E even at the expense of creating a new jump insn or
2015 basic block. Return the new basic block if one is created, NULL otherwise.
2016 Abort if the conversion is impossible. */
2017 basic_block
2018 redirect_edge_and_branch_force (e, target)
2019 edge e;
2020 basic_block target;
2021 {
2022 basic_block new_bb;
2023 edge new_edge;
2024 rtx label;
2025 rtx bb_note;
2026 int i, j;
2027
2028 if (redirect_edge_and_branch (e, target))
2029 return NULL;
2030 if (e->dest == target)
2031 return NULL;
2032 if (e->flags & EDGE_ABNORMAL)
2033 abort ();
2034 if (!(e->flags & EDGE_FALLTHRU))
2035 abort ();
2036
2037 e->flags &= ~EDGE_FALLTHRU;
2038 label = block_label (target);
2039 /* Case where E is the only edge leaving its source block. */
2040 if (!e->src->succ->succ_next)
2041 {
2042 e->src->end = emit_jump_insn_after (gen_jump (label),
2043 last_loop_beg_note (e->src->end));
2044 JUMP_LABEL (e->src->end) = label;
2045 LABEL_NUSES (label)++;
2046 if (basic_block_for_insn)
2047 set_block_for_new_insns (e->src->end, e->src);
2048 emit_barrier_after (e->src->end);
2049 if (rtl_dump_file)
2050 fprintf (rtl_dump_file,
2051 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
2052 INSN_UID (e->src->end), e->src->index, e->dest->index,
2053 target->index);
2054 redirect_edge_succ (e, target);
2055 return NULL;
2056 }
2057 /* Redirecting the fallthru edge of a conditional jump needs extra work. */
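/* The plan: create a new basic block immediately after E->SRC containing
   an unconditional jump to TARGET; the fallthru edge out of E->SRC is
   rerouted into this new block, and E itself becomes the new block's
   single edge to TARGET.  */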
2058
2059 if (rtl_dump_file)
2060 fprintf (rtl_dump_file,
2061 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
2062 INSN_UID (e->src->end), e->src->index, e->dest->index,
2063 target->index);
2064
2065 /* Create the new structures. */
2066 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
2067 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
2068 n_edges++;
2069
2070 memset (new_bb, 0, sizeof (*new_bb));
2071
2072 new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
2073 new_bb->succ = NULL;
2074 new_bb->pred = new_edge;
2075 new_bb->count = e->count;
2076 new_bb->frequency = EDGE_FREQUENCY (e);
2077 new_bb->loop_depth = e->dest->loop_depth;
2078
2079 new_edge->flags = EDGE_FALLTHRU;
2080 new_edge->probability = e->probability;
2081 new_edge->count = e->count;
2082
2083 if (target->global_live_at_start)
2084 {
2085 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2086 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2087 COPY_REG_SET (new_bb->global_live_at_start,
2088 target->global_live_at_start);
2089 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
2090 }
2091
2092 /* Wire edge in. */
2093 new_edge->src = e->src;
2094 new_edge->dest = new_bb;
2095 new_edge->succ_next = e->src->succ;
2096 e->src->succ = new_edge;
2097 new_edge->pred_next = NULL;
2098
2099 /* Redirect old edge. */
2100 redirect_edge_succ (e, target);
2101 redirect_edge_pred (e, new_bb);
2102 e->probability = REG_BR_PROB_BASE;
2103
2104 /* Place the new block just after the block being split. */
2105 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2106
2107 /* Some parts of the compiler expect blocks to be numbered in
2108 sequential order, so insert the new block immediately after the
2109 block being split. */
2110 j = new_edge->src->index;
2111 for (i = n_basic_blocks - 1; i > j + 1; --i)
2112 {
2113 basic_block tmp = BASIC_BLOCK (i - 1);
2114 BASIC_BLOCK (i) = tmp;
2115 tmp->index = i;
2116 }
2117
2118 BASIC_BLOCK (i) = new_bb;
2119 new_bb->index = i;
2120
2121 /* Create the basic block note. */
2122 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
2123 NOTE_BASIC_BLOCK (bb_note) = new_bb;
2124 new_bb->head = bb_note;
2125
2126 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
2127 JUMP_LABEL (new_bb->end) = label;
2128 LABEL_NUSES (label)++;
2129 if (basic_block_for_insn)
2130 set_block_for_new_insns (new_bb->end, new_bb);
2131 emit_barrier_after (new_bb->end);
2132 return new_bb;
2133 }
2134
2135 /* Helper function for split_edge. Return true in case the edge from BB2
2136 to BB1 is a back edge of a syntactic loop. */
2137 static bool
2138 back_edge_of_syntactic_loop_p (bb1, bb2)
2139 basic_block bb1, bb2;
2140 {
2141 rtx insn;
2142 int count = 0;
2143 if (bb1->index > bb2->index)
2144 return false;
2145 if (bb1->index == bb2->index)
2146 return true;
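  /* Scan forward from BB1's end to BB2's head, tracking the nesting of
     LOOP_BEG/LOOP_END notes.  If the count never drops below zero, we never
     leave a syntactic loop that was open at BB1, so the edge from BB2 back
     to BB1 is treated as a back edge.  */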
2147 for (insn = bb1->end; insn != bb2->head && count >= 0;
2148 insn = NEXT_INSN (insn))
2149 if (GET_CODE (insn) == NOTE)
2150 {
2151 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2152 count++;
2153 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2154 count--;
2155 }
2156 return count >= 0;
2157 }
2158
2159 /* Split a (typically critical) edge. Return the new block.
2160 Abort on abnormal edges.
2161
2162 ??? The code generally expects to be called on critical edges.
2163 The case of a block ending in an unconditional jump to a
2164 block with multiple predecessors is not handled optimally. */
2165
2166 basic_block
2167 split_edge (edge_in)
2168 edge edge_in;
2169 {
2170 basic_block old_pred, bb, old_succ;
2171 edge edge_out;
2172 rtx bb_note;
2173 int i, j;
2174
2175 /* Abnormal edges cannot be split. */
2176 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2177 abort ();
2178
2179 old_pred = edge_in->src;
2180 old_succ = edge_in->dest;
2181
2182 /* Create the new structures. */
2183 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2184 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2185 n_edges++;
2186
2187 memset (bb, 0, sizeof (*bb));
2188
2189 /* ??? This info is likely going to be out of date very soon. */
2190 if (old_succ->global_live_at_start)
2191 {
2192 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2193 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2194 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2195 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2196 }
2197
2198 /* Wire them up. */
2199 bb->succ = edge_out;
2200 bb->count = edge_in->count;
2201 bb->frequency = EDGE_FREQUENCY (edge_in);
2202
2203 edge_in->flags &= ~EDGE_CRITICAL;
2204
2205 edge_out->pred_next = old_succ->pred;
2206 edge_out->succ_next = NULL;
2207 edge_out->src = bb;
2208 edge_out->dest = old_succ;
2209 edge_out->flags = EDGE_FALLTHRU;
2210 edge_out->probability = REG_BR_PROB_BASE;
2211 edge_out->count = edge_in->count;
2212
2213 old_succ->pred = edge_out;
2214
2215 /* Tricky case -- if there existed a fallthru into the successor
2216 (and we're not it) we must add a new unconditional jump around
2217 the new block we're actually interested in.
2218
2219 Further, if that edge is critical, this means a second new basic
2220 block must be created to hold it. In order to simplify correct
2221 insn placement, do this before we touch the existing basic block
2222 ordering for the block we were really wanting. */
2223 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2224 {
2225 edge e;
2226 for (e = edge_out->pred_next; e; e = e->pred_next)
2227 if (e->flags & EDGE_FALLTHRU)
2228 break;
2229
2230 if (e)
2231 {
2232 basic_block jump_block;
2233 rtx pos;
2234
2235 if ((e->flags & EDGE_CRITICAL) == 0
2236 && e->src != ENTRY_BLOCK_PTR)
2237 {
2238 /* Non critical -- we can simply add a jump to the end
2239 of the existing predecessor. */
2240 jump_block = e->src;
2241 }
2242 else
2243 {
2244 /* We need a new block to hold the jump. The simplest
2245 way to do the bulk of the work here is to recursively
2246 call ourselves. */
2247 jump_block = split_edge (e);
2248 e = jump_block->succ;
2249 }
2250
2251 /* Now add the jump insn ... */
2252 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2253 last_loop_beg_note (jump_block->end));
2254 jump_block->end = pos;
2255 if (basic_block_for_insn)
2256 set_block_for_new_insns (pos, jump_block);
2257 emit_barrier_after (pos);
2258
2259 /* ... let jump know that label is in use, ... */
2260 JUMP_LABEL (pos) = old_succ->head;
2261 ++LABEL_NUSES (old_succ->head);
2262
2263 /* ... and clear fallthru on the outgoing edge. */
2264 e->flags &= ~EDGE_FALLTHRU;
2265
2266 /* Continue splitting the interesting edge. */
2267 }
2268 }
2269
2270 /* Place the new block just in front of the successor. */
2271 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2272 if (old_succ == EXIT_BLOCK_PTR)
2273 j = n_basic_blocks - 1;
2274 else
2275 j = old_succ->index;
2276 for (i = n_basic_blocks - 1; i > j; --i)
2277 {
2278 basic_block tmp = BASIC_BLOCK (i - 1);
2279 BASIC_BLOCK (i) = tmp;
2280 tmp->index = i;
2281 }
2282 BASIC_BLOCK (i) = bb;
2283 bb->index = i;
2284
2285 /* Create the basic block note.
2286
2287 Where we place the note can have a noticeable impact on the generated
2288 code. Consider this cfg:
2289
2290 E
2291 |
2292 0
2293 / \
2294 +->1-->2--->E
2295 | |
2296 +--+
2297
2298 If we need to insert an insn on the edge from block 0 to block 1,
2299 we want to ensure the instructions we insert are outside of any
2300 loop notes that physically sit between block 0 and block 1. Otherwise
2301 we confuse the loop optimizer into thinking the loop is a phony. */
2302 if (old_succ != EXIT_BLOCK_PTR
2303 && PREV_INSN (old_succ->head)
2304 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2305 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2306 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2307 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2308 PREV_INSN (old_succ->head));
2309 else if (old_succ != EXIT_BLOCK_PTR)
2310 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2311 else
2312 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2313 NOTE_BASIC_BLOCK (bb_note) = bb;
2314 bb->head = bb->end = bb_note;
2315
2316 /* For non-fallthru edges, we must adjust the predecessor's
2317 jump instruction to target our new block. */
2318 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2319 {
2320 if (!redirect_edge_and_branch (edge_in, bb))
2321 abort ();
2322 }
2323 else
2324 redirect_edge_succ (edge_in, bb);
2325
2326 return bb;
2327 }
2328
2329 /* Queue instructions for insertion on an edge between two basic blocks.
2330 The new instructions and basic blocks (if any) will not appear in the
2331 CFG until commit_edge_insertions is called. */
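/* Typical usage (illustrative only; the pattern shown is just an example):

     insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
     ...
     commit_edge_insertions ();

   The queued insns are collected in a sequence attached to E->insns and
   only placed into the insn stream by commit_edge_insertions.  */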
2332
2333 void
2334 insert_insn_on_edge (pattern, e)
2335 rtx pattern;
2336 edge e;
2337 {
2338 /* We cannot insert instructions on an abnormal critical edge.
2339 It will be easier to find the culprit if we die now. */
2340 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2341 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2342 abort ();
2343
2344 if (e->insns == NULL_RTX)
2345 start_sequence ();
2346 else
2347 push_to_sequence (e->insns);
2348
2349 emit_insn (pattern);
2350
2351 e->insns = get_insns ();
2352 end_sequence ();
2353 }
2354
2355 /* Update the CFG for the instructions queued on edge E. */
2356
2357 static void
2358 commit_one_edge_insertion (e)
2359 edge e;
2360 {
2361 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2362 basic_block bb;
2363
2364 /* Pull the insns off the edge now since the edge might go away. */
2365 insns = e->insns;
2366 e->insns = NULL_RTX;
2367
2368 /* Figure out where to put these things. If the destination has
2369 one predecessor, insert there. Except for the exit block. */
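  /* In outline: if the destination block has a single predecessor (and is
     not the exit block), insert at its head; else if the source block has
     a single, non-abnormal successor (and is not the entry block), insert
     at its end, before any final jump; otherwise split the edge and insert
     into the new block.  */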
2370 if (e->dest->pred->pred_next == NULL
2371 && e->dest != EXIT_BLOCK_PTR)
2372 {
2373 bb = e->dest;
2374
2375 /* Get the location correct wrt a code label, and "nice" wrt
2376 a basic block note, and before everything else. */
2377 tmp = bb->head;
2378 if (GET_CODE (tmp) == CODE_LABEL)
2379 tmp = NEXT_INSN (tmp);
2380 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2381 tmp = NEXT_INSN (tmp);
2382 if (tmp == bb->head)
2383 before = tmp;
2384 else
2385 after = PREV_INSN (tmp);
2386 }
2387
2388 /* If the source has one successor and the edge is not abnormal,
2389 insert there. Except for the entry block. */
2390 else if ((e->flags & EDGE_ABNORMAL) == 0
2391 && e->src->succ->succ_next == NULL
2392 && e->src != ENTRY_BLOCK_PTR)
2393 {
2394 bb = e->src;
2395 /* It is possible to have a non-simple jump here. Consider a target
2396 where some forms of unconditional jumps clobber a register. This
2397 happens on the fr30 for example.
2398
2399 We know this block has a single successor, so we can just emit
2400 the queued insns before the jump. */
2401 if (GET_CODE (bb->end) == JUMP_INSN)
2402 {
2403 before = bb->end;
2404 }
2405 else
2406 {
2407 /* We'd better be fallthru, or we've lost track of what's what. */
2408 if ((e->flags & EDGE_FALLTHRU) == 0)
2409 abort ();
2410
2411 after = bb->end;
2412 }
2413 }
2414
2415 /* Otherwise we must split the edge. */
2416 else
2417 {
2418 bb = split_edge (e);
2419 after = bb->end;
2420 }
2421
2422 /* Now that we've found the spot, do the insertion. */
2423
2424 /* Set the new block number for these insns, if structure is allocated. */
2425 if (basic_block_for_insn)
2426 {
2427 rtx i;
2428 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2429 set_block_for_insn (i, bb);
2430 }
2431
2432 if (before)
2433 {
2434 emit_insns_before (insns, before);
2435 if (before == bb->head)
2436 bb->head = insns;
2437
2438 last = prev_nonnote_insn (before);
2439 }
2440 else
2441 {
2442 last = emit_insns_after (insns, after);
2443 if (after == bb->end)
2444 bb->end = last;
2445 }
2446
2447 if (returnjump_p (last))
2448 {
2449 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2450 This is not currently a problem because this only happens
2451 for the (single) epilogue, which already has a fallthru edge
2452 to EXIT. */
2453
2454 e = bb->succ;
2455 if (e->dest != EXIT_BLOCK_PTR
2456 || e->succ_next != NULL
2457 || (e->flags & EDGE_FALLTHRU) == 0)
2458 abort ();
2459 e->flags &= ~EDGE_FALLTHRU;
2460
2461 emit_barrier_after (last);
2462 bb->end = last;
2463
2464 if (before)
2465 flow_delete_insn (before);
2466 }
2467 else if (GET_CODE (last) == JUMP_INSN)
2468 abort ();
2469 find_sub_basic_blocks (bb);
2470 }
2471
2472 /* Update the CFG for all queued instructions. */
2473
2474 void
2475 commit_edge_insertions ()
2476 {
2477 int i;
2478 basic_block bb;
2479 compute_bb_for_insn (get_max_uid ());
2480
2481 #ifdef ENABLE_CHECKING
2482 verify_flow_info ();
2483 #endif
2484
2485 i = -1;
2486 bb = ENTRY_BLOCK_PTR;
2487 while (1)
2488 {
2489 edge e, next;
2490
2491 for (e = bb->succ; e; e = next)
2492 {
2493 next = e->succ_next;
2494 if (e->insns)
2495 commit_one_edge_insertion (e);
2496 }
2497
2498 if (++i >= n_basic_blocks)
2499 break;
2500 bb = BASIC_BLOCK (i);
2501 }
2502 }
2503
2504 /* Return true if we need to add a fake edge to the exit block.
2505 Helper function for flow_call_edges_add. */
2506 static bool
2507 need_fake_edge_p (insn)
2508 rtx insn;
2509 {
2510 if (!INSN_P (insn))
2511 return false;
2512
2513 if ((GET_CODE (insn) == CALL_INSN
2514 && !SIBLING_CALL_P (insn)
2515 && !find_reg_note (insn, REG_NORETURN, NULL) && !CONST_CALL_P (insn)))
2516 return true;
2517
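  /* A volatile asm (either a bare ASM_OPERANDS or one wrapped in a
     PARALLEL) and an old-style ASM_INPUT are treated conservatively here,
     since they may not fall through normally and so also need a fake edge
     to the exit block.  */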
2518 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2519 && MEM_VOLATILE_P (PATTERN (insn)))
2520 || (GET_CODE (PATTERN (insn)) == PARALLEL
2521 && asm_noperands (insn) != -1
2522 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2523 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2524 }
2525
2526 /* Add fake edges to the function exit for any non-constant and non-noreturn
2527 calls, and for volatile inline assembly, in the bitmap of blocks specified
2528 by BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of
2529 blocks that were split.
2530
2531 The goal is to expose cases in which entering a basic block does not imply
2532 that all subsequent instructions must be executed. */
2533
2534 int
2535 flow_call_edges_add (blocks)
2536 sbitmap blocks;
2537 {
2538 int i;
2539 int blocks_split = 0;
2540 int bb_num = 0;
2541 basic_block *bbs;
2542 bool check_last_block = false;
2543
2544 /* Map bb indices into basic block pointers since split_block
2545 will renumber the basic blocks. */
2546
2547 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2548
2549 if (! blocks)
2550 {
2551 for (i = 0; i < n_basic_blocks; i++)
2552 bbs[bb_num++] = BASIC_BLOCK (i);
2553 check_last_block = true;
2554 }
2555 else
2556 {
2557 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2558 {
2559 bbs[bb_num++] = BASIC_BLOCK (i);
2560 if (i == n_basic_blocks - 1)
2561 check_last_block = true;
2562 });
2563 }
2564
2565 /* In the last basic block, before epilogue generation, there will be
2566 a fallthru edge to EXIT. Special care is required if the last insn
2567 of the last basic block is a call because make_edge folds duplicate
2568 edges, which would result in the fallthru edge also being marked
2569 fake, which would result in the fallthru edge being removed by
2570 remove_fake_edges, which would result in an invalid CFG.
2571
2572 Moreover, we can't elide the outgoing fake edge, since the block
2573 profiler needs to take this into account in order to solve the minimal
2574 spanning tree in the case that the call doesn't return.
2575
2576 Handle this by adding a dummy instruction in a new last basic block. */
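  /* The dummy insn is a (use (const_int 0)) queued on the fallthru edge to
     EXIT and committed immediately; afterwards the call is no longer the
     last insn on the path to EXIT, so the fake edge added in the loop below
     ends up in a different block than the fallthru edge.  */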
2577 if (check_last_block
2578 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2579 {
2580 edge e;
2581 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2582 if (e->dest == EXIT_BLOCK_PTR)
2583 break;
2584 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2585 commit_edge_insertions ();
2586 }
2587
2588
2589 /* Now add fake edges to the function exit for any non-constant
2590 calls, since there is no way that we can determine whether they will
2591 return or not... */
2592
2593 for (i = 0; i < bb_num; i++)
2594 {
2595 basic_block bb = bbs[i];
2596 rtx insn;
2597 rtx prev_insn;
2598
2599 for (insn = bb->end; ; insn = prev_insn)
2600 {
2601 prev_insn = PREV_INSN (insn);
2602 if (need_fake_edge_p (insn))
2603 {
2604 edge e;
2605
2606 /* The above condition should be enough to verify that there is
2607 no edge to the exit block in the CFG already. Calling make_edge in
2608 such a case would cause us to mark that edge as fake and remove it
2609 later. */
2610 #ifdef ENABLE_CHECKING
2611 if (insn == bb->end)
2612 for (e = bb->succ; e; e = e->succ_next)
2613 if (e->dest == EXIT_BLOCK_PTR)
2614 abort ();
2615 #endif
2616
2617 /* Note that the following may create a new basic block
2618 and renumber the existing basic blocks. */
2619 e = split_block (bb, insn);
2620 if (e)
2621 blocks_split++;
2622
2623 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2624 }
2625 if (insn == bb->head)
2626 break;
2627 }
2628 }
2629
2630 if (blocks_split)
2631 verify_flow_info ();
2632
2633 free (bbs);
2634 return blocks_split;
2635 }
2636 \f
2637 /* Find unreachable blocks. An unreachable block will have NULL in
2638 block->aux; a non-NULL value indicates the block is reachable. */
2639
2640 void
2641 find_unreachable_blocks ()
2642 {
2643 edge e;
2644 int i, n;
2645 basic_block *tos, *worklist;
2646
2647 n = n_basic_blocks;
2648 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2649
2650 /* Use basic_block->aux as a marker. Clear them all. */
2651
2652 for (i = 0; i < n; ++i)
2653 BASIC_BLOCK (i)->aux = NULL;
2654
2655 /* Add our starting points to the worklist. Almost always there will
2656 be only one. It isn't inconceivable that we might one day directly
2657 support Fortran alternate entry points. */
2658
2659 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2660 {
2661 *tos++ = e->dest;
2662
2663 /* Mark the block with a handy non-null value. */
2664 e->dest->aux = e;
2665 }
2666
2667 /* Iterate: find everything reachable from what we've already seen. */
2668
2669 while (tos != worklist)
2670 {
2671 basic_block b = *--tos;
2672
2673 for (e = b->succ; e; e = e->succ_next)
2674 if (!e->dest->aux)
2675 {
2676 *tos++ = e->dest;
2677 e->dest->aux = e;
2678 }
2679 }
2680
2681 free (worklist);
2682 }
2683
2684 /* Delete all unreachable basic blocks. */
2685 static void
2686 delete_unreachable_blocks ()
2687 {
2688 int i;
2689
2690 find_unreachable_blocks ();
2691
2692 /* Delete all unreachable basic blocks. Count down so that we
2693 don't interfere with the block renumbering that happens in
2694 flow_delete_block. */
2695
2696 for (i = n_basic_blocks - 1; i >= 0; --i)
2697 {
2698 basic_block b = BASIC_BLOCK (i);
2699
2700 if (b->aux != NULL)
2701 /* This block was found. Tidy up the mark. */
2702 b->aux = NULL;
2703 else
2704 flow_delete_block (b);
2705 }
2706
2707 tidy_fallthru_edges ();
2708 }
2709
2710 /* Return true if NOTE is not one of the ones that must be kept paired,
2711 so that we may simply delete them. */
2712
2713 static int
2714 can_delete_note_p (note)
2715 rtx note;
2716 {
2717 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2718 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2719 }
2720
2721 /* Unlink a chain of insns between START and FINISH, leaving notes
2722 that must be paired. */
2723
2724 void
2725 flow_delete_insn_chain (start, finish)
2726 rtx start, finish;
2727 {
2728 /* Unchain the insns one by one. It would be quicker to delete all
2729 of these with a single unchaining, rather than one at a time, but
2730 we need to keep the NOTEs. */
2731
2732 rtx next;
2733
2734 while (1)
2735 {
2736 next = NEXT_INSN (start);
2737 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2738 ;
2739 else if (GET_CODE (start) == CODE_LABEL
2740 && ! can_delete_label_p (start))
2741 {
2742 const char *name = LABEL_NAME (start);
2743 PUT_CODE (start, NOTE);
2744 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2745 NOTE_SOURCE_FILE (start) = name;
2746 }
2747 else
2748 next = flow_delete_insn (start);
2749
2750 if (start == finish)
2751 break;
2752 start = next;
2753 }
2754 }
2755
2756 /* Delete the insns in a (non-live) block. We physically delete every
2757 non-deleted-note insn, and update the flow graph appropriately.
2758
2759 Return nonzero if we deleted an exception handler. */
2760
2761 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2762 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2763
2764 int
2765 flow_delete_block (b)
2766 basic_block b;
2767 {
2768 int deleted_handler = 0;
2769 rtx insn, end, tmp;
2770
2771 /* If the head of this block is a CODE_LABEL, then it might be the
2772 label for an exception handler which can't be reached.
2773
2774 We need to remove the label from the exception_handler_label list
2775 and remove the associated NOTE_INSN_EH_REGION_BEG and
2776 NOTE_INSN_EH_REGION_END notes. */
2777
2778 insn = b->head;
2779
2780 never_reached_warning (insn);
2781
2782 if (GET_CODE (insn) == CODE_LABEL)
2783 maybe_remove_eh_handler (insn);
2784
2785 /* Include any jump table following the basic block. */
2786 end = b->end;
2787 if (GET_CODE (end) == JUMP_INSN
2788 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2789 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2790 && GET_CODE (tmp) == JUMP_INSN
2791 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2792 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2793 end = tmp;
2794
2795 /* Include any barrier that may follow the basic block. */
2796 tmp = next_nonnote_insn (end);
2797 if (tmp && GET_CODE (tmp) == BARRIER)
2798 end = tmp;
2799
2800 /* Selectively delete the entire chain. */
2801 flow_delete_insn_chain (insn, end);
2802
2803 /* Remove the edges into and out of this block. Note that there may
2804 indeed be edges in, if we are removing an unreachable loop. */
2805 {
2806 edge e, next, *q;
2807
2808 for (e = b->pred; e; e = next)
2809 {
2810 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2811 continue;
2812 *q = e->succ_next;
2813 next = e->pred_next;
2814 n_edges--;
2815 free (e);
2816 }
2817 for (e = b->succ; e; e = next)
2818 {
2819 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2820 continue;
2821 *q = e->pred_next;
2822 next = e->succ_next;
2823 n_edges--;
2824 free (e);
2825 }
2826
2827 b->pred = NULL;
2828 b->succ = NULL;
2829 }
2830
2831 /* Remove the basic block from the array, and compact behind it. */
2832 expunge_block (b);
2833
2834 return deleted_handler;
2835 }
2836
2837 /* Remove block B from the basic block array and compact behind it. */
2838
2839 static void
2840 expunge_block (b)
2841 basic_block b;
2842 {
2843 int i, n = n_basic_blocks;
2844
2845 for (i = b->index; i + 1 < n; ++i)
2846 {
2847 basic_block x = BASIC_BLOCK (i + 1);
2848 BASIC_BLOCK (i) = x;
2849 x->index = i;
2850 }
2851
2852 basic_block_info->num_elements--;
2853 n_basic_blocks--;
2854 }
2855
2856 /* Delete INSN by patching it out. Return the next insn. */
2857
2858 rtx
2859 flow_delete_insn (insn)
2860 rtx insn;
2861 {
2862 rtx prev = PREV_INSN (insn);
2863 rtx next = NEXT_INSN (insn);
2864 rtx note;
2865
2866 PREV_INSN (insn) = NULL_RTX;
2867 NEXT_INSN (insn) = NULL_RTX;
2868 INSN_DELETED_P (insn) = 1;
2869
2870 if (prev)
2871 NEXT_INSN (prev) = next;
2872 if (next)
2873 PREV_INSN (next) = prev;
2874 else
2875 set_last_insn (prev);
2876
2877 if (GET_CODE (insn) == CODE_LABEL)
2878 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2879
2880 /* If deleting a jump, decrement the use count of the label. Deleting
2881 the label itself should happen in the normal course of block merging. */
2882 if (GET_CODE (insn) == JUMP_INSN
2883 && JUMP_LABEL (insn)
2884 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2885 LABEL_NUSES (JUMP_LABEL (insn))--;
2886
2887 /* Also if deleting an insn that references a label. */
2888 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2889 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2890 LABEL_NUSES (XEXP (note, 0))--;
2891
2892 if (GET_CODE (insn) == JUMP_INSN
2893 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2894 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2895 {
2896 rtx pat = PATTERN (insn);
2897 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2898 int len = XVECLEN (pat, diff_vec_p);
2899 int i;
2900
2901 for (i = 0; i < len; i++)
2902 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2903 }
2904
2905 return next;
2906 }
2907
2908 /* True if a given label can be deleted. */
2909
2910 static int
2911 can_delete_label_p (label)
2912 rtx label;
2913 {
2914 rtx x;
2915
2916 if (LABEL_PRESERVE_P (label))
2917 return 0;
2918
2919 for (x = forced_labels; x; x = XEXP (x, 1))
2920 if (label == XEXP (x, 0))
2921 return 0;
2922 for (x = label_value_list; x; x = XEXP (x, 1))
2923 if (label == XEXP (x, 0))
2924 return 0;
2925 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2926 if (label == XEXP (x, 0))
2927 return 0;
2928
2929 /* User declared labels must be preserved. */
2930 if (LABEL_NAME (label) != 0)
2931 return 0;
2932
2933 return 1;
2934 }
2935
2936 static int
2937 tail_recursion_label_p (label)
2938 rtx label;
2939 {
2940 rtx x;
2941
2942 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2943 if (label == XEXP (x, 0))
2944 return 1;
2945
2946 return 0;
2947 }
2948
2949 /* Blocks A and B are to be merged into a single block A. The insns
2950 are already contiguous, hence `nomove'. */
2951
2952 void
2953 merge_blocks_nomove (a, b)
2954 basic_block a, b;
2955 {
2956 edge e;
2957 rtx b_head, b_end, a_end;
2958 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2959 int b_empty = 0;
2960
2961 /* If there was a CODE_LABEL beginning B, delete it. */
2962 b_head = b->head;
2963 b_end = b->end;
2964 if (GET_CODE (b_head) == CODE_LABEL)
2965 {
2966 /* Detect basic blocks with nothing but a label. This can happen
2967 in particular at the end of a function. */
2968 if (b_head == b_end)
2969 b_empty = 1;
2970 del_first = del_last = b_head;
2971 b_head = NEXT_INSN (b_head);
2972 }
2973
2974 /* Delete the basic block note. */
2975 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2976 {
2977 if (b_head == b_end)
2978 b_empty = 1;
2979 if (! del_last)
2980 del_first = b_head;
2981 del_last = b_head;
2982 b_head = NEXT_INSN (b_head);
2983 }
2984
2985 /* If there was a jump out of A, delete it. */
2986 a_end = a->end;
2987 if (GET_CODE (a_end) == JUMP_INSN)
2988 {
2989 rtx prev;
2990
2991 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2992 if (GET_CODE (prev) != NOTE
2993 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2994 || prev == a->head)
2995 break;
2996
2997 del_first = a_end;
2998
2999 #ifdef HAVE_cc0
3000 /* If this was a conditional jump, we need to also delete
3001 the insn that set cc0. */
3002 if (prev && sets_cc0_p (prev))
3003 {
3004 rtx tmp = prev;
3005 prev = prev_nonnote_insn (prev);
3006 if (!prev)
3007 prev = a->head;
3008 del_first = tmp;
3009 }
3010 #endif
3011
3012 a_end = prev;
3013 }
3014 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3015 del_first = NEXT_INSN (a_end);
3016
3017 /* Delete everything marked above as well as crap that might be
3018 hanging out between the two blocks. */
3019 flow_delete_insn_chain (del_first, del_last);
3020
3021 /* Normally there should only be one successor of A and that is B, but
3022 partway through the merge of blocks for conditional_execution we'll
3023 be merging a TEST block with THEN and ELSE successors. Free the
3024 whole lot of them and hope the caller knows what they're doing. */
3025 while (a->succ)
3026 remove_edge (a->succ);
3027
3028 /* Adjust the edges out of B for the new owner. */
3029 for (e = b->succ; e; e = e->succ_next)
3030 e->src = a;
3031 a->succ = b->succ;
3032
3033 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3034 b->pred = b->succ = NULL;
3035
3036 /* Reassociate the insns of B with A. */
3037 if (!b_empty)
3038 {
3039 if (basic_block_for_insn)
3040 {
3041 BLOCK_FOR_INSN (b_head) = a;
3042 while (b_head != b_end)
3043 {
3044 b_head = NEXT_INSN (b_head);
3045 BLOCK_FOR_INSN (b_head) = a;
3046 }
3047 }
3048 a_end = b_end;
3049 }
3050 a->end = a_end;
3051
3052 expunge_block (b);
3053 }
3054
3055 /* Blocks A and B are to be merged into a single block. A has no incoming
3056 fallthru edge, so it can be moved before B without adding or modifying
3057 any jumps (aside from the jump from A to B). */
3058
3059 static int
3060 merge_blocks_move_predecessor_nojumps (a, b)
3061 basic_block a, b;
3062 {
3063 rtx start, end, barrier;
3064 int index;
3065
3066 start = a->head;
3067 end = a->end;
3068
3069 barrier = next_nonnote_insn (end);
3070 if (GET_CODE (barrier) != BARRIER)
3071 abort ();
3072 flow_delete_insn (barrier);
3073
3074 /* Move block and loop notes out of the chain so that we do not
3075 disturb their order.
3076
3077 ??? A better solution would be to squeeze out all the non-nested notes
3078 and adjust the block trees appropriately. Even better would be to have
3079 a tighter connection between block trees and rtl so that this is not
3080 necessary. */
3081 start = squeeze_notes (start, end);
3082
3083 /* Scramble the insn chain. */
3084 if (end != PREV_INSN (b->head))
3085 reorder_insns (start, end, PREV_INSN (b->head));
3086
3087 if (rtl_dump_file)
3088 {
3089 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3090 a->index, b->index);
3091 }
3092
3093 /* Swap the records for the two blocks around. Although we are deleting B,
3094 A is now where B was and we want to compact the BB array from where
3095 A used to be. */
3096 BASIC_BLOCK (a->index) = b;
3097 BASIC_BLOCK (b->index) = a;
3098 index = a->index;
3099 a->index = b->index;
3100 b->index = index;
3101
3102 /* Now blocks A and B are contiguous. Merge them. */
3103 merge_blocks_nomove (a, b);
3104
3105 return 1;
3106 }
3107
3108 /* Blocks A and B are to be merged into a single block. B has no outgoing
3109 fallthru edge, so it can be moved after A without adding or modifying
3110 any jumps (aside from the jump from A to B). */
3111
3112 static int
3113 merge_blocks_move_successor_nojumps (a, b)
3114 basic_block a, b;
3115 {
3116 rtx start, end, barrier;
3117
3118 start = b->head;
3119 end = b->end;
3120 barrier = NEXT_INSN (end);
3121
3122 /* Recognize a jump table following block B. */
3123 if (barrier
3124 && GET_CODE (barrier) == CODE_LABEL
3125 && NEXT_INSN (barrier)
3126 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3127 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3128 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3129 {
3130 end = NEXT_INSN (barrier);
3131 barrier = NEXT_INSN (end);
3132 }
3133
3134 /* There had better have been a barrier there. Delete it. */
3135 if (barrier && GET_CODE (barrier) == BARRIER)
3136 flow_delete_insn (barrier);
3137
3138 /* Move block and loop notes out of the chain so that we do not
3139 disturb their order.
3140
3141 ??? A better solution would be to squeeze out all the non-nested notes
3142 and adjust the block trees appropriately. Even better would be to have
3143 a tighter connection between block trees and rtl so that this is not
3144 necessary. */
3145 start = squeeze_notes (start, end);
3146
3147 /* Scramble the insn chain. */
3148 reorder_insns (start, end, a->end);
3149
3150 /* Now blocks A and B are contiguous. Merge them. */
3151 merge_blocks_nomove (a, b);
3152
3153 if (rtl_dump_file)
3154 {
3155 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3156 b->index, a->index);
3157 }
3158
3159 return 1;
3160 }
3161
3162 /* Attempt to merge basic blocks that are potentially non-adjacent.
3163 Return true iff the attempt succeeded. */
3164
3165 static int
3166 merge_blocks (e, b, c, mode)
3167 edge e;
3168 basic_block b, c;
3169 int mode;
3170 {
3171 /* If C has a tail recursion label, do not merge. There is no
3172 edge recorded from the call_placeholder back to this label, as
3173 that would make optimize_sibling_and_tail_recursive_calls more
3174 complex for no gain. */
3175 if (GET_CODE (c->head) == CODE_LABEL
3176 && tail_recursion_label_p (c->head))
3177 return 0;
3178
3179 /* If B has a fallthru edge to C, no need to move anything. */
3180 if (e->flags & EDGE_FALLTHRU)
3181 {
3182 merge_blocks_nomove (b, c);
3183
3184 if (rtl_dump_file)
3185 {
3186 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3187 b->index, c->index);
3188 }
3189
3190 return 1;
3191 }
3192 /* Otherwise we will need to move code around. Do that only if expensive
3193 transformations are allowed. */
3194 else if (mode & CLEANUP_EXPENSIVE)
3195 {
3196 edge tmp_edge, c_fallthru_edge;
3197 int c_has_outgoing_fallthru;
3198 int b_has_incoming_fallthru;
3199
3200 /* Avoid overactive code motion, as the forwarder blocks should be
3201 eliminated by edge redirection instead. One exception might have
3202 been if B is a forwarder block and C has no fallthru edge, but
3203 that should be cleaned up by bb-reorder instead. */
3204 if (forwarder_block_p (b) || forwarder_block_p (c))
3205 return 0;
3206
3207 /* We must make sure to not munge nesting of lexical blocks,
3208 and loop notes. This is done by squeezing out all the notes
3209 and leaving them there to lie. Not ideal, but functional. */
3210
3211 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3212 if (tmp_edge->flags & EDGE_FALLTHRU)
3213 break;
3214 c_has_outgoing_fallthru = (tmp_edge != NULL);
3215 c_fallthru_edge = tmp_edge;
3216
3217 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3218 if (tmp_edge->flags & EDGE_FALLTHRU)
3219 break;
3220 b_has_incoming_fallthru = (tmp_edge != NULL);
3221
3222 /* If B does not have an incoming fallthru, then it can be moved
3223 immediately before C without introducing or modifying jumps.
3224 C cannot be the first block, so we do not have to worry about
3225 accessing a non-existent block. */
3226 if (! b_has_incoming_fallthru)
3227 return merge_blocks_move_predecessor_nojumps (b, c);
3228
3229 /* Otherwise, we're going to try to move C after B. If C does
3230 not have an outgoing fallthru, then it can be moved
3231 immediately after B without introducing or modifying jumps. */
3232 if (! c_has_outgoing_fallthru)
3233 return merge_blocks_move_successor_nojumps (b, c);
3234
3235 /* Otherwise, we'll need to insert an extra jump, and possibly
3236 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3237 as we don't have explicit return instructions before epilogues
3238 are generated, so give up on that case. */
3239
3240 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3241 && merge_blocks_move_successor_nojumps (b, c))
3242 {
3243 basic_block target = c_fallthru_edge->dest;
3244 rtx barrier;
3245 basic_block new;
3246
3247 /* This is a dirty hack to avoid code duplication.
3248
3249 Set the edge to point to the wrong basic block, so that
3250 redirect_edge_and_branch_force will do the trick
3251 and rewire the edge back to the original location. */
3252 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3253 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3254
3255 /* We've just created a barrier, but another barrier is
3256 already present in the stream. Avoid the duplicate. */
3257 barrier = next_nonnote_insn (new ? new->end : b->end);
3258 if (GET_CODE (barrier) != BARRIER)
3259 abort ();
3260 flow_delete_insn (barrier);
3261
3262 return 1;
3263 }
3264
3265 return 0;
3266 }
3267 return 0;
3268 }
3269
3270 /* Simplify a conditional jump around an unconditional jump.
3271 Return true if something changed. */
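/* Roughly, this transforms

     if (cond) goto L2;
     goto L1;
   L2: ...

   into

     if (! cond) goto L1;
   L2: ...

   where the "goto L1" sits alone in a forwarder block: the conditional
   branch is inverted to target L1 and the forwarder block is deleted.  */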
3272
3273 static bool
3274 try_simplify_condjump (cbranch_block)
3275 basic_block cbranch_block;
3276 {
3277 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3278 edge cbranch_jump_edge, cbranch_fallthru_edge;
3279 rtx cbranch_insn;
3280
3281 /* Verify that there are exactly two successors. */
3282 if (!cbranch_block->succ
3283 || !cbranch_block->succ->succ_next
3284 || cbranch_block->succ->succ_next->succ_next)
3285 return false;
3286
3287 /* Verify that we've got a normal conditional branch at the end
3288 of the block. */
3289 cbranch_insn = cbranch_block->end;
3290 if (!any_condjump_p (cbranch_insn))
3291 return false;
3292
3293 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3294 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3295
3296 /* The next block must not have multiple predecessors, must not
3297 be the last block in the function, and must contain just the
3298 unconditional jump. */
3299 jump_block = cbranch_fallthru_edge->dest;
3300 if (jump_block->pred->pred_next
3301 || jump_block->index == n_basic_blocks - 1
3302 || !forwarder_block_p (jump_block))
3303 return false;
3304 jump_dest_block = jump_block->succ->dest;
3305
3306 /* The conditional branch must target the block after the
3307 unconditional branch. */
3308 cbranch_dest_block = cbranch_jump_edge->dest;
3309
3310 if (!can_fallthru (jump_block, cbranch_dest_block))
3311 return false;
3312
3313 /* Invert the conditional branch. Prevent jump.c from deleting
3314 "unreachable" instructions. */
3315 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3316 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3317 {
3318 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3319 return false;
3320 }
3321
3322 if (rtl_dump_file)
3323 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3324 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3325
3326 /* Success. Update the CFG to match. Note that after this point
3327 the edge variable names appear backwards; the redirection is done
3328 this way to preserve edge profile data. */
3329 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3330 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3331 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3332 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3333
3334 /* Delete the block with the unconditional jump, and clean up the mess. */
3335 flow_delete_block (jump_block);
3336 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3337
3338 return true;
3339 }
3340
3341 /* Attempt to forward edges leaving basic block B.
3342 Return true if successful. */
3343
3344 static bool
3345 try_forward_edges (mode, b)
3346 basic_block b;
3347 int mode;
3348 {
3349 bool changed = false;
3350 edge e, next;
3351
3352 for (e = b->succ; e ; e = next)
3353 {
3354 basic_block target, first;
3355 int counter;
3356
3357 next = e->succ_next;
3358
3359 /* Skip complex edges because we don't know how to update them.
3360
3361 Still handle fallthru edges, as we may succeed in forwarding a fallthru
3362 edge to the same place as the branch edge of a conditional branch
3363 and turn the conditional branch into an unconditional branch. */
3364 if (e->flags & EDGE_COMPLEX)
3365 continue;
3366
3367 target = first = e->dest;
3368 counter = 0;
3369
3370 /* Look for the real destination of the jump.
3371 Avoid an infinite loop when chasing an infinite empty loop by counting
3372 up to n_basic_blocks. */
3373 while (forwarder_block_p (target)
3374 && target->succ->dest != EXIT_BLOCK_PTR
3375 && counter < n_basic_blocks)
3376 {
3377 /* Bypass trivial infinite loops. */
3378 if (target == target->succ->dest)
3379 counter = n_basic_blocks;
3380
3381 /* Avoid killing loop pre-headers, as they are the place the loop
3382 optimizer wants to hoist code to.
3383 
3384 For fallthru forwarders, the LOOP_BEG note must appear between
3385 the head of the block and the CODE_LABEL of the loop; for non-fallthru
3386 forwarders it must appear before the JUMP_INSN. */
3387 if (mode & CLEANUP_PRE_LOOP)
3388 {
3389 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3390 ? target->head : prev_nonnote_insn (target->end));
3391
3392 if (GET_CODE (insn) != NOTE)
3393 insn = NEXT_INSN (insn);
3394
3395 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3396 insn = NEXT_INSN (insn))
3397 if (GET_CODE (insn) == NOTE
3398 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3399 break;
3400
3401 if (GET_CODE (insn) == NOTE)
3402 break;
3403 }
3404 target = target->succ->dest, counter++;
3405 }
3406
3407 if (counter >= n_basic_blocks)
3408 {
3409 if (rtl_dump_file)
3410 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3411 target->index);
3412 }
3413 else if (target == first)
3414 ; /* We didn't do anything. */
3415 else
3416 {
3417 /* Save the values now, as the edge may get removed. */
3418 gcov_type edge_count = e->count;
3419 int edge_probability = e->probability;
3420
3421 if (redirect_edge_and_branch (e, target))
3422 {
3423 /* We successfully forwarded the edge. Now update profile
3424 data: for each edge we traversed in the chain, remove
3425 the original edge's execution count. */
3426 int edge_frequency = ((edge_probability * b->frequency
3427 + REG_BR_PROB_BASE / 2)
3428 / REG_BR_PROB_BASE);
3429
3430 do
3431 {
3432 first->count -= edge_count;
3433 first->succ->count -= edge_count;
3434 first->frequency -= edge_frequency;
3435 first = first->succ->dest;
3436 }
3437 while (first != target);
3438
3439 changed = true;
3440 }
3441 else
3442 {
3443 if (rtl_dump_file)
3444 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3445 b->index, e->dest->index, target->index);
3446 }
3447 }
3448 }
3449
3450 return changed;
3451 }
3452
3453 /* Look through the insns at the end of BB1 and BB2 and find the longest
3454 sequence of insns that are equivalent. Store the first insns of that
3455 sequence in *F1 and *F2 and return the sequence length.
3456
3457 To simplify callers of this function, if the blocks match exactly,
3458 store the head of the blocks in *F1 and *F2. */
3459
3460 static int
3461 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3462 int mode ATTRIBUTE_UNUSED;
3463 basic_block bb1, bb2;
3464 rtx *f1, *f2;
3465 {
3466 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3467 int ninsns = 0;
3468
3469 /* Skip simple jumps at the end of the blocks. Complex jumps still
3470 need to be compared for equivalence, which we'll do below. */
3471
3472 i1 = bb1->end;
3473 if (onlyjump_p (i1))
3474 i1 = PREV_INSN (i1);
3475 i2 = bb2->end;
3476 if (onlyjump_p (i2))
3477 i2 = PREV_INSN (i2);
3478
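  /* LAST1/LAST2 track the earliest pair of insns matched so far (the
     eventual heads of the equivalent sequences); AFTERLAST1/AFTERLAST2
     remember the previous pair, so the match can be backed off by one insn
     if needed (see the cc0 handling below).  */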
3479 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3480 while (true)
3481 {
3482 /* Ignore notes. */
3483 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3484 i1 = PREV_INSN (i1);
3485 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3486 i2 = PREV_INSN (i2);
3487
3488 if (i1 == bb1->head || i2 == bb2->head)
3489 break;
3490
3491 /* Verify that I1 and I2 are equivalent. */
3492
3493 if (GET_CODE (i1) != GET_CODE (i2))
3494 break;
3495
3496 p1 = PATTERN (i1);
3497 p2 = PATTERN (i2);
3498
3499 /* If this is a CALL_INSN, compare register usage information.
3500 If we don't check this on stack register machines, the two
3501 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3502 numbers of stack registers in the same basic block.
3503 If we don't check this on machines with delay slots, a delay slot may
3504 be filled that clobbers a parameter expected by the subroutine.
3505
3506 ??? We take the simple route for now and assume that if they're
3507 equal, they were constructed identically. */
3508
3509 if (GET_CODE (i1) == CALL_INSN
3510 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3511 CALL_INSN_FUNCTION_USAGE (i2)))
3512 break;
3513
3514 #ifdef STACK_REGS
3515 /* When comparing insns after register stack conversion
3516 (CLEANUP_POST_REGSTACK), check whether the insn mentions any
3517 stack-like regs. */
3518
3519 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3520 {
3521 /* If register stack conversion has already been done, then
3522 death notes must also be compared before it is certain that
3523 the two instruction streams match. */
3524
3525 rtx note;
3526 HARD_REG_SET i1_regset, i2_regset;
3527
3528 CLEAR_HARD_REG_SET (i1_regset);
3529 CLEAR_HARD_REG_SET (i2_regset);
3530
3531 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3532 if (REG_NOTE_KIND (note) == REG_DEAD
3533 && STACK_REG_P (XEXP (note, 0)))
3534 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3535
3536 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3537 if (REG_NOTE_KIND (note) == REG_DEAD
3538 && STACK_REG_P (XEXP (note, 0)))
3539 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3540
3541 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3542
3543 break;
3544
3545 done:
3546 ;
3547 }
3548 #endif
3549
3550 if (GET_CODE (p1) != GET_CODE (p2))
3551 break;
3552
3553 if (! rtx_renumbered_equal_p (p1, p2))
3554 {
3555 /* The following code helps take care of G++ cleanups. */
3556 rtx equiv1 = find_reg_equal_equiv_note (i1);
3557 rtx equiv2 = find_reg_equal_equiv_note (i2);
3558
3559 if (equiv1 && equiv2
3560 /* If the equivalences are not to a constant, they may
3561 reference pseudos that no longer exist, so we can't
3562 use them. */
3563 && CONSTANT_P (XEXP (equiv1, 0))
3564 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3565 {
3566 rtx s1 = single_set (i1);
3567 rtx s2 = single_set (i2);
3568 if (s1 != 0 && s2 != 0
3569 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3570 {
3571 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3572 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3573 if (! rtx_renumbered_equal_p (p1, p2))
3574 cancel_changes (0);
3575 else if (apply_change_group ())
3576 goto win;
3577 }
3578 }
3579 break;
3580 }
3581
3582 win:
3583 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3584 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3585 {
3586 afterlast1 = last1, afterlast2 = last2;
3587 last1 = i1, last2 = i2;
3588 ninsns++;
3589 }
3590 i1 = PREV_INSN (i1);
3591 i2 = PREV_INSN (i2);
3592 }
3593
3594 #ifdef HAVE_cc0
3595 if (ninsns)
3596 {
3597 /* Don't allow the insn after a compare to be shared by
3598 cross-jumping unless the compare is also shared. */
3599 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3600 last1 = afterlast1, last2 = afterlast2, ninsns--;
3601 }
3602 #endif
3603
3604 /* Include preceding notes and labels in the cross-jump. One,
3605 this may bring us to the head of the blocks as requested above.
3606 Two, it keeps line number notes as matched as may be. */
3607 if (ninsns)
3608 {
3609 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3610 last1 = PREV_INSN (last1);
3611 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3612 last1 = PREV_INSN (last1);
3613 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3614 last2 = PREV_INSN (last2);
3615 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3616 last2 = PREV_INSN (last2);
3617
3618 *f1 = last1;
3619 *f2 = last2;
3620 }
3621
3622 return ninsns;
3623 }
3624
3625 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3626 the branch instruction. This means that if we commonize the control
3627 flow before the end of the basic block, the semantics remain unchanged.
3628
3629 We may assume that there exists one edge with a common destination. */
3630
3631 static bool
3632 outgoing_edges_match (bb1, bb2)
3633 basic_block bb1;
3634 basic_block bb2;
3635 {
3636 /* If BB1 has only one successor, we must be looking at an unconditional
3637 jump. Which, by the assumption above, means that we only need to check
3638 that BB2 has one successor. */
3639 if (bb1->succ && !bb1->succ->succ_next)
3640 return (bb2->succ && !bb2->succ->succ_next);
3641
3642 /* Match conditional jumps - this may get tricky when fallthru and branch
3643 edges are crossed. */
3644 if (bb1->succ
3645 && bb1->succ->succ_next
3646 && !bb1->succ->succ_next->succ_next
3647 && any_condjump_p (bb1->end))
3648 {
3649 edge b1, f1, b2, f2;
3650 bool reverse, match;
3651 rtx set1, set2, cond1, cond2;
3652 enum rtx_code code1, code2;
3653
3654 if (!bb2->succ
3655 || !bb2->succ->succ_next
3656 || bb1->succ->succ_next->succ_next
3657 || !any_condjump_p (bb2->end))
3658 return false;
3659
3660 b1 = BRANCH_EDGE (bb1);
3661 b2 = BRANCH_EDGE (bb2);
3662 f1 = FALLTHRU_EDGE (bb1);
3663 f2 = FALLTHRU_EDGE (bb2);
3664
3665 /* Get around possible forwarders on fallthru edges. Other cases
3666 should be optimized out already. */
3667 if (forwarder_block_p (f1->dest))
3668 f1 = f1->dest->succ;
3669 if (forwarder_block_p (f2->dest))
3670 f2 = f2->dest->succ;
3671
3672 /* To simplify use of this function, return false if there are
3673 unneeded forwarder blocks. These will get eliminated later
3674 during cleanup_cfg. */
3675 if (forwarder_block_p (f1->dest)
3676 || forwarder_block_p (f2->dest)
3677 || forwarder_block_p (b1->dest)
3678 || forwarder_block_p (b2->dest))
3679 return false;
3680
3681 if (f1->dest == f2->dest && b1->dest == b2->dest)
3682 reverse = false;
3683 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3684 reverse = true;
3685 else
3686 return false;
3687
3688 set1 = pc_set (bb1->end);
3689 set2 = pc_set (bb2->end);
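  /* A condjump pattern is (set (pc) (if_then_else COND LABEL PC)) or the
     same with the LABEL and PC arms swapped.  If the two jumps place PC in
     different arms, one branches when COND is true and the other when it
     is false, so flip the sense of the comparison.  */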
3690 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3691 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3692 reverse = !reverse;
3693
3694 cond1 = XEXP (SET_SRC (set1), 0);
3695 cond2 = XEXP (SET_SRC (set2), 0);
3696 code1 = GET_CODE (cond1);
3697 if (reverse)
3698 code2 = reversed_comparison_code (cond2, bb2->end);
3699 else
3700 code2 = GET_CODE (cond2);
3701 if (code2 == UNKNOWN)
3702 return false;
3703
3704 /* Verify codes and operands match. */
3705 match = ((code1 == code2
3706 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3707 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3708 || (code1 == swap_condition (code2)
3709 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3710 XEXP (cond2, 0))
3711 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3712 XEXP (cond2, 1))));
3713
3714 /* If we return true, we will join the blocks, which means that
3715 we will only have one branch prediction bit to work with. Thus
3716 we require the existing branches to have probabilities that are
3717 roughly similar. */
3718 /* ??? We should use bb->frequency to allow merging in infrequently
3719 executed blocks, but at the moment it is not available when
3720 cleanup_cfg is run. */
3721 if (match && !optimize_size)
3722 {
3723 rtx note1, note2;
3724 int prob1, prob2;
3725 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3726 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3727
3728 if (note1 && note2)
3729 {
3730 prob1 = INTVAL (XEXP (note1, 0));
3731 prob2 = INTVAL (XEXP (note2, 0));
3732 if (reverse)
3733 prob2 = REG_BR_PROB_BASE - prob2;
3734
3735 /* Fail if the difference in probabilities is
3736 greater than 5%. */
3737 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3738 return false;
3739 }
3740 else if (note1 || note2)
3741 return false;
3742 }
3743
3744 if (rtl_dump_file && match)
3745 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3746 bb1->index, bb2->index);
3747
3748 return match;
3749 }
3750
3751 /* ??? We can handle computed jumps too. This may be important for
3752 inlined functions containing switch statements. Also, jumps without
3753 fallthru edges can be handled by simply matching the whole insn. */
3754 return false;
3755 }
3756
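/* A minimal sketch (kept under #if 0, not compiled) of the probability
   test applied in outgoing_edges_match above, shown in isolation.
   Branch probabilities are fractions of REG_BR_PROB_BASE, so the 5%
   tolerance is REG_BR_PROB_BASE / 20; e.g. with REG_BR_PROB_BASE of
   10000, probabilities 7000 and 6800 match while 7000 and 6400 do not.
   The helper name is hypothetical.  */
#if 0
static bool
br_prob_similar_p (prob1, prob2, reverse)
     int prob1, prob2;
     bool reverse;
{
  /* A reversed condition predicts the opposite outcome.  */
  if (reverse)
    prob2 = REG_BR_PROB_BASE - prob2;

  /* Accept a difference of at most 5% of the probability base.  */
  return abs (prob1 - prob2) <= REG_BR_PROB_BASE / 20;
}
#endif
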
3757 /* E1 and E2 are edges with the same destination block. Search their
3758 predecessors for common code. If found, redirect control flow from
3759 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3760
3761 static bool
3762 try_crossjump_to_edge (mode, e1, e2)
3763 int mode;
3764 edge e1, e2;
3765 {
3766 int nmatch;
3767 basic_block src1 = e1->src, src2 = e2->src;
3768 basic_block redirect_to;
3769 rtx newpos1, newpos2;
3770 edge s;
3771 rtx last;
3772 rtx label;
3773 rtx note;
3774
3775 /* Search backward through forwarder blocks. We don't need to worry
3776 about multiple entry or chained forwarders, as they will be optimized
3777 away. We do this to look past the unconditional jump following a
3778 conditional jump that is required due to the current CFG shape. */
3779 if (src1->pred
3780 && !src1->pred->pred_next
3781 && forwarder_block_p (src1))
3782 {
3783 e1 = src1->pred;
3784 src1 = e1->src;
3785 }
3786 if (src2->pred
3787 && !src2->pred->pred_next
3788 && forwarder_block_p (src2))
3789 {
3790 e2 = src2->pred;
3791 src2 = e2->src;
3792 }
3793
3794 /* Nothing to do if we reach ENTRY, or a common source block. */
3795 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3796 return false;
3797 if (src1 == src2)
3798 return false;
3799
3800 /* Seeing more than one forwarder block would confuse us later... */
3801 if (forwarder_block_p (e1->dest)
3802 && forwarder_block_p (e1->dest->succ->dest))
3803 return false;
3804 if (forwarder_block_p (e2->dest)
3805 && forwarder_block_p (e2->dest->succ->dest))
3806 return false;
3807
3808 /* Likewise with dead code (possibly newly created by the other
3809 optimizations of cleanup_cfg). */
3810 if (!src1->pred || !src2->pred)
3811 return false;
3812
3813 /* Likewise with complex edges.
3814 ??? We should be able to handle most complex edges later with some
3815 care. */
3816 if (e1->flags & EDGE_COMPLEX)
3817 return false;
3818
3819 /* Look for the common insn sequence, part the first ... */
3820 if (!outgoing_edges_match (src1, src2))
3821 return false;
3822
3823 /* ... and part the second. */
3824 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3825 if (!nmatch)
3826 return false;
3827
3828 /* Avoid splitting if possible. */
3829 if (newpos2 == src2->head)
3830 redirect_to = src2;
3831 else
3832 {
3833 if (rtl_dump_file)
3834 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3835 src2->index, nmatch);
3836 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3837 }
3838
3839 if (rtl_dump_file)
3840 fprintf (rtl_dump_file,
3841 "Cross jumping from bb %i to bb %i; %i common insns\n",
3842 src1->index, src2->index, nmatch);
3843
3844 redirect_to->count += src1->count;
3845 redirect_to->frequency += src1->frequency;
3846
3847 /* Recompute the frequencies and counts of outgoing edges. */
3848 for (s = redirect_to->succ; s; s = s->succ_next)
3849 {
3850 edge s2;
3851 basic_block d = s->dest;
3852
3853 if (forwarder_block_p (d))
3854 d = d->succ->dest;
3855 for (s2 = src1->succ; ; s2 = s2->succ_next)
3856 {
3857 basic_block d2 = s2->dest;
3858 if (forwarder_block_p (d2))
3859 d2 = d2->succ->dest;
3860 if (d == d2)
3861 break;
3862 }
3863 s->count += s2->count;
3864
3865 /* Take care to update possible forwarder blocks. We verified
3866 that there is no more than one in the chain, so we can't run
3867 into an infinite loop. */
3868 if (forwarder_block_p (s->dest))
3869 {
3870 s->dest->succ->count += s2->count;
3871 s->dest->count += s2->count;
3872 s->dest->frequency += EDGE_FREQUENCY (s);
3873 }
3874 if (forwarder_block_p (s2->dest))
3875 {
3876 s2->dest->succ->count -= s2->count;
3877 s2->dest->count -= s2->count;
3878 s2->dest->frequency -= EDGE_FREQUENCY (s);
3879 }
3880 if (!redirect_to->frequency && !src1->frequency)
3881 s->probability = (s->probability + s2->probability) / 2;
3882 else
3883 s->probability =
3884 ((s->probability * redirect_to->frequency +
3885 s2->probability * src1->frequency)
3886 / (redirect_to->frequency + src1->frequency));
3887 }
3888
3889 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3890 if (note)
3891 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3892
3893 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3894
3895 /* Skip possible basic block header. */
3896 if (GET_CODE (newpos1) == CODE_LABEL)
3897 newpos1 = NEXT_INSN (newpos1);
3898 if (GET_CODE (newpos1) == NOTE)
3899 newpos1 = NEXT_INSN (newpos1);
3900 last = src1->end;
3901
3902 /* Emit the jump insn. */
3903 label = block_label (redirect_to);
3904 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3905 JUMP_LABEL (src1->end) = label;
3906 LABEL_NUSES (label)++;
3907 if (basic_block_for_insn)
3908 set_block_for_new_insns (src1->end, src1);
3909
3910 /* Delete the now unreachable instructions. */
3911 flow_delete_insn_chain (newpos1, last);
3912
3913 /* Make sure there is a barrier after the new jump. */
3914 last = next_nonnote_insn (src1->end);
3915 if (!last || GET_CODE (last) != BARRIER)
3916 emit_barrier_after (src1->end);
3917
3918 /* Update CFG. */
3919 while (src1->succ)
3920 remove_edge (src1->succ);
3921 make_edge (NULL, src1, redirect_to, 0);
3922 src1->succ->probability = REG_BR_PROB_BASE;
3923 src1->succ->count = src1->count;
3924
3925 return true;
3926 }
3927
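/* A minimal sketch (kept under #if 0, not compiled) of the probability
   merge performed in try_crossjump_to_edge above.  When SRC1 is
   redirected into REDIRECT_TO, each outgoing edge probability becomes
   the average of the two original probabilities weighted by block
   frequency, falling back to a plain average when both frequencies are
   zero.  The helper and parameter names are hypothetical.  */
#if 0
static int
merged_edge_probability (prob1, freq1, prob2, freq2)
     int prob1, freq1;		/* edge probability/frequency in REDIRECT_TO */
     int prob2, freq2;		/* edge probability/frequency in SRC1 */
{
  if (freq1 == 0 && freq2 == 0)
    return (prob1 + prob2) / 2;

  return (prob1 * freq1 + prob2 * freq2) / (freq1 + freq2);
}
#endif
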
3928 /* Search the predecessors of BB for common insn sequences. When found,
3929 share code between them by redirecting control flow. Return true if
3930 any changes were made. */
3931
3932 static bool
3933 try_crossjump_bb (mode, bb)
3934 int mode;
3935 basic_block bb;
3936 {
3937 edge e, e2, nexte2, nexte, fallthru;
3938 bool changed;
3939
3940 /* Nothing to do if there are not at least two incoming edges. */
3941 if (!bb->pred || !bb->pred->pred_next)
3942 return false;
3943
3944 /* It is always cheapest to redirect a block that ends in a branch to
3945 a block that falls through into BB, as that adds no branches to the
3946 program. We'll try that combination first. */
3947 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3948 if (fallthru->flags & EDGE_FALLTHRU)
3949 break;
3950
3951 changed = false;
3952 for (e = bb->pred; e; e = nexte)
3953 {
3954 nexte = e->pred_next;
3955
3956 /* Elide complex edges now, as neither try_crossjump_to_edge
3957 nor outgoing_edges_match can handle them. */
3958 if (e->flags & EDGE_COMPLEX)
3959 continue;
3960
3961 /* As noted above, first try with the fallthru predecessor. */
3962 if (fallthru)
3963 {
3964 /* Don't combine the fallthru edge into anything else.
3965 If there is a match, we'll do it the other way around. */
3966 if (e == fallthru)
3967 continue;
3968
3969 if (try_crossjump_to_edge (mode, e, fallthru))
3970 {
3971 changed = true;
3972 nexte = bb->pred;
3973 continue;
3974 }
3975 }
3976
3977 /* Non-obvious work limiting check: Recognize that we're going
3978 to call try_crossjump_bb on every basic block. So if we have
3979 two blocks with lots of outgoing edges (a switch) and they
3980 share lots of common destinations, then we would do the
3981 cross-jump check once for each common destination.
3982
3983 Now, if the blocks actually are cross-jump candidates, then
3984 all of their destinations will be shared, which means that
3985 we only need to check them for cross-jump candidacy once. We
3986 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3987 choosing to do the check only when the edge in question is the
3988 first successor edge of its source block A. */
3989 if (e->src->succ != e)
3990 continue;
3991
3992 for (e2 = bb->pred; e2; e2 = nexte2)
3993 {
3994 nexte2 = e2->pred_next;
3995
3996 if (e2 == e)
3997 continue;
3998
3999 /* We've already checked the fallthru edge above. */
4000 if (e2 == fallthru)
4001 continue;
4002
4003 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4004 can handle complex edges. */
4005 if (e2->flags & EDGE_COMPLEX)
4006 continue;
4007
4008 /* The "first successor" check above only prevents multiple
4009 checks of crossjump(A,B). In order to prevent redundant
4010 checks of crossjump(B,A), require that A be the block
4011 with the lowest index. */
4012 if (e->src->index > e2->src->index)
4013 continue;
4014
4015 if (try_crossjump_to_edge (mode, e, e2))
4016 {
4017 changed = true;
4018 nexte = bb->pred;
4019 break;
4020 }
4021 }
4022 }
4023
4024 return changed;
4025 }
4026
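/* A minimal sketch (kept under #if 0, not compiled) of the work-limiting
   rule used in try_crossjump_bb above, reduced to plain pair
   enumeration.  Examining every ordered pair of N predecessors would
   cost N*(N-1) candidate comparisons; the "first successor" and "lowest
   index" tests amount to visiting each unordered pair once, as the
   i < j loop below does.  The function names are hypothetical, and
   check_candidate_pair stands in for the real matching test.  */
#if 0
static void
check_pred_pairs_once (preds, n)
     basic_block *preds;
     int n;
{
  int i, j;

  for (i = 0; i < n; i++)
    for (j = i + 1; j < n; j++)
      /* Each unordered pair (preds[i], preds[j]) is examined exactly
	 once, never again as (preds[j], preds[i]).  */
      check_candidate_pair (preds[i], preds[j]);
}
#endif
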
4027 /* Do simple CFG optimizations - basic block merging, simplifying of jump
4028 instructions etc. Return nonzero if changes were made. */
4029
4030 static bool
4031 try_optimize_cfg (mode)
4032 int mode;
4033 {
4034 int i;
4035 bool changed_overall = false;
4036 bool changed;
4037 int iterations = 0;
4038
4039 /* Attempt to merge blocks as made possible by edge removal. If a block
4040 has only one successor, and the successor has only one predecessor,
4041 they may be combined. */
4042
4043 do
4044 {
4045 changed = false;
4046 iterations++;
4047
4048 if (rtl_dump_file)
4049 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4050 iterations);
4051
4052 for (i = 0; i < n_basic_blocks;)
4053 {
4054 basic_block c, b = BASIC_BLOCK (i);
4055 edge s;
4056 bool changed_here = false;
4057
4058 /* Delete trivially dead basic blocks. */
4059 while (b->pred == NULL)
4060 {
4061 c = BASIC_BLOCK (b->index - 1);
4062 if (rtl_dump_file)
4063 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4064 flow_delete_block (b);
4065 changed = true;
4066 b = c;
4067 }
4068
4069 /* Remove code labels no longer used. Don't do this before
4070 CALL_PLACEHOLDER is removed, as some branches may be hidden
4071 within. */
4072 if (b->pred->pred_next == NULL
4073 && (b->pred->flags & EDGE_FALLTHRU)
4074 && !(b->pred->flags & EDGE_COMPLEX)
4075 && GET_CODE (b->head) == CODE_LABEL
4076 && (!(mode & CLEANUP_PRE_SIBCALL)
4077 || !tail_recursion_label_p (b->head))
4078 /* If the previous block ends with a condjump jumping to the next BB,
4079 we can't delete the label. */
4080 && (b->pred->src == ENTRY_BLOCK_PTR
4081 || !reg_mentioned_p (b->head, b->pred->src->end)))
4082 {
4083 rtx label = b->head;
4084 b->head = NEXT_INSN (b->head);
4085 flow_delete_insn_chain (label, label);
4086 if (rtl_dump_file)
4087 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4088 b->index);
4089 }
4090
4091 /* If we fall through an empty block, we can remove it. */
4092 if (b->pred->pred_next == NULL
4093 && (b->pred->flags & EDGE_FALLTHRU)
4094 && GET_CODE (b->head) != CODE_LABEL
4095 && forwarder_block_p (b)
4096 /* Note that forwarder_block_p being true ensures that there
4097 is a successor for this block. */
4098 && (b->succ->flags & EDGE_FALLTHRU)
4099 && n_basic_blocks > 1)
4100 {
4101 if (rtl_dump_file)
4102 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4103 b->index);
4104 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4105 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4106 flow_delete_block (b);
4107 changed = true;
4108 b = c;
4109 }
4110
4111 /* Merge blocks. Loop because chains of blocks might be
4112 combinable. */
4113 while ((s = b->succ) != NULL
4114 && s->succ_next == NULL
4115 && !(s->flags & EDGE_COMPLEX)
4116 && (c = s->dest) != EXIT_BLOCK_PTR
4117 && c->pred->pred_next == NULL
4118 /* If the jump insn has side effects,
4119 we can't kill the edge. */
4120 && (GET_CODE (b->end) != JUMP_INSN
4121 || onlyjump_p (b->end))
4122 && merge_blocks (s, b, c, mode))
4123 changed_here = true;
4124
4125 /* Simplify branch over branch. */
4126 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4127 changed_here = true;
4128
4129 /* If B has a single outgoing edge, but uses a non-trivial jump
4130 instruction without side-effects, we can either delete the
4131 jump entirely, or replace it with a simple unconditional jump.
4132 Use redirect_edge_and_branch to do the dirty work. */
4133 if (b->succ
4134 && ! b->succ->succ_next
4135 && b->succ->dest != EXIT_BLOCK_PTR
4136 && onlyjump_p (b->end)
4137 && redirect_edge_and_branch (b->succ, b->succ->dest))
4138 changed_here = true;
4139
4140 /* Simplify branch to branch. */
4141 if (try_forward_edges (mode, b))
4142 changed_here = true;
4143
4144 /* Look for shared code between blocks. */
4145 if ((mode & CLEANUP_CROSSJUMP)
4146 && try_crossjump_bb (mode, b))
4147 changed_here = true;
4148
4149 /* Don't get confused by the index shift caused by deleting
4150 blocks. */
4151 if (!changed_here)
4152 i = b->index + 1;
4153 else
4154 changed = true;
4155 }
4156
4157 if ((mode & CLEANUP_CROSSJUMP)
4158 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4159 changed = true;
4160
4161 #ifdef ENABLE_CHECKING
4162 if (changed)
4163 verify_flow_info ();
4164 #endif
4165
4166 changed_overall |= changed;
4167 }
4168 while (changed);
4169 return changed_overall;
4170 }
4171
4172 /* The given edge should potentially be a fallthru edge. If that is in
4173 fact true, delete the jump and barriers that are in the way. */
4174
4175 void
4176 tidy_fallthru_edge (e, b, c)
4177 edge e;
4178 basic_block b, c;
4179 {
4180 rtx q;
4181
4182 /* ??? In a late-running flow pass, other folks may have deleted basic
4183 blocks by nopping out blocks, leaving multiple BARRIERs between here
4184 and the target label. They ought to be chastised and fixed.
4185
4186 We can also wind up with a sequence of undeletable labels between
4187 one block and the next.
4188
4189 So search through a sequence of barriers, labels, and notes for
4190 the head of block C and check that we really do fall through. */
4191
4192 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4193 return;
4194
4195 /* Remove what will soon cease being the jump insn from the source block.
4196 If block B consisted only of this single jump, turn it into a deleted
4197 note. */
4198 q = b->end;
4199 if (GET_CODE (q) == JUMP_INSN
4200 && onlyjump_p (q)
4201 && (any_uncondjump_p (q)
4202 || (b->succ == e && e->succ_next == NULL)))
4203 {
4204 #ifdef HAVE_cc0
4205 /* If this was a conditional jump, we need to also delete
4206 the insn that set cc0. */
4207 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
4208 q = PREV_INSN (q);
4209 #endif
4210
4211 if (b->head == q)
4212 {
4213 PUT_CODE (q, NOTE);
4214 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4215 NOTE_SOURCE_FILE (q) = 0;
4216 }
4217 else
4218 {
4219 q = PREV_INSN (q);
4220
4221 /* We don't want a block to end on a line-number note since that has
4222 the potential to change the code between -g and not -g. */
4223 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4224 q = PREV_INSN (q);
4225 }
4226
4227 b->end = q;
4228 }
4229
4230 /* Selectively unlink the sequence. */
4231 if (q != PREV_INSN (c->head))
4232 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4233
4234 e->flags |= EDGE_FALLTHRU;
4235 }
4236
4237 /* Fix up edges that now fall through, or rather should now fall through
4238 but previously required a jump around now deleted blocks. Simplify
4239 the search by only examining blocks numerically adjacent, since this
4240 is how find_basic_blocks created them. */
4241
4242 static void
4243 tidy_fallthru_edges ()
4244 {
4245 int i;
4246
4247 for (i = 1; i < n_basic_blocks; ++i)
4248 {
4249 basic_block b = BASIC_BLOCK (i - 1);
4250 basic_block c = BASIC_BLOCK (i);
4251 edge s;
4252
4253 /* We care about simple conditional or unconditional jumps with
4254 a single successor.
4255
4256 If we had a conditional branch to the next instruction when
4257 find_basic_blocks was called, then there will only be one
4258 out edge for the block which ended with the conditional
4259 branch (since we do not create duplicate edges).
4260
4261 Furthermore, the edge will be marked as a fallthru because we
4262 merge the flags for the duplicate edges. So we do not want to
4263 check that the edge is not a FALLTHRU edge. */
4264 if ((s = b->succ) != NULL
4265 && ! (s->flags & EDGE_COMPLEX)
4266 && s->succ_next == NULL
4267 && s->dest == c
4268 /* If the jump insn has side effects, we can't tidy the edge. */
4269 && (GET_CODE (b->end) != JUMP_INSN
4270 || onlyjump_p (b->end)))
4271 tidy_fallthru_edge (s, b, c);
4272 }
4273 }
4274 \f
4275 /* Perform data flow analysis.
4276 F is the first insn of the function; FLAGS is a set of PROP_* flags
4277 to be used in accumulating flow info. */
4278
4279 void
4280 life_analysis (f, file, flags)
4281 rtx f;
4282 FILE *file;
4283 int flags;
4284 {
4285 #ifdef ELIMINABLE_REGS
4286 register int i;
4287 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4288 #endif
4289
4290 /* Record which registers will be eliminated. We use this in
4291 mark_used_regs. */
4292
4293 CLEAR_HARD_REG_SET (elim_reg_set);
4294
4295 #ifdef ELIMINABLE_REGS
4296 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4297 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4298 #else
4299 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4300 #endif
4301
4302 if (! optimize)
4303 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4304
4305 /* The post-reload life analysis has (on a global basis) the same
4306 registers live as were computed by reload itself. Otherwise
4307 elimination offsets and such may be incorrect.
4308
4309 Reload will mark some registers as live even though they do not
4310 appear in the rtl.
4311
4312 We don't want to create new auto-incs after reload, since they
4313 are unlikely to be useful and can cause problems with shared
4314 stack slots. */
4315 if (reload_completed)
4316 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4317
4318 /* We want alias analysis information for local dead store elimination. */
4319 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4320 init_alias_analysis ();
4321
4322 /* Always remove no-op moves. Do this before other processing so
4323 that we don't have to keep re-scanning them. */
4324 delete_noop_moves (f);
4325
4326 /* Some targets can emit simpler epilogues if they know that sp was
4327 not ever modified during the function. After reload, of course,
4328 we've already emitted the epilogue so there's no sense searching. */
4329 if (! reload_completed)
4330 notice_stack_pointer_modification (f);
4331
4332 /* Allocate and zero out data structures that will record the
4333 data from lifetime analysis. */
4334 allocate_reg_life_data ();
4335 allocate_bb_life_data ();
4336
4337 /* Find the set of registers live on function exit. */
4338 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4339
4340 /* "Update" life info from zero. It'd be nice to begin the
4341 relaxation with just the exit and noreturn blocks, but that set
4342 is not immediately handy. */
4343
4344 if (flags & PROP_REG_INFO)
4345 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4346 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4347
4348 /* Clean up. */
4349 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4350 end_alias_analysis ();
4351
4352 if (file)
4353 dump_flow_info (file);
4354
4355 free_basic_block_vars (1);
4356
4357 #ifdef ENABLE_CHECKING
4358 {
4359 rtx insn;
4360
4361 /* Search for any REG_LABEL notes which reference deleted labels. */
4362 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4363 {
4364 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4365
4366 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE && NOTE_LINE_NUMBER (XEXP (inote, 0)) == NOTE_INSN_DELETED_LABEL)
4367 abort ();
4368 }
4369 }
4370 #endif
4371 /* Removing dead insns should've made jumptables really dead. */
4372 delete_dead_jumptables ();
4373 }
4374
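/* A minimal sketch (kept under #if 0, not compiled) of how a caller
   might invoke life_analysis with a full set of PROP_* flags.  The exact
   combination a given pass wants is an assumption here, not a statement
   about any particular caller; all of the flags named do appear
   elsewhere in this file.  */
#if 0
  life_analysis (get_insns (), rtl_dump_file,
		 PROP_DEATH_NOTES | PROP_LOG_LINKS | PROP_REG_INFO
		 | PROP_KILL_DEAD_CODE | PROP_SCAN_DEAD_CODE
		 | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
#endif
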
4375 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4376 Search for REGNO. If found, abort if it is not wider than word_mode. */
4377
4378 static int
4379 verify_wide_reg_1 (px, pregno)
4380 rtx *px;
4381 void *pregno;
4382 {
4383 rtx x = *px;
4384 unsigned int regno = *(int *) pregno;
4385
4386 if (GET_CODE (x) == REG && REGNO (x) == regno)
4387 {
4388 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4389 abort ();
4390 return 1;
4391 }
4392 return 0;
4393 }
4394
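/* A minimal sketch (kept under #if 0, not compiled) of the for_each_rtx
   callback pattern that verify_wide_reg_1 above relies on: the callback
   sees a pointer to each subexpression in turn; returning zero continues
   the walk, and verify_wide_reg_1 returns 1 to stop it once the register
   has been found.  Here the walk merely counts REG expressions; the
   helper names are hypothetical.  */
#if 0
static int
count_regs_1 (px, data)
     rtx *px;
     void *data;
{
  if (GET_CODE (*px) == REG)
    ++*(int *) data;
  return 0;			/* Zero: keep walking.  */
}

static int
count_regs (x)
     rtx x;
{
  int n = 0;
  for_each_rtx (&x, count_regs_1, &n);
  return n;
}
#endif
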
4395 /* A subroutine of verify_local_live_at_start. Search through insns
4396 between HEAD and END looking for register REGNO. */
4397
4398 static void
4399 verify_wide_reg (regno, head, end)
4400 int regno;
4401 rtx head, end;
4402 {
4403 while (1)
4404 {
4405 if (INSN_P (head)
4406 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4407 return;
4408 if (head == end)
4409 break;
4410 head = NEXT_INSN (head);
4411 }
4412
4413 /* We didn't find the register at all. Something's way screwy. */
4414 if (rtl_dump_file)
4415 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4416 print_rtl_and_abort ();
4417 }
4418
4419 /* A subroutine of update_life_info. Verify that there are no untoward
4420 changes in live_at_start during a local update. */
4421
4422 static void
4423 verify_local_live_at_start (new_live_at_start, bb)
4424 regset new_live_at_start;
4425 basic_block bb;
4426 {
4427 if (reload_completed)
4428 {
4429 /* After reload, there are no pseudos, nor subregs of multi-word
4430 registers. The regsets should exactly match. */
4431 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4432 {
4433 if (rtl_dump_file)
4434 {
4435 fprintf (rtl_dump_file,
4436 "live_at_start mismatch in bb %d, aborting\n",
4437 bb->index);
4438 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4439 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4440 }
4441 print_rtl_and_abort ();
4442 }
4443 }
4444 else
4445 {
4446 int i;
4447
4448 /* Find the set of changed registers. */
4449 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4450
4451 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4452 {
4453 /* No registers should die. */
4454 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4455 {
4456 if (rtl_dump_file)
4457 fprintf (rtl_dump_file,
4458 "Register %d died unexpectedly in block %d\n", i,
4459 bb->index);
4460 print_rtl_and_abort ();
4461 }
4462
4463 /* Verify that the now-live register is wider than word_mode. */
4464 verify_wide_reg (i, bb->head, bb->end);
4465 });
4466 }
4467 }
4468
4469 /* Updates life information starting with the basic blocks set in BLOCKS.
4470 If BLOCKS is null, consider it to be the universal set.
4471
4472 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
4473 we are only expecting local modifications to basic blocks. If we find
4474 extra registers live at the beginning of a block, then we either killed
4475 useful data, or we have a broken split that wants data not provided.
4476 If we find registers removed from live_at_start, that means we have
4477 a broken peephole that is killing a register it shouldn't.
4478
4479 ??? This is not true in one situation -- when a pre-reload splitter
4480 generates subregs of a multi-word pseudo, current life analysis will
4481 lose the kill. So we _can_ have a pseudo go live. How irritating.
4482
4483 Including PROP_REG_INFO does not properly refresh regs_ever_live
4484 unless the caller resets it to zero. */
4485
4486 void
4487 update_life_info (blocks, extent, prop_flags)
4488 sbitmap blocks;
4489 enum update_life_extent extent;
4490 int prop_flags;
4491 {
4492 regset tmp;
4493 regset_head tmp_head;
4494 int i;
4495
4496 tmp = INITIALIZE_REG_SET (tmp_head);
4497
4498 /* Changes to the CFG are only allowed when
4499 doing a global update for the entire CFG. */
4500 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4501 && (extent == UPDATE_LIFE_LOCAL || blocks))
4502 abort ();
4503
4504 /* For a global update, we go through the relaxation process again. */
4505 if (extent != UPDATE_LIFE_LOCAL)
4506 {
4507 for ( ; ; )
4508 {
4509 int changed = 0;
4510
4511 calculate_global_regs_live (blocks, blocks,
4512 prop_flags & (PROP_SCAN_DEAD_CODE
4513 | PROP_ALLOW_CFG_CHANGES));
4514
4515 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4516 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4517 break;
4518
4519 /* Removing dead code may allow the CFG to be simplified which
4520 in turn may allow for further dead code detection / removal. */
4521 for (i = n_basic_blocks - 1; i >= 0; --i)
4522 {
4523 basic_block bb = BASIC_BLOCK (i);
4524
4525 COPY_REG_SET (tmp, bb->global_live_at_end);
4526 changed |= propagate_block (bb, tmp, NULL, NULL,
4527 prop_flags & (PROP_SCAN_DEAD_CODE
4528 | PROP_KILL_DEAD_CODE));
4529 }
4530
4531 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4532 break;
4533
4534 delete_unreachable_blocks ();
4535 mark_critical_edges ();
4536 }
4537
4538 /* If asked, remove notes from the blocks we'll update. */
4539 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4540 count_or_remove_death_notes (blocks, 1);
4541 }
4542
4543 if (blocks)
4544 {
4545 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4546 {
4547 basic_block bb = BASIC_BLOCK (i);
4548
4549 COPY_REG_SET (tmp, bb->global_live_at_end);
4550 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4551
4552 if (extent == UPDATE_LIFE_LOCAL)
4553 verify_local_live_at_start (tmp, bb);
4554 });
4555 }
4556 else
4557 {
4558 for (i = n_basic_blocks - 1; i >= 0; --i)
4559 {
4560 basic_block bb = BASIC_BLOCK (i);
4561
4562 COPY_REG_SET (tmp, bb->global_live_at_end);
4563 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4564
4565 if (extent == UPDATE_LIFE_LOCAL)
4566 verify_local_live_at_start (tmp, bb);
4567 }
4568 }
4569
4570 FREE_REG_SET (tmp);
4571
4572 if (prop_flags & PROP_REG_INFO)
4573 {
4574 /* The only pseudos that are live at the beginning of the function
4575 are those that were not set anywhere in the function. local-alloc
4576 doesn't know how to handle these correctly, so mark them as not
4577 local to any one basic block. */
4578 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4579 FIRST_PSEUDO_REGISTER, i,
4580 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4581
4582 /* We have a problem with any pseudoreg that lives across the setjmp.
4583 ANSI says that if a user variable does not change in value between
4584 the setjmp and the longjmp, then the longjmp preserves it. This
4585 includes longjmp from a place where the pseudo appears dead.
4586 (In principle, the value still exists if it is in scope.)
4587 If the pseudo goes in a hard reg, some other value may occupy
4588 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4589 Conclusion: such a pseudo must not go in a hard reg. */
4590 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4591 FIRST_PSEUDO_REGISTER, i,
4592 {
4593 if (regno_reg_rtx[i] != 0)
4594 {
4595 REG_LIVE_LENGTH (i) = -1;
4596 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4597 }
4598 });
4599 }
4600 }
4601
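/* A minimal sketch (kept under #if 0, not compiled) of a local update:
   a pass that has rewritten a couple of blocks collects their indices in
   an sbitmap and asks for just those blocks to be rescanned, rather than
   re-running the global relaxation.  The block variables are
   hypothetical; a global update would instead pass a null bitmap.  */
#if 0
  {
    sbitmap modified = sbitmap_alloc (n_basic_blocks);

    sbitmap_zero (modified);
    SET_BIT (modified, split_bb->index);	/* hypothetical blocks */
    SET_BIT (modified, merged_bb->index);

    update_life_info (modified, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    sbitmap_free (modified);
  }
#endif
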
4602 /* Free the variables allocated by find_basic_blocks.
4603
4604 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4605
4606 void
4607 free_basic_block_vars (keep_head_end_p)
4608 int keep_head_end_p;
4609 {
4610 if (basic_block_for_insn)
4611 {
4612 VARRAY_FREE (basic_block_for_insn);
4613 basic_block_for_insn = NULL;
4614 }
4615
4616 if (! keep_head_end_p)
4617 {
4618 if (basic_block_info)
4619 {
4620 clear_edges ();
4621 VARRAY_FREE (basic_block_info);
4622 }
4623 n_basic_blocks = 0;
4624
4625 ENTRY_BLOCK_PTR->aux = NULL;
4626 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4627 EXIT_BLOCK_PTR->aux = NULL;
4628 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4629 }
4630 }
4631
4632 /* Delete any insns that copy a register to itself. */
4633
4634 void
4635 delete_noop_moves (f)
4636 rtx f ATTRIBUTE_UNUSED;
4637 {
4638 int i;
4639 rtx insn, next;
4640 basic_block bb;
4641
4642 for (i = 0; i < n_basic_blocks; i++)
4643 {
4644 bb = BASIC_BLOCK (i);
4645 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4646 {
4647 next = NEXT_INSN (insn);
4648 if (INSN_P (insn) && noop_move_p (insn))
4649 {
4650 /* Do not call flow_delete_insn here, so as not to confuse the
4651 backward pointers of a LIBCALL block. */
4652 PUT_CODE (insn, NOTE);
4653 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4654 NOTE_SOURCE_FILE (insn) = 0;
4655 }
4656 }
4657 }
4658 }
4659
4660 /* Delete any jump tables never referenced. We can't delete them at the
4661 time of removing the tablejump insn, as they are referenced by the
4662 preceding insns computing the destination, so we delay deleting them
4663 and garbage collect them once life information is computed. */
4664 static void
4665 delete_dead_jumptables ()
4666 {
4667 rtx insn, next;
4668 for (insn = get_insns (); insn; insn = next)
4669 {
4670 next = NEXT_INSN (insn);
4671 if (GET_CODE (insn) == CODE_LABEL
4672 && LABEL_NUSES (insn) == 0
4673 && GET_CODE (next) == JUMP_INSN
4674 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4675 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4676 {
4677 if (rtl_dump_file)
4678 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4679 flow_delete_insn (NEXT_INSN (insn));
4680 flow_delete_insn (insn);
4681 next = NEXT_INSN (next);
4682 }
4683 }
4684 }
4685
4686 /* Determine if the stack pointer is constant over the life of the function.
4687 Only useful before prologues have been emitted. */
4688
4689 static void
4690 notice_stack_pointer_modification_1 (x, pat, data)
4691 rtx x;
4692 rtx pat ATTRIBUTE_UNUSED;
4693 void *data ATTRIBUTE_UNUSED;
4694 {
4695 if (x == stack_pointer_rtx
4696 /* The stack pointer is only modified indirectly as the result
4697 of a push until later in flow. See the comments in rtl.texi
4698 regarding Embedded Side-Effects on Addresses. */
4699 || (GET_CODE (x) == MEM
4700 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4701 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4702 current_function_sp_is_unchanging = 0;
4703 }
4704
4705 static void
4706 notice_stack_pointer_modification (f)
4707 rtx f;
4708 {
4709 rtx insn;
4710
4711 /* Assume that the stack pointer is unchanging if alloca hasn't
4712 been used. */
4713 current_function_sp_is_unchanging = !current_function_calls_alloca;
4714 if (! current_function_sp_is_unchanging)
4715 return;
4716
4717 for (insn = f; insn; insn = NEXT_INSN (insn))
4718 {
4719 if (INSN_P (insn))
4720 {
4721 /* Check if insn modifies the stack pointer. */
4722 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4723 NULL);
4724 if (! current_function_sp_is_unchanging)
4725 return;
4726 }
4727 }
4728 }
4729
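/* A minimal sketch (kept under #if 0, not compiled) of the note_stores
   callback pattern used by notice_stack_pointer_modification above,
   applied to a different question: counting how many register
   destinations an insn's pattern stores to.  note_stores hands each
   store destination to the callback along with the enclosing SET or
   CLOBBER and the opaque DATA pointer.  The helper names are
   hypothetical.  */
#if 0
static void
count_reg_stores_1 (x, pat, data)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (x) == REG)
    ++*(int *) data;
}

static int
count_reg_stores (insn)
     rtx insn;
{
  int n = 0;
  note_stores (PATTERN (insn), count_reg_stores_1, &n);
  return n;
}
#endif
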
4730 /* Mark a register in SET. Hard registers in large modes get all
4731 of their component registers set as well. */
4732
4733 static void
4734 mark_reg (reg, xset)
4735 rtx reg;
4736 void *xset;
4737 {
4738 regset set = (regset) xset;
4739 int regno = REGNO (reg);
4740
4741 if (GET_MODE (reg) == BLKmode)
4742 abort ();
4743
4744 SET_REGNO_REG_SET (set, regno);
4745 if (regno < FIRST_PSEUDO_REGISTER)
4746 {
4747 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4748 while (--n > 0)
4749 SET_REGNO_REG_SET (set, regno + n);
4750 }
4751 }
4752
4753 /* Mark those regs which are needed at the end of the function as live
4754 at the end of the last basic block. */
4755
4756 static void
4757 mark_regs_live_at_end (set)
4758 regset set;
4759 {
4760 unsigned int i;
4761
4762 /* If exiting needs the right stack value, consider the stack pointer
4763 live at the end of the function. */
4764 if ((HAVE_epilogue && reload_completed)
4765 || ! EXIT_IGNORE_STACK
4766 || (! FRAME_POINTER_REQUIRED
4767 && ! current_function_calls_alloca
4768 && flag_omit_frame_pointer)
4769 || current_function_sp_is_unchanging)
4770 {
4771 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4772 }
4773
4774 /* Mark the frame pointer if needed at the end of the function. If
4775 we end up eliminating it, it will be removed from the live list
4776 of each basic block by reload. */
4777
4778 if (! reload_completed || frame_pointer_needed)
4779 {
4780 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4781 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4782 /* If they are different, also mark the hard frame pointer as live. */
4783 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4784 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4785 #endif
4786 }
4787
4788 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4789 /* Many architectures have a GP register even without flag_pic.
4790 Assume the pic register is not in use, or will be handled by
4791 other means, if it is not fixed. */
4792 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4793 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4794 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4795 #endif
4796
4797 /* Mark all global registers, and all registers used by the epilogue
4798 as being live at the end of the function since they may be
4799 referenced by our caller. */
4800 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4801 if (global_regs[i] || EPILOGUE_USES (i))
4802 SET_REGNO_REG_SET (set, i);
4803
4804 if (HAVE_epilogue && reload_completed)
4805 {
4806 /* Mark all call-saved registers that we actually used. */
4807 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4808 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4809 SET_REGNO_REG_SET (set, i);
4810 }
4811
4812 #ifdef EH_RETURN_DATA_REGNO
4813 /* Mark the registers that will contain data for the handler. */
4814 if (reload_completed && current_function_calls_eh_return)
4815 for (i = 0; ; ++i)
4816 {
4817 unsigned regno = EH_RETURN_DATA_REGNO(i);
4818 if (regno == INVALID_REGNUM)
4819 break;
4820 SET_REGNO_REG_SET (set, regno);
4821 }
4822 #endif
4823 #ifdef EH_RETURN_STACKADJ_RTX
4824 if ((! HAVE_epilogue || ! reload_completed)
4825 && current_function_calls_eh_return)
4826 {
4827 rtx tmp = EH_RETURN_STACKADJ_RTX;
4828 if (tmp && REG_P (tmp))
4829 mark_reg (tmp, set);
4830 }
4831 #endif
4832 #ifdef EH_RETURN_HANDLER_RTX
4833 if ((! HAVE_epilogue || ! reload_completed)
4834 && current_function_calls_eh_return)
4835 {
4836 rtx tmp = EH_RETURN_HANDLER_RTX;
4837 if (tmp && REG_P (tmp))
4838 mark_reg (tmp, set);
4839 }
4840 #endif
4841
4842 /* Mark function return value. */
4843 diddle_return_value (mark_reg, set);
4844 }
4845
4846 /* Callback function for for_each_successor_phi. DATA is a regset.
4847 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4848 INSN, in the regset. */
4849
4850 static int
4851 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4852 rtx insn ATTRIBUTE_UNUSED;
4853 int dest_regno ATTRIBUTE_UNUSED;
4854 int src_regno;
4855 void *data;
4856 {
4857 regset live = (regset) data;
4858 SET_REGNO_REG_SET (live, src_regno);
4859 return 0;
4860 }
4861
4862 /* Propagate global life info around the graph of basic blocks. Begin
4863 considering blocks with their corresponding bit set in BLOCKS_IN.
4864 If BLOCKS_IN is null, consider it the universal set.
4865
4866 BLOCKS_OUT is set for every block that was changed. */
4867
4868 static void
4869 calculate_global_regs_live (blocks_in, blocks_out, flags)
4870 sbitmap blocks_in, blocks_out;
4871 int flags;
4872 {
4873 basic_block *queue, *qhead, *qtail, *qend;
4874 regset tmp, new_live_at_end, call_used;
4875 regset_head tmp_head, call_used_head;
4876 regset_head new_live_at_end_head;
4877 int i;
4878
4879 tmp = INITIALIZE_REG_SET (tmp_head);
4880 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4881 call_used = INITIALIZE_REG_SET (call_used_head);
4882
4883 /* Inconveniently, this is only readily available in hard reg set form. */
4884 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4885 if (call_used_regs[i])
4886 SET_REGNO_REG_SET (call_used, i);
4887
4888 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4889 because the `head == tail' style test for an empty queue doesn't
4890 work with a full queue. */
4891 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4892 qtail = queue;
4893 qhead = qend = queue + n_basic_blocks + 2;
4894
4895 /* Queue the blocks set in the initial mask. Do this in reverse block
4896 number order so that the first round is more likely to do useful
4897 work. We use a non-null AUX field to flag that a block is queued. */
4898 if (blocks_in)
4899 {
4900 /* Clear out the garbage that might be hanging out in bb->aux. */
4901 for (i = n_basic_blocks - 1; i >= 0; --i)
4902 BASIC_BLOCK (i)->aux = NULL;
4903
4904 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4905 {
4906 basic_block bb = BASIC_BLOCK (i);
4907 *--qhead = bb;
4908 bb->aux = bb;
4909 });
4910 }
4911 else
4912 {
4913 for (i = 0; i < n_basic_blocks; ++i)
4914 {
4915 basic_block bb = BASIC_BLOCK (i);
4916 *--qhead = bb;
4917 bb->aux = bb;
4918 }
4919 }
4920
4921 if (blocks_out)
4922 sbitmap_zero (blocks_out);
4923
4924 /* We work through the queue until there are no more blocks. What
4925 is live at the end of this block is precisely the union of what
4926 is live at the beginning of all its successors. So, we set its
4927 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4928 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4929 this block by walking through the instructions in this block in
4930 reverse order and updating as we go. If that changed
4931 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4932 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4933
4934 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4935 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4936 must either be live at the end of the block, or used within the
4937 block. In the latter case, it will certainly never disappear
4938 from GLOBAL_LIVE_AT_START. In the former case, the register
4939 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4940 for one of the successor blocks. By induction, that cannot
4941 occur. */
4942 while (qhead != qtail)
4943 {
4944 int rescan, changed;
4945 basic_block bb;
4946 edge e;
4947
4948 bb = *qhead++;
4949 if (qhead == qend)
4950 qhead = queue;
4951 bb->aux = NULL;
4952
4953 /* Begin by propagating live_at_start from the successor blocks. */
4954 CLEAR_REG_SET (new_live_at_end);
4955 for (e = bb->succ; e; e = e->succ_next)
4956 {
4957 basic_block sb = e->dest;
4958
4959 /* Call-clobbered registers die across exception and call edges. */
4960 /* ??? Abnormal call edges ignored for the moment, as this gets
4961 confused by sibling call edges, which crashes reg-stack. */
4962 if (e->flags & EDGE_EH)
4963 {
4964 bitmap_operation (tmp, sb->global_live_at_start,
4965 call_used, BITMAP_AND_COMPL);
4966 IOR_REG_SET (new_live_at_end, tmp);
4967 }
4968 else
4969 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4970 }
4971
4972 /* The all-important stack pointer must always be live. */
4973 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4974
4975 /* Before reload, there are a few registers that must be forced
4976 live everywhere -- which might not already be the case for
4977 blocks within infinite loops. */
4978 if (! reload_completed)
4979 {
4980 /* Any reference to any pseudo before reload is a potential
4981 reference of the frame pointer. */
4982 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4983
4984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4985 /* Pseudos with argument area equivalences may require
4986 reloading via the argument pointer. */
4987 if (fixed_regs[ARG_POINTER_REGNUM])
4988 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4989 #endif
4990
4991 /* Any constant, or pseudo with constant equivalences, may
4992 require reloading from memory using the pic register. */
4993 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4994 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4995 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4996 }
4997
4998 /* Regs used in phi nodes are not included in
4999 global_live_at_start, since they are live only along a
5000 particular edge. Set those regs that are live because of a
5001 phi node alternative corresponding to this particular block. */
5002 if (in_ssa_form)
5003 for_each_successor_phi (bb, &set_phi_alternative_reg,
5004 new_live_at_end);
5005
5006 if (bb == ENTRY_BLOCK_PTR)
5007 {
5008 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5009 continue;
5010 }
5011
5012 /* On our first pass through this block, we'll go ahead and continue.
5013 Recognize the first pass by local_set being NULL. On subsequent
5014 passes, we get to skip out early if live_at_end wouldn't have changed. */
5015
5016 if (bb->local_set == NULL)
5017 {
5018 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5019 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5020 rescan = 1;
5021 }
5022 else
5023 {
5024 /* If any bits were removed from live_at_end, we'll have to
5025 rescan the block. This wouldn't be necessary if we had
5026 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
5027 local_live is really dependent on live_at_end. */
5028 CLEAR_REG_SET (tmp);
5029 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5030 new_live_at_end, BITMAP_AND_COMPL);
5031
5032 if (! rescan)
5033 {
5034 /* If any of the registers in the new live_at_end set are
5035 conditionally set in this basic block, we must rescan.
5036 This is because conditional lifetimes at the end of the
5037 block do not just take the live_at_end set into account,
5038 but also the liveness at the start of each successor
5039 block. We can miss changes in those sets if we only
5040 compare the new live_at_end against the previous one. */
5041 CLEAR_REG_SET (tmp);
5042 rescan = bitmap_operation (tmp, new_live_at_end,
5043 bb->cond_local_set, BITMAP_AND);
5044 }
5045
5046 if (! rescan)
5047 {
5048 /* Find the set of changed bits. Take this opportunity
5049 to notice when this set is empty and exit early. */
5050 CLEAR_REG_SET (tmp);
5051 changed = bitmap_operation (tmp, bb->global_live_at_end,
5052 new_live_at_end, BITMAP_XOR);
5053 if (! changed)
5054 continue;
5055
5056 /* If any of the changed bits overlap with local_set,
5057 we'll have to rescan the block. Detect overlap by
5058 the AND with ~local_set turning off bits. */
5059 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5060 BITMAP_AND_COMPL);
5061 }
5062 }
5063
5064 /* Let our caller know that BB changed enough to require its
5065 death notes updated. */
5066 if (blocks_out)
5067 SET_BIT (blocks_out, bb->index);
5068
5069 if (! rescan)
5070 {
5071 /* Add to live_at_start the set of all registers in
5072 new_live_at_end that aren't in the old live_at_end. */
5073
5074 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5075 BITMAP_AND_COMPL);
5076 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5077
5078 changed = bitmap_operation (bb->global_live_at_start,
5079 bb->global_live_at_start,
5080 tmp, BITMAP_IOR);
5081 if (! changed)
5082 continue;
5083 }
5084 else
5085 {
5086 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5087
5088 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5089 into live_at_start. */
5090 propagate_block (bb, new_live_at_end, bb->local_set,
5091 bb->cond_local_set, flags);
5092
5093 /* If live_at_start didn't change, no need to go further. */
5094 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5095 continue;
5096
5097 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5098 }
5099
5100 /* Queue all predecessors of BB so that we may re-examine
5101 their live_at_end. */
5102 for (e = bb->pred; e; e = e->pred_next)
5103 {
5104 basic_block pb = e->src;
5105 if (pb->aux == NULL)
5106 {
5107 *qtail++ = pb;
5108 if (qtail == qend)
5109 qtail = queue;
5110 pb->aux = pb;
5111 }
5112 }
5113 }
5114
5115 FREE_REG_SET (tmp);
5116 FREE_REG_SET (new_live_at_end);
5117 FREE_REG_SET (call_used);
5118
5119 if (blocks_out)
5120 {
5121 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5122 {
5123 basic_block bb = BASIC_BLOCK (i);
5124 FREE_REG_SET (bb->local_set);
5125 FREE_REG_SET (bb->cond_local_set);
5126 });
5127 }
5128 else
5129 {
5130 for (i = n_basic_blocks - 1; i >= 0; --i)
5131 {
5132 basic_block bb = BASIC_BLOCK (i);
5133 FREE_REG_SET (bb->local_set);
5134 FREE_REG_SET (bb->cond_local_set);
5135 }
5136 }
5137
5138 free (queue);
5139 }
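
/* A minimal sketch (kept under #if 0, not compiled) of the queue
   handling used by calculate_global_regs_live above: a circular array
   with a spare slot, so that head == tail unambiguously means the queue
   is empty, plus a per-block flag playing the role of bb->aux to keep a
   block from being queued twice.  All names here are hypothetical.  */
#if 0
static int *bb_queue;		/* circular buffer of block indices */
static int bbq_head, bbq_tail, bbq_size;
static char *bb_queued_p;	/* nonzero while a block is on the queue */

static void
queue_block (b)
     int b;
{
  if (bb_queued_p[b])
    return;
  bb_queued_p[b] = 1;
  bb_queue[bbq_tail++] = b;
  if (bbq_tail == bbq_size)
    bbq_tail = 0;
}

static int
dequeue_block ()
{
  int b = bb_queue[bbq_head++];
  if (bbq_head == bbq_size)
    bbq_head = 0;
  bb_queued_p[b] = 0;
  return b;
}
#endif
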
5140 \f
5141 /* Subroutines of life analysis. */
5142
5143 /* Allocate the permanent data structures that represent the results
5144 of life analysis. Not static since used also for stupid life analysis. */
5145
5146 void
5147 allocate_bb_life_data ()
5148 {
5149 register int i;
5150
5151 for (i = 0; i < n_basic_blocks; i++)
5152 {
5153 basic_block bb = BASIC_BLOCK (i);
5154
5155 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5156 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5157 }
5158
5159 ENTRY_BLOCK_PTR->global_live_at_end
5160 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5161 EXIT_BLOCK_PTR->global_live_at_start
5162 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5163
5164 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5165 }
5166
5167 void
5168 allocate_reg_life_data ()
5169 {
5170 int i;
5171
5172 max_regno = max_reg_num ();
5173
5174 /* Recalculate the register space, in case it has grown. Old style
5175 vector oriented regsets would set regset_{size,bytes} here also. */
5176 allocate_reg_info (max_regno, FALSE, FALSE);
5177
5178 /* Reset all the data we'll collect in propagate_block and its
5179 subroutines. */
5180 for (i = 0; i < max_regno; i++)
5181 {
5182 REG_N_SETS (i) = 0;
5183 REG_N_REFS (i) = 0;
5184 REG_N_DEATHS (i) = 0;
5185 REG_N_CALLS_CROSSED (i) = 0;
5186 REG_LIVE_LENGTH (i) = 0;
5187 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5188 }
5189 }
5190
5191 /* Delete dead instructions for propagate_block. */
5192
5193 static void
5194 propagate_block_delete_insn (bb, insn)
5195 basic_block bb;
5196 rtx insn;
5197 {
5198 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5199
5200 /* If the insn referred to a label, and that label was attached to
5201 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5202 pretty much mandatory to delete it, because the ADDR_VEC may be
5203 referencing labels that no longer exist.
5204
5205 INSN may reference a deleted label, particularly when a jump
5206 table has been optimized into a direct jump. There's no
5207 real good way to fix up the reference to the deleted label
5208 when the label is deleted, so we just allow it here.
5209
5210 After dead code elimination is complete, we do search for
5211 any REG_LABEL notes which reference deleted labels as a
5212 sanity check. */
5213
5214 if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
5215 {
5216 rtx label = XEXP (inote, 0);
5217 rtx next;
5218
5219 /* The label may be forced if it has been put in the constant
5220 pool. If that is the only use, we must discard the table
5221 jump following it, but not the label itself. */
5222 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5223 && (next = next_nonnote_insn (label)) != NULL
5224 && GET_CODE (next) == JUMP_INSN
5225 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5226 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5227 {
5228 rtx pat = PATTERN (next);
5229 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5230 int len = XVECLEN (pat, diff_vec_p);
5231 int i;
5232
5233 for (i = 0; i < len; i++)
5234 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5235
5236 flow_delete_insn (next);
5237 }
5238 }
5239
5240 if (bb->end == insn)
5241 bb->end = PREV_INSN (insn);
5242 flow_delete_insn (insn);
5243 }
5244
5245 /* Delete dead libcalls for propagate_block. Return the insn
5246 before the libcall. */
5247
5248 static rtx
5249 propagate_block_delete_libcall (bb, insn, note)
5250 basic_block bb;
5251 rtx insn, note;
5252 {
5253 rtx first = XEXP (note, 0);
5254 rtx before = PREV_INSN (first);
5255
5256 if (insn == bb->end)
5257 bb->end = before;
5258
5259 flow_delete_insn_chain (first, insn);
5260 return before;
5261 }
5262
5263 /* Update the life-status of regs for one insn. Return the previous insn. */
5264
5265 rtx
5266 propagate_one_insn (pbi, insn)
5267 struct propagate_block_info *pbi;
5268 rtx insn;
5269 {
5270 rtx prev = PREV_INSN (insn);
5271 int flags = pbi->flags;
5272 int insn_is_dead = 0;
5273 int libcall_is_dead = 0;
5274 rtx note;
5275 int i;
5276
5277 if (! INSN_P (insn))
5278 return prev;
5279
5280 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5281 if (flags & PROP_SCAN_DEAD_CODE)
5282 {
5283 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5284 libcall_is_dead = (insn_is_dead && note != 0
5285 && libcall_dead_p (pbi, note, insn));
5286 }
5287
5288 /* If an instruction consists of just dead store(s) on the final pass,
5289 delete it. */
5290 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5291 {
5292 /* If we're trying to delete a prologue or epilogue instruction
5293 that isn't flagged as possibly being dead, something is wrong.
5294 But if we are keeping the stack pointer depressed, we might well
5295 be deleting insns that are used to compute the amount to update
5296 it by, so they are fine. */
5297 if (reload_completed
5298 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5299 && (TYPE_RETURNS_STACK_DEPRESSED
5300 (TREE_TYPE (current_function_decl))))
5301 && (((HAVE_epilogue || HAVE_prologue)
5302 && prologue_epilogue_contains (insn))
5303 || (HAVE_sibcall_epilogue
5304 && sibcall_epilogue_contains (insn)))
5305 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5306 abort ();
5307
5308 /* Record sets. Do this even for dead instructions, since they
5309 would have killed the values if they hadn't been deleted. */
5310 mark_set_regs (pbi, PATTERN (insn), insn);
5311
5312 /* CC0 is now known to be dead. Either this insn used it,
5313 in which case it doesn't anymore, or clobbered it,
5314 so the next insn can't use it. */
5315 pbi->cc0_live = 0;
5316
5317 if (libcall_is_dead)
5318 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5319 else
5320 propagate_block_delete_insn (pbi->bb, insn);
5321
5322 return prev;
5323 }
5324
5325 /* See if this is an increment or decrement that can be merged into
5326 a following memory address. */
5327 #ifdef AUTO_INC_DEC
5328 {
5329 register rtx x = single_set (insn);
5330
5331 /* Does this instruction increment or decrement a register? */
5332 if ((flags & PROP_AUTOINC)
5333 && x != 0
5334 && GET_CODE (SET_DEST (x)) == REG
5335 && (GET_CODE (SET_SRC (x)) == PLUS
5336 || GET_CODE (SET_SRC (x)) == MINUS)
5337 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5338 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5339 /* Ok, look for a following memory ref we can combine with.
5340 If one is found, change the memory ref to a PRE_INC
5341 or PRE_DEC, cancel this insn, and return 1.
5342 Return 0 if nothing has been done. */
5343 && try_pre_increment_1 (pbi, insn))
5344 return prev;
5345 }
5346 #endif /* AUTO_INC_DEC */
5347
5348 CLEAR_REG_SET (pbi->new_set);
5349
5350 /* If this is not the final pass, and this insn is copying the value of
5351 a library call and it's dead, don't scan the insns that perform the
5352 library call, so that the call's arguments are not marked live. */
5353 if (libcall_is_dead)
5354 {
5355 /* Record the death of the dest reg. */
5356 mark_set_regs (pbi, PATTERN (insn), insn);
5357
5358 insn = XEXP (note, 0);
5359 return PREV_INSN (insn);
5360 }
5361 else if (GET_CODE (PATTERN (insn)) == SET
5362 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5363 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5364 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5365 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5366 /* We have an insn to pop a constant amount off the stack.
5367 (Such insns use PLUS regardless of the direction of the stack,
5368 and any insn to adjust the stack by a constant is always a pop.)
5369 These insns, if not dead stores, have no effect on life. */
5370 ;
5371 else
5372 {
5373 /* Any regs live at the time of a call instruction must not go
5374 in a register clobbered by calls. Find all regs now live and
5375 record this for them. */
5376
5377 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5378 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5379 { REG_N_CALLS_CROSSED (i)++; });
5380
5381 /* Record sets. Do this even for dead instructions, since they
5382 would have killed the values if they hadn't been deleted. */
5383 mark_set_regs (pbi, PATTERN (insn), insn);
5384
5385 if (GET_CODE (insn) == CALL_INSN)
5386 {
5387 register int i;
5388 rtx note, cond;
5389
5390 cond = NULL_RTX;
5391 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5392 cond = COND_EXEC_TEST (PATTERN (insn));
5393
5394 /* Non-constant calls clobber memory. */
5395 if (! CONST_CALL_P (insn))
5396 {
5397 free_EXPR_LIST_list (&pbi->mem_set_list);
5398 pbi->mem_set_list_len = 0;
5399 }
5400
5401 /* There may be extra registers to be clobbered. */
5402 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5403 note;
5404 note = XEXP (note, 1))
5405 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5406 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5407 cond, insn, pbi->flags);
5408
5409 /* Calls change all call-used and global registers. */
5410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5411 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5412 {
5413 /* We do not want REG_UNUSED notes for these registers. */
5414 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5415 cond, insn,
5416 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5417 }
5418 }
5419
5420 /* If an insn doesn't use CC0, it becomes dead since we assume
5421 that every insn clobbers it. So show it dead here;
5422 mark_used_regs will set it live if it is referenced. */
5423 pbi->cc0_live = 0;
5424
5425 /* Record uses. */
5426 if (! insn_is_dead)
5427 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5428
5429 /* Sometimes we may have inserted something before INSN (such as a move)
5430 when we make an auto-inc. So ensure we will scan those insns. */
5431 #ifdef AUTO_INC_DEC
5432 prev = PREV_INSN (insn);
5433 #endif
5434
5435 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5436 {
5437 register int i;
5438 rtx note, cond;
5439
5440 cond = NULL_RTX;
5441 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5442 cond = COND_EXEC_TEST (PATTERN (insn));
5443
5444 /* Calls use their arguments. */
5445 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5446 note;
5447 note = XEXP (note, 1))
5448 if (GET_CODE (XEXP (note, 0)) == USE)
5449 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5450 cond, insn);
5451
5452 /* The stack ptr is used (honorarily) by a CALL insn. */
5453 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5454
5455 /* Calls may also reference any of the global registers,
5456 so they are made live. */
5457 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5458 if (global_regs[i])
5459 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5460 cond, insn);
5461 }
5462 }
5463
5464 /* On the final pass, update counts of how many insns each reg
5465 is live in. */
5466 if (flags & PROP_REG_INFO)
5467 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5468 { REG_LIVE_LENGTH (i)++; });
5469
5470 return prev;
5471 }
5472
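/* A minimal sketch (kept under #if 0, not compiled) of the stack-pop
   test that propagate_one_insn above performs inline: an insn whose
   whole pattern is (set (reg sp) (plus (reg sp) (const_int N))) adjusts
   the stack by a constant and, unless it is a dead store, has no effect
   on register lifetimes.  The helper name is hypothetical.  */
#if 0
static int
constant_stack_adjust_p (pat)
     rtx pat;
{
  return (GET_CODE (pat) == SET
	  && SET_DEST (pat) == stack_pointer_rtx
	  && GET_CODE (SET_SRC (pat)) == PLUS
	  && XEXP (SET_SRC (pat), 0) == stack_pointer_rtx
	  && GET_CODE (XEXP (SET_SRC (pat), 1)) == CONST_INT);
}
#endif
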
5473 /* Initialize a propagate_block_info struct for public consumption.
5474 Note that the structure itself is opaque to this file, but that
5475 the user can use the regsets provided here. */
5476
5477 struct propagate_block_info *
5478 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5479 basic_block bb;
5480 regset live, local_set, cond_local_set;
5481 int flags;
5482 {
5483 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5484
5485 pbi->bb = bb;
5486 pbi->reg_live = live;
5487 pbi->mem_set_list = NULL_RTX;
5488 pbi->mem_set_list_len = 0;
5489 pbi->local_set = local_set;
5490 pbi->cond_local_set = cond_local_set;
5491 pbi->cc0_live = 0;
5492 pbi->flags = flags;
5493
5494 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5495 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5496 else
5497 pbi->reg_next_use = NULL;
5498
5499 pbi->new_set = BITMAP_XMALLOC ();
5500
5501 #ifdef HAVE_conditional_execution
5502 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5503 free_reg_cond_life_info);
5504 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5505
5506 /* If this block ends in a conditional branch, for each register live
5507 from one side of the branch and not the other, record the register
5508 as conditionally dead. */
5509 if (GET_CODE (bb->end) == JUMP_INSN
5510 && any_condjump_p (bb->end))
5511 {
5512 regset_head diff_head;
5513 regset diff = INITIALIZE_REG_SET (diff_head);
5514 basic_block bb_true, bb_false;
5515 rtx cond_true, cond_false, set_src;
5516 int i;
5517
5518 /* Identify the successor blocks. */
5519 bb_true = bb->succ->dest;
5520 if (bb->succ->succ_next != NULL)
5521 {
5522 bb_false = bb->succ->succ_next->dest;
5523
5524 if (bb->succ->flags & EDGE_FALLTHRU)
5525 {
5526 basic_block t = bb_false;
5527 bb_false = bb_true;
5528 bb_true = t;
5529 }
5530 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5531 abort ();
5532 }
5533 else
5534 {
5535 /* This can happen with a conditional jump to the next insn. */
5536 if (JUMP_LABEL (bb->end) != bb_true->head)
5537 abort ();
5538
5539 /* Simplest way to do nothing. */
5540 bb_false = bb_true;
5541 }
5542
5543 /* Extract the condition from the branch. */
5544 set_src = SET_SRC (pc_set (bb->end));
5545 cond_true = XEXP (set_src, 0);
5546 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5547 GET_MODE (cond_true), XEXP (cond_true, 0),
5548 XEXP (cond_true, 1));
5549 if (GET_CODE (XEXP (set_src, 1)) == PC)
5550 {
5551 rtx t = cond_false;
5552 cond_false = cond_true;
5553 cond_true = t;
5554 }
5555
5556       /* Compute which registers lead different lives in the successors.  */
5557 if (bitmap_operation (diff, bb_true->global_live_at_start,
5558 bb_false->global_live_at_start, BITMAP_XOR))
5559 {
5560 rtx reg = XEXP (cond_true, 0);
5561
5562 if (GET_CODE (reg) == SUBREG)
5563 reg = SUBREG_REG (reg);
5564
5565 if (GET_CODE (reg) != REG)
5566 abort ();
5567
5568 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5569
5570 /* For each such register, mark it conditionally dead. */
5571 EXECUTE_IF_SET_IN_REG_SET
5572 (diff, 0, i,
5573 {
5574 struct reg_cond_life_info *rcli;
5575 rtx cond;
5576
5577 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5578
5579 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5580 cond = cond_false;
5581 else
5582 cond = cond_true;
5583 rcli->condition = cond;
5584 rcli->stores = const0_rtx;
5585 rcli->orig_condition = cond;
5586
5587 splay_tree_insert (pbi->reg_cond_dead, i,
5588 (splay_tree_value) rcli);
5589 });
5590 }
5591
5592 FREE_REG_SET (diff);
5593 }
5594 #endif
5595
5596 /* If this block has no successors, any stores to the frame that aren't
5597 used later in the block are dead. So make a pass over the block
5598      recording any such stores that are made and showing them dead at the end.  We do
5599 a very conservative and simple job here. */
5600 if (optimize
5601 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5602 && (TYPE_RETURNS_STACK_DEPRESSED
5603 (TREE_TYPE (current_function_decl))))
5604 && (flags & PROP_SCAN_DEAD_CODE)
5605 && (bb->succ == NULL
5606 || (bb->succ->succ_next == NULL
5607 && bb->succ->dest == EXIT_BLOCK_PTR
5608 && ! current_function_calls_eh_return)))
5609 {
5610 rtx insn, set;
5611 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5612 if (GET_CODE (insn) == INSN
5613 && (set = single_set (insn))
5614 && GET_CODE (SET_DEST (set)) == MEM)
5615 {
5616 rtx mem = SET_DEST (set);
5617 rtx canon_mem = canon_rtx (mem);
5618
5619 /* This optimization is performed by faking a store to the
5620 memory at the end of the block. This doesn't work for
5621 unchanging memories because multiple stores to unchanging
5622 	       memory are illegal and alias analysis doesn't consider it.  */
5623 if (RTX_UNCHANGING_P (canon_mem))
5624 continue;
5625
5626 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5627 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5628 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5629 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5630 add_to_mem_set_list (pbi, canon_mem);
5631 }
5632 }
5633
5634 return pbi;
5635 }
5636
5637 /* Release a propagate_block_info struct. */
5638
5639 void
5640 free_propagate_block_info (pbi)
5641 struct propagate_block_info *pbi;
5642 {
5643 free_EXPR_LIST_list (&pbi->mem_set_list);
5644
5645 BITMAP_XFREE (pbi->new_set);
5646
5647 #ifdef HAVE_conditional_execution
5648 splay_tree_delete (pbi->reg_cond_dead);
5649 BITMAP_XFREE (pbi->reg_cond_reg);
5650 #endif
5651
5652 if (pbi->reg_next_use)
5653 free (pbi->reg_next_use);
5654
5655 free (pbi);
5656 }
5657
5658 /* Compute the registers live at the beginning of a basic block BB from
5659 those live at the end.
5660
5661 When called, REG_LIVE contains those live at the end. On return, it
5662 contains those live at the beginning.
5663
5664 LOCAL_SET, if non-null, will be set with all registers killed
5665 unconditionally by this basic block.
5666 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5667 killed conditionally by this basic block. If there is any unconditional
5668 set of a register, then the corresponding bit will be set in LOCAL_SET
5669 and cleared in COND_LOCAL_SET.
5670 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5671 case, the resulting set will be equal to the union of the two sets that
5672 would otherwise be computed.
5673
5674 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5675
5676 int
5677 propagate_block (bb, live, local_set, cond_local_set, flags)
5678 basic_block bb;
5679 regset live;
5680 regset local_set;
5681 regset cond_local_set;
5682 int flags;
5683 {
5684 struct propagate_block_info *pbi;
5685 rtx insn, prev;
5686 int changed;
5687
5688 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5689
5690 if (flags & PROP_REG_INFO)
5691 {
5692 register int i;
5693
5694 /* Process the regs live at the end of the block.
5695 Mark them as not local to any one basic block. */
5696 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5697 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5698 }
5699
5700 /* Scan the block an insn at a time from end to beginning. */
5701
5702 changed = 0;
5703 for (insn = bb->end;; insn = prev)
5704 {
5705 /* If this is a call to `setjmp' et al, warn if any
5706 non-volatile datum is live. */
5707 if ((flags & PROP_REG_INFO)
5708 && GET_CODE (insn) == NOTE
5709 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5710 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5711
5712 prev = propagate_one_insn (pbi, insn);
5713 changed |= NEXT_INSN (prev) != insn;
5714
5715 if (insn == bb->head)
5716 break;
5717 }
5718
5719 free_propagate_block_info (pbi);
5720
5721 return changed;
5722 }
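
/* Illustrative sketch only, not part of the pass: a typical caller walks
   the blocks, seeds LIVE from global_live_at_end, and lets propagate_block
   turn it into the live-at-start set.  The regset macros are the ones used
   elsewhere in this file; the driver loop itself is an assumption about
   the caller, not code taken from this file.

	regset_head live_head;
	regset live = INITIALIZE_REG_SET (live_head);
	int i;

	for (i = n_basic_blocks - 1; i >= 0; i--)
	  {
	    basic_block bb = BASIC_BLOCK (i);
	    COPY_REG_SET (live, bb->global_live_at_end);
	    propagate_block (bb, live, NULL, NULL, PROP_DEATH_NOTES);
	    COPY_REG_SET (bb->global_live_at_start, live);
	  }
	FREE_REG_SET (live);
*/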
5723 \f
5724 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5725 (SET expressions whose destinations are registers dead after the insn).
5726 NEEDED is the regset that says which regs are alive after the insn.
5727
5728 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5729
5730 If X is the entire body of an insn, NOTES contains the reg notes
5731 pertaining to the insn. */
5732
5733 static int
5734 insn_dead_p (pbi, x, call_ok, notes)
5735 struct propagate_block_info *pbi;
5736 rtx x;
5737 int call_ok;
5738 rtx notes ATTRIBUTE_UNUSED;
5739 {
5740 enum rtx_code code = GET_CODE (x);
5741
5742 #ifdef AUTO_INC_DEC
5743 /* If flow is invoked after reload, we must take existing AUTO_INC
5744      expressions into account.  */
5745 if (reload_completed)
5746 {
5747 for (; notes; notes = XEXP (notes, 1))
5748 {
5749 if (REG_NOTE_KIND (notes) == REG_INC)
5750 {
5751 int regno = REGNO (XEXP (notes, 0));
5752
5753 /* Don't delete insns to set global regs. */
5754 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5755 || REGNO_REG_SET_P (pbi->reg_live, regno))
5756 return 0;
5757 }
5758 }
5759 }
5760 #endif
5761
5762 /* If setting something that's a reg or part of one,
5763 see if that register's altered value will be live. */
5764
5765 if (code == SET)
5766 {
5767 rtx r = SET_DEST (x);
5768
5769 #ifdef HAVE_cc0
5770 if (GET_CODE (r) == CC0)
5771 return ! pbi->cc0_live;
5772 #endif
5773
5774 /* A SET that is a subroutine call cannot be dead. */
5775 if (GET_CODE (SET_SRC (x)) == CALL)
5776 {
5777 if (! call_ok)
5778 return 0;
5779 }
5780
5781 /* Don't eliminate loads from volatile memory or volatile asms. */
5782 else if (volatile_refs_p (SET_SRC (x)))
5783 return 0;
5784
5785 if (GET_CODE (r) == MEM)
5786 {
5787 rtx temp, canon_r;
5788
5789 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5790 return 0;
5791
5792 canon_r = canon_rtx (r);
5793
5794 /* Walk the set of memory locations we are currently tracking
5795 and see if one is an identical match to this memory location.
5796 If so, this memory write is dead (remember, we're walking
5797 backwards from the end of the block to the start). Since
5798 rtx_equal_p does not check the alias set or flags, we also
5799 must have the potential for them to conflict (anti_dependence). */
5800 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5801 if (anti_dependence (r, XEXP (temp, 0)))
5802 {
5803 rtx mem = XEXP (temp, 0);
5804
5805 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5806 && (GET_MODE_SIZE (GET_MODE (canon_r))
5807 <= GET_MODE_SIZE (GET_MODE (mem))))
5808 return 1;
5809
5810 #ifdef AUTO_INC_DEC
5811 /* Check if memory reference matches an auto increment. Only
5812 post increment/decrement or modify are valid. */
5813 if (GET_MODE (mem) == GET_MODE (r)
5814 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5815 || GET_CODE (XEXP (mem, 0)) == POST_INC
5816 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5817 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5818 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5819 return 1;
5820 #endif
5821 }
5822 }
5823 else
5824 {
5825 while (GET_CODE (r) == SUBREG
5826 || GET_CODE (r) == STRICT_LOW_PART
5827 || GET_CODE (r) == ZERO_EXTRACT)
5828 r = XEXP (r, 0);
5829
5830 if (GET_CODE (r) == REG)
5831 {
5832 int regno = REGNO (r);
5833
5834 /* Obvious. */
5835 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5836 return 0;
5837
5838 /* If this is a hard register, verify that subsequent
5839 words are not needed. */
5840 if (regno < FIRST_PSEUDO_REGISTER)
5841 {
5842 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5843
5844 while (--n > 0)
5845 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5846 return 0;
5847 }
5848
5849 /* Don't delete insns to set global regs. */
5850 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5851 return 0;
5852
5853 /* Make sure insns to set the stack pointer aren't deleted. */
5854 if (regno == STACK_POINTER_REGNUM)
5855 return 0;
5856
5857 /* ??? These bits might be redundant with the force live bits
5858 in calculate_global_regs_live. We would delete from
5859 sequential sets; whether this actually affects real code
5860 for anything but the stack pointer I don't know. */
5861 /* Make sure insns to set the frame pointer aren't deleted. */
5862 if (regno == FRAME_POINTER_REGNUM
5863 && (! reload_completed || frame_pointer_needed))
5864 return 0;
5865 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5866 if (regno == HARD_FRAME_POINTER_REGNUM
5867 && (! reload_completed || frame_pointer_needed))
5868 return 0;
5869 #endif
5870
5871 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5872 /* Make sure insns to set arg pointer are never deleted
5873 (if the arg pointer isn't fixed, there will be a USE
5874 for it, so we can treat it normally). */
5875 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5876 return 0;
5877 #endif
5878
5879 /* Otherwise, the set is dead. */
5880 return 1;
5881 }
5882 }
5883 }
5884
5885 /* If performing several activities, insn is dead if each activity
5886 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5887 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5888 worth keeping. */
5889 else if (code == PARALLEL)
5890 {
5891 int i = XVECLEN (x, 0);
5892
5893 for (i--; i >= 0; i--)
5894 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5895 && GET_CODE (XVECEXP (x, 0, i)) != USE
5896 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5897 return 0;
5898
5899 return 1;
5900 }
5901
5902 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5903 is not necessarily true for hard registers. */
5904 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5905 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5906 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5907 return 1;
5908
5909 /* We do not check other CLOBBER or USE here. An insn consisting of just
5910 a CLOBBER or just a USE should not be deleted. */
5911 return 0;
5912 }
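
/* Example, illustrative only: with pseudo 65 not live after the insn and
   not a global or pointer register,

	(insn 42 41 43 (set (reg:SI 65)
			    (plus:SI (reg:SI 66) (const_int 4))) ...)

   is reported dead, while the same SET is kept if its source is volatile
   memory or a CALL, or if any part of its destination is still live.  */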
5913
5914 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5915 return 1 if the entire library call is dead.
5916 This is true if INSN copies a register (hard or pseudo)
5917 and if the hard return reg of the call insn is dead.
5918 (The caller should have tested the destination of the SET inside
5919 INSN already for death.)
5920
5921 If this insn doesn't just copy a register, then we don't
5922 have an ordinary libcall. In that case, cse could not have
5923 managed to substitute the source for the dest later on,
5924 so we can assume the libcall is dead.
5925
5926 PBI is the block info giving pseudoregs live before this insn.
5927 NOTE is the REG_RETVAL note of the insn. */
5928
5929 static int
5930 libcall_dead_p (pbi, note, insn)
5931 struct propagate_block_info *pbi;
5932 rtx note;
5933 rtx insn;
5934 {
5935 rtx x = single_set (insn);
5936
5937 if (x)
5938 {
5939 register rtx r = SET_SRC (x);
5940 if (GET_CODE (r) == REG)
5941 {
5942 rtx call = XEXP (note, 0);
5943 rtx call_pat;
5944 register int i;
5945
5946 /* Find the call insn. */
5947 while (call != insn && GET_CODE (call) != CALL_INSN)
5948 call = NEXT_INSN (call);
5949
5950 /* If there is none, do nothing special,
5951 since ordinary death handling can understand these insns. */
5952 if (call == insn)
5953 return 0;
5954
5955 /* See if the hard reg holding the value is dead.
5956 If this is a PARALLEL, find the call within it. */
5957 call_pat = PATTERN (call);
5958 if (GET_CODE (call_pat) == PARALLEL)
5959 {
5960 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5961 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5962 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5963 break;
5964
5965 /* This may be a library call that is returning a value
5966 via invisible pointer. Do nothing special, since
5967 ordinary death handling can understand these insns. */
5968 if (i < 0)
5969 return 0;
5970
5971 call_pat = XVECEXP (call_pat, 0, i);
5972 }
5973
5974 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5975 }
5976 }
5977 return 1;
5978 }
5979
5980 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5981 live at function entry. Don't count global register variables, variables
5982 in registers that can be used for function arg passing, or variables in
5983 fixed hard registers. */
5984
5985 int
5986 regno_uninitialized (regno)
5987 int regno;
5988 {
5989 if (n_basic_blocks == 0
5990 || (regno < FIRST_PSEUDO_REGISTER
5991 && (global_regs[regno]
5992 || fixed_regs[regno]
5993 || FUNCTION_ARG_REGNO_P (regno))))
5994 return 0;
5995
5996 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5997 }
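
/* Illustrative use, a sketch in the spirit of the caller in function.c
   (the surrounding warning machinery is assumed, not part of this file):
   a pass that knows the pseudo assigned to a user variable can ask whether
   the variable may be read before it is written.

	if (regno_uninitialized (REGNO (DECL_RTL (decl))))
	  warning_with_decl (decl, "`%s' might be used uninitialized");
*/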
5998
5999 /* 1 if register REGNO was alive at a place where `setjmp' was called
6000 and was set more than once or is an argument.
6001 Such regs may be clobbered by `longjmp'. */
6002
6003 int
6004 regno_clobbered_at_setjmp (regno)
6005 int regno;
6006 {
6007 if (n_basic_blocks == 0)
6008 return 0;
6009
6010 return ((REG_N_SETS (regno) > 1
6011 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6012 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
6013 }
6014 \f
6015 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6016 maximal list size; look for overlaps in mode and select the largest. */
6017 static void
6018 add_to_mem_set_list (pbi, mem)
6019 struct propagate_block_info *pbi;
6020 rtx mem;
6021 {
6022 rtx i;
6023
6024 /* We don't know how large a BLKmode store is, so we must not
6025 take them into consideration. */
6026 if (GET_MODE (mem) == BLKmode)
6027 return;
6028
6029 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6030 {
6031 rtx e = XEXP (i, 0);
6032 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6033 {
6034 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6035 {
6036 #ifdef AUTO_INC_DEC
6037 /* If we must store a copy of the mem, we can just modify
6038 the mode of the stored copy. */
6039 if (pbi->flags & PROP_AUTOINC)
6040 PUT_MODE (e, GET_MODE (mem));
6041 else
6042 #endif
6043 XEXP (i, 0) = mem;
6044 }
6045 return;
6046 }
6047 }
6048
6049 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6050 {
6051 #ifdef AUTO_INC_DEC
6052 /* Store a copy of mem, otherwise the address may be
6053 scrogged by find_auto_inc. */
6054 if (pbi->flags & PROP_AUTOINC)
6055 mem = shallow_copy_rtx (mem);
6056 #endif
6057 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6058 pbi->mem_set_list_len++;
6059 }
6060 }
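
/* Example, illustrative only: if the list already holds
   (mem:SI (plus (reg fp) (const_int -4))) and we now record a store of
   (mem:DI (plus (reg fp) (const_int -4))), the addresses are rtx_equal_p
   and DImode is wider than SImode, so the existing entry is widened (or
   replaced) rather than a second entry being added.  BLKmode stores are
   never recorded because their size is unknown.  */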
6061
6062 /* INSN references memory, possibly using autoincrement addressing modes.
6063 Find any entries on the mem_set_list that need to be invalidated due
6064 to an address change. */
6065
6066 static void
6067 invalidate_mems_from_autoinc (pbi, insn)
6068 struct propagate_block_info *pbi;
6069 rtx insn;
6070 {
6071   rtx note;
6072 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6073 if (REG_NOTE_KIND (note) == REG_INC)
6074 invalidate_mems_from_set (pbi, XEXP (note, 0));
6075 }
6076
6077 /* EXP is a REG.  Remove any dependent entries from pbi->mem_set_list.  */
6078
6079 static void
6080 invalidate_mems_from_set (pbi, exp)
6081 struct propagate_block_info *pbi;
6082 rtx exp;
6083 {
6084 rtx temp = pbi->mem_set_list;
6085 rtx prev = NULL_RTX;
6086 rtx next;
6087
6088 while (temp)
6089 {
6090 next = XEXP (temp, 1);
6091 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6092 {
6093 /* Splice this entry out of the list. */
6094 if (prev)
6095 XEXP (prev, 1) = next;
6096 else
6097 pbi->mem_set_list = next;
6098 free_EXPR_LIST_node (temp);
6099 pbi->mem_set_list_len--;
6100 }
6101 else
6102 prev = temp;
6103 temp = next;
6104 }
6105 }
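
/* Example, illustrative only: after seeing

	(set (reg:SI 70) (reg:SI 71))

   a tracked entry such as (mem:SI (plus (reg:SI 70) (const_int 8)))
   mentions r70 in its address, so it is spliced out of mem_set_list;
   the recorded address no longer describes a known location.  */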
6106
6107 /* Process the registers that are set within X. Their bits are set to
6108 1 in the regset DEAD, because they are dead prior to this insn.
6109
6110 If INSN is nonzero, it is the insn being processed.
6111
6112 FLAGS is the set of operations to perform. */
6113
6114 static void
6115 mark_set_regs (pbi, x, insn)
6116 struct propagate_block_info *pbi;
6117 rtx x, insn;
6118 {
6119 rtx cond = NULL_RTX;
6120 rtx link;
6121 enum rtx_code code;
6122
6123 if (insn)
6124 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6125 {
6126 if (REG_NOTE_KIND (link) == REG_INC)
6127 mark_set_1 (pbi, SET, XEXP (link, 0),
6128 (GET_CODE (x) == COND_EXEC
6129 ? COND_EXEC_TEST (x) : NULL_RTX),
6130 insn, pbi->flags);
6131 }
6132 retry:
6133 switch (code = GET_CODE (x))
6134 {
6135 case SET:
6136 case CLOBBER:
6137 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6138 return;
6139
6140 case COND_EXEC:
6141 cond = COND_EXEC_TEST (x);
6142 x = COND_EXEC_CODE (x);
6143 goto retry;
6144
6145 case PARALLEL:
6146 {
6147 register int i;
6148 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6149 {
6150 rtx sub = XVECEXP (x, 0, i);
6151 switch (code = GET_CODE (sub))
6152 {
6153 case COND_EXEC:
6154 if (cond != NULL_RTX)
6155 abort ();
6156
6157 cond = COND_EXEC_TEST (sub);
6158 sub = COND_EXEC_CODE (sub);
6159 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6160 break;
6161 /* Fall through. */
6162
6163 case SET:
6164 case CLOBBER:
6165 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6166 break;
6167
6168 default:
6169 break;
6170 }
6171 }
6172 break;
6173 }
6174
6175 default:
6176 break;
6177 }
6178 }
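
/* Example, illustrative only: for a predicated store such as

	(cond_exec (ne (reg:CC 96) (const_int 0))
		   (set (reg:SI 65) (reg:SI 66)))

   the COND_EXEC is stripped, the (ne ...) test is passed down as COND,
   and mark_set_1 is called on (reg:SI 65).  A PARALLEL is walked element
   by element in the same way, one SET or CLOBBER at a time.  */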
6179
6180 /* Process a single set, which appears in INSN. REG (which may not
6181 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6182 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6183    If the set is conditional (because it appears in a COND_EXEC), COND
6184 will be the condition. */
6185
6186 static void
6187 mark_set_1 (pbi, code, reg, cond, insn, flags)
6188 struct propagate_block_info *pbi;
6189 enum rtx_code code;
6190 rtx reg, cond, insn;
6191 int flags;
6192 {
6193 int regno_first = -1, regno_last = -1;
6194 unsigned long not_dead = 0;
6195 int i;
6196
6197 /* Modifying just one hardware register of a multi-reg value or just a
6198 byte field of a register does not mean the value from before this insn
6199      is now dead.  Of course, if it was dead afterwards, it's unused now.  */
6200
6201 switch (GET_CODE (reg))
6202 {
6203 case PARALLEL:
6204 /* Some targets place small structures in registers for return values of
6205 functions. We have to detect this case specially here to get correct
6206 flow information. */
6207 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6208 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6209 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6210 flags);
6211 return;
6212
6213 case ZERO_EXTRACT:
6214 case SIGN_EXTRACT:
6215 case STRICT_LOW_PART:
6216 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6217 do
6218 reg = XEXP (reg, 0);
6219 while (GET_CODE (reg) == SUBREG
6220 || GET_CODE (reg) == ZERO_EXTRACT
6221 || GET_CODE (reg) == SIGN_EXTRACT
6222 || GET_CODE (reg) == STRICT_LOW_PART);
6223 if (GET_CODE (reg) == MEM)
6224 break;
6225 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6226 /* Fall through. */
6227
6228 case REG:
6229 regno_last = regno_first = REGNO (reg);
6230 if (regno_first < FIRST_PSEUDO_REGISTER)
6231 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6232 break;
6233
6234 case SUBREG:
6235 if (GET_CODE (SUBREG_REG (reg)) == REG)
6236 {
6237 enum machine_mode outer_mode = GET_MODE (reg);
6238 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6239
6240 /* Identify the range of registers affected. This is moderately
6241 tricky for hard registers. See alter_subreg. */
6242
6243 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6244 if (regno_first < FIRST_PSEUDO_REGISTER)
6245 {
6246 regno_first += subreg_regno_offset (regno_first, inner_mode,
6247 SUBREG_BYTE (reg),
6248 outer_mode);
6249 regno_last = (regno_first
6250 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6251
6252 /* Since we've just adjusted the register number ranges, make
6253 sure REG matches. Otherwise some_was_live will be clear
6254 when it shouldn't have been, and we'll create incorrect
6255 REG_UNUSED notes. */
6256 reg = gen_rtx_REG (outer_mode, regno_first);
6257 }
6258 else
6259 {
6260 /* If the number of words in the subreg is less than the number
6261 of words in the full register, we have a well-defined partial
6262 set. Otherwise the high bits are undefined.
6263
6264 This is only really applicable to pseudos, since we just took
6265 care of multi-word hard registers. */
6266 if (((GET_MODE_SIZE (outer_mode)
6267 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6268 < ((GET_MODE_SIZE (inner_mode)
6269 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6270 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6271 regno_first);
6272
6273 reg = SUBREG_REG (reg);
6274 }
6275 }
6276 else
6277 reg = SUBREG_REG (reg);
6278 break;
6279
6280 default:
6281 break;
6282 }
6283
6284 /* If this set is a MEM, then it kills any aliased writes.
6285 If this set is a REG, then it kills any MEMs which use the reg. */
6286 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6287 {
6288 if (GET_CODE (reg) == REG)
6289 invalidate_mems_from_set (pbi, reg);
6290
6291 /* If the memory reference had embedded side effects (autoincrement
6292 	 address modes), we may need to kill some entries on the
6293 memory set list. */
6294 if (insn && GET_CODE (reg) == MEM)
6295 invalidate_mems_from_autoinc (pbi, insn);
6296
6297 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6298 /* ??? With more effort we could track conditional memory life. */
6299 && ! cond
6300 /* There are no REG_INC notes for SP, so we can't assume we'll see
6301 everything that invalidates it. To be safe, don't eliminate any
6302 	 stores through SP; none of them should be redundant anyway.  */
6303 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6304 add_to_mem_set_list (pbi, canon_rtx (reg));
6305 }
6306
6307 if (GET_CODE (reg) == REG
6308 && ! (regno_first == FRAME_POINTER_REGNUM
6309 && (! reload_completed || frame_pointer_needed))
6310 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6311 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6312 && (! reload_completed || frame_pointer_needed))
6313 #endif
6314 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6315 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6316 #endif
6317 )
6318 {
6319 int some_was_live = 0, some_was_dead = 0;
6320
6321 for (i = regno_first; i <= regno_last; ++i)
6322 {
6323 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6324 if (pbi->local_set)
6325 {
6326 /* Order of the set operation matters here since both
6327 sets may be the same. */
6328 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6329 if (cond != NULL_RTX
6330 && ! REGNO_REG_SET_P (pbi->local_set, i))
6331 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6332 else
6333 SET_REGNO_REG_SET (pbi->local_set, i);
6334 }
6335 if (code != CLOBBER)
6336 SET_REGNO_REG_SET (pbi->new_set, i);
6337
6338 some_was_live |= needed_regno;
6339 some_was_dead |= ! needed_regno;
6340 }
6341
6342 #ifdef HAVE_conditional_execution
6343 /* Consider conditional death in deciding that the register needs
6344 a death note. */
6345 if (some_was_live && ! not_dead
6346 /* The stack pointer is never dead. Well, not strictly true,
6347 but it's very difficult to tell from here. Hopefully
6348 combine_stack_adjustments will fix up the most egregious
6349 errors. */
6350 && regno_first != STACK_POINTER_REGNUM)
6351 {
6352 for (i = regno_first; i <= regno_last; ++i)
6353 if (! mark_regno_cond_dead (pbi, i, cond))
6354 not_dead |= ((unsigned long) 1) << (i - regno_first);
6355 }
6356 #endif
6357
6358 /* Additional data to record if this is the final pass. */
6359 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6360 | PROP_DEATH_NOTES | PROP_AUTOINC))
6361 {
6362 register rtx y;
6363 register int blocknum = pbi->bb->index;
6364
6365 y = NULL_RTX;
6366 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6367 {
6368 y = pbi->reg_next_use[regno_first];
6369
6370 /* The next use is no longer next, since a store intervenes. */
6371 for (i = regno_first; i <= regno_last; ++i)
6372 pbi->reg_next_use[i] = 0;
6373 }
6374
6375 if (flags & PROP_REG_INFO)
6376 {
6377 for (i = regno_first; i <= regno_last; ++i)
6378 {
6379 /* Count (weighted) references, stores, etc. This counts a
6380 register twice if it is modified, but that is correct. */
6381 REG_N_SETS (i) += 1;
6382 REG_N_REFS (i) += 1;
6383 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6384
6385 /* The insns where a reg is live are normally counted
6386 elsewhere, but we want the count to include the insn
6387 where the reg is set, and the normal counting mechanism
6388 would not count it. */
6389 REG_LIVE_LENGTH (i) += 1;
6390 }
6391
6392 /* If this is a hard reg, record this function uses the reg. */
6393 if (regno_first < FIRST_PSEUDO_REGISTER)
6394 {
6395 for (i = regno_first; i <= regno_last; i++)
6396 regs_ever_live[i] = 1;
6397 }
6398 else
6399 {
6400 /* Keep track of which basic blocks each reg appears in. */
6401 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6402 REG_BASIC_BLOCK (regno_first) = blocknum;
6403 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6404 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6405 }
6406 }
6407
6408 if (! some_was_dead)
6409 {
6410 if (flags & PROP_LOG_LINKS)
6411 {
6412 /* Make a logical link from the next following insn
6413 that uses this register, back to this insn.
6414 The following insns have already been processed.
6415
6416 		 We don't build a LOG_LINK for hard registers contained
6417 in ASM_OPERANDs. If these registers get replaced,
6418 we might wind up changing the semantics of the insn,
6419 even if reload can make what appear to be valid
6420 assignments later. */
6421 if (y && (BLOCK_NUM (y) == blocknum)
6422 && (regno_first >= FIRST_PSEUDO_REGISTER
6423 || asm_noperands (PATTERN (y)) < 0))
6424 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6425 }
6426 }
6427 else if (not_dead)
6428 ;
6429 else if (! some_was_live)
6430 {
6431 if (flags & PROP_REG_INFO)
6432 REG_N_DEATHS (regno_first) += 1;
6433
6434 if (flags & PROP_DEATH_NOTES)
6435 {
6436 /* Note that dead stores have already been deleted
6437 when possible. If we get here, we have found a
6438 dead store that cannot be eliminated (because the
6439 same insn does something useful). Indicate this
6440 by marking the reg being set as dying here. */
6441 REG_NOTES (insn)
6442 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6443 }
6444 }
6445 else
6446 {
6447 if (flags & PROP_DEATH_NOTES)
6448 {
6449 /* This is a case where we have a multi-word hard register
6450 and some, but not all, of the words of the register are
6451 needed in subsequent insns. Write REG_UNUSED notes
6452 for those parts that were not needed. This case should
6453 be rare. */
6454
6455 for (i = regno_first; i <= regno_last; ++i)
6456 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6457 REG_NOTES (insn)
6458 = alloc_EXPR_LIST (REG_UNUSED,
6459 gen_rtx_REG (reg_raw_mode[i], i),
6460 REG_NOTES (insn));
6461 }
6462 }
6463 }
6464
6465 /* Mark the register as being dead. */
6466 if (some_was_live
6467 /* The stack pointer is never dead. Well, not strictly true,
6468 but it's very difficult to tell from here. Hopefully
6469 combine_stack_adjustments will fix up the most egregious
6470 errors. */
6471 && regno_first != STACK_POINTER_REGNUM)
6472 {
6473 for (i = regno_first; i <= regno_last; ++i)
6474 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6475 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6476 }
6477 }
6478 else if (GET_CODE (reg) == REG)
6479 {
6480 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6481 pbi->reg_next_use[regno_first] = 0;
6482 }
6483
6484 /* If this is the last pass and this is a SCRATCH, show it will be dying
6485 here and count it. */
6486 else if (GET_CODE (reg) == SCRATCH)
6487 {
6488 if (flags & PROP_DEATH_NOTES)
6489 REG_NOTES (insn)
6490 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6491 }
6492 }
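
/* Example, illustrative only: suppose a DImode value occupies the hard
   register pair r4/r5 and only r4 is needed after this insn.  A set of
   the whole pair sees some_was_live and some_was_dead both true, so
   instead of a single REG_UNUSED note for the pair, a REG_UNUSED note is
   written for r5 alone; r4 gets no note because its new value is used
   later.  */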
6493 \f
6494 #ifdef HAVE_conditional_execution
6495 /* Mark REGNO conditionally dead.
6496 Return true if the register is now unconditionally dead. */
6497
6498 static int
6499 mark_regno_cond_dead (pbi, regno, cond)
6500 struct propagate_block_info *pbi;
6501 int regno;
6502 rtx cond;
6503 {
6504 /* If this is a store to a predicate register, the value of the
6505      predicate is changing; we don't know that the predicate as seen
6506 before is the same as that seen after. Flush all dependent
6507 conditions from reg_cond_dead. This will make all such
6508 conditionally live registers unconditionally live. */
6509 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6510 flush_reg_cond_reg (pbi, regno);
6511
6512 /* If this is an unconditional store, remove any conditional
6513 life that may have existed. */
6514 if (cond == NULL_RTX)
6515 splay_tree_remove (pbi->reg_cond_dead, regno);
6516 else
6517 {
6518 splay_tree_node node;
6519 struct reg_cond_life_info *rcli;
6520 rtx ncond;
6521
6522 /* Otherwise this is a conditional set. Record that fact.
6523 It may have been conditionally used, or there may be a
6524 	 subsequent set with a complementary condition.  */
6525
6526 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6527 if (node == NULL)
6528 {
6529 /* The register was unconditionally live previously.
6530 Record the current condition as the condition under
6531 which it is dead. */
6532 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6533 rcli->condition = cond;
6534 rcli->stores = cond;
6535 rcli->orig_condition = const0_rtx;
6536 splay_tree_insert (pbi->reg_cond_dead, regno,
6537 (splay_tree_value) rcli);
6538
6539 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6540
6541 	  /* Not unconditionally dead.  */
6542 return 0;
6543 }
6544 else
6545 {
6546 /* The register was conditionally live previously.
6547 Add the new condition to the old. */
6548 rcli = (struct reg_cond_life_info *) node->value;
6549 ncond = rcli->condition;
6550 ncond = ior_reg_cond (ncond, cond, 1);
6551 if (rcli->stores == const0_rtx)
6552 rcli->stores = cond;
6553 else if (rcli->stores != const1_rtx)
6554 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6555
6556 /* If the register is now unconditionally dead, remove the entry
6557 in the splay_tree. A register is unconditionally dead if the
6558 dead condition ncond is true. A register is also unconditionally
6559 dead if the sum of all conditional stores is an unconditional
6560 store (stores is true), and the dead condition is identically the
6561 same as the original dead condition initialized at the end of
6562 the block. This is a pointer compare, not an rtx_equal_p
6563 compare. */
6564 if (ncond == const1_rtx
6565 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6566 splay_tree_remove (pbi->reg_cond_dead, regno);
6567 else
6568 {
6569 rcli->condition = ncond;
6570
6571 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6572
6573 	      /* Not unconditionally dead.  */
6574 return 0;
6575 }
6576 }
6577 }
6578
6579 return 1;
6580 }
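
/* Example, illustrative only, assuming IA-64 style predicate registers:
   a store to a live register under (ne p6 0) records (ne p6 0) as the
   condition under which it is dead and returns 0.  If, later in this
   backwards scan, the same register is also stored under the
   complementary (eq p6 0), the two conditions OR to const1_rtx, the
   splay-tree entry is removed, and 1 is returned: the register is now
   unconditionally dead.  */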
6581
6582 /* Called from splay_tree_delete for pbi->reg_cond_life. */
6583
6584 static void
6585 free_reg_cond_life_info (value)
6586 splay_tree_value value;
6587 {
6588 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6589 free (rcli);
6590 }
6591
6592 /* Helper function for flush_reg_cond_reg. */
6593
6594 static int
6595 flush_reg_cond_reg_1 (node, data)
6596 splay_tree_node node;
6597 void *data;
6598 {
6599 struct reg_cond_life_info *rcli;
6600 int *xdata = (int *) data;
6601 unsigned int regno = xdata[0];
6602
6603 /* Don't need to search if last flushed value was farther on in
6604 the in-order traversal. */
6605 if (xdata[1] >= (int) node->key)
6606 return 0;
6607
6608 /* Splice out portions of the expression that refer to regno. */
6609 rcli = (struct reg_cond_life_info *) node->value;
6610 rcli->condition = elim_reg_cond (rcli->condition, regno);
6611 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6612 rcli->stores = elim_reg_cond (rcli->stores, regno);
6613
6614 /* If the entire condition is now false, signal the node to be removed. */
6615 if (rcli->condition == const0_rtx)
6616 {
6617 xdata[1] = node->key;
6618 return -1;
6619 }
6620 else if (rcli->condition == const1_rtx)
6621 abort ();
6622
6623 return 0;
6624 }
6625
6626 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6627
6628 static void
6629 flush_reg_cond_reg (pbi, regno)
6630 struct propagate_block_info *pbi;
6631 int regno;
6632 {
6633 int pair[2];
6634
6635 pair[0] = regno;
6636 pair[1] = -1;
6637 while (splay_tree_foreach (pbi->reg_cond_dead,
6638 flush_reg_cond_reg_1, pair) == -1)
6639 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6640
6641 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6642 }
6643
6644 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6645 For ior/and, the ADD flag determines whether we want to add the new
6646 condition X to the old one unconditionally. If it is zero, we will
6647 only return a new expression if X allows us to simplify part of
6648 OLD, otherwise we return OLD unchanged to the caller.
6649 If ADD is nonzero, we will return a new condition in all cases. The
6650 toplevel caller of one of these functions should always pass 1 for
6651 ADD. */
6652
6653 static rtx
6654 ior_reg_cond (old, x, add)
6655 rtx old, x;
6656 int add;
6657 {
6658 rtx op0, op1;
6659
6660 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6661 {
6662 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6663 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6664 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6665 return const1_rtx;
6666 if (GET_CODE (x) == GET_CODE (old)
6667 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6668 return old;
6669 if (! add)
6670 return old;
6671 return gen_rtx_IOR (0, old, x);
6672 }
6673
6674 switch (GET_CODE (old))
6675 {
6676 case IOR:
6677 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6678 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6679 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6680 {
6681 if (op0 == const0_rtx)
6682 return op1;
6683 if (op1 == const0_rtx)
6684 return op0;
6685 if (op0 == const1_rtx || op1 == const1_rtx)
6686 return const1_rtx;
6687 if (op0 == XEXP (old, 0))
6688 op0 = gen_rtx_IOR (0, op0, x);
6689 else
6690 op1 = gen_rtx_IOR (0, op1, x);
6691 return gen_rtx_IOR (0, op0, op1);
6692 }
6693 if (! add)
6694 return old;
6695 return gen_rtx_IOR (0, old, x);
6696
6697 case AND:
6698 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6699 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6700 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6701 {
6702 if (op0 == const1_rtx)
6703 return op1;
6704 if (op1 == const1_rtx)
6705 return op0;
6706 if (op0 == const0_rtx || op1 == const0_rtx)
6707 return const0_rtx;
6708 if (op0 == XEXP (old, 0))
6709 op0 = gen_rtx_IOR (0, op0, x);
6710 else
6711 op1 = gen_rtx_IOR (0, op1, x);
6712 return gen_rtx_AND (0, op0, op1);
6713 }
6714 if (! add)
6715 return old;
6716 return gen_rtx_IOR (0, old, x);
6717
6718 case NOT:
6719 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6720 if (op0 != XEXP (old, 0))
6721 return not_reg_cond (op0);
6722 if (! add)
6723 return old;
6724 return gen_rtx_IOR (0, old, x);
6725
6726 default:
6727 abort ();
6728 }
6729 }
6730
6731 static rtx
6732 not_reg_cond (x)
6733 rtx x;
6734 {
6735 enum rtx_code x_code;
6736
6737 if (x == const0_rtx)
6738 return const1_rtx;
6739 else if (x == const1_rtx)
6740 return const0_rtx;
6741 x_code = GET_CODE (x);
6742 if (x_code == NOT)
6743 return XEXP (x, 0);
6744 if (GET_RTX_CLASS (x_code) == '<'
6745 && GET_CODE (XEXP (x, 0)) == REG)
6746 {
6747 if (XEXP (x, 1) != const0_rtx)
6748 abort ();
6749
6750 return gen_rtx_fmt_ee (reverse_condition (x_code),
6751 VOIDmode, XEXP (x, 0), const0_rtx);
6752 }
6753 return gen_rtx_NOT (0, x);
6754 }
6755
6756 static rtx
6757 and_reg_cond (old, x, add)
6758 rtx old, x;
6759 int add;
6760 {
6761 rtx op0, op1;
6762
6763 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6764 {
6765 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6766 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6767 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6768 return const0_rtx;
6769 if (GET_CODE (x) == GET_CODE (old)
6770 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6771 return old;
6772 if (! add)
6773 return old;
6774 return gen_rtx_AND (0, old, x);
6775 }
6776
6777 switch (GET_CODE (old))
6778 {
6779 case IOR:
6780 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6781 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6782 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6783 {
6784 if (op0 == const0_rtx)
6785 return op1;
6786 if (op1 == const0_rtx)
6787 return op0;
6788 if (op0 == const1_rtx || op1 == const1_rtx)
6789 return const1_rtx;
6790 if (op0 == XEXP (old, 0))
6791 op0 = gen_rtx_AND (0, op0, x);
6792 else
6793 op1 = gen_rtx_AND (0, op1, x);
6794 return gen_rtx_IOR (0, op0, op1);
6795 }
6796 if (! add)
6797 return old;
6798 return gen_rtx_AND (0, old, x);
6799
6800 case AND:
6801 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6802 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6803 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6804 {
6805 if (op0 == const1_rtx)
6806 return op1;
6807 if (op1 == const1_rtx)
6808 return op0;
6809 if (op0 == const0_rtx || op1 == const0_rtx)
6810 return const0_rtx;
6811 if (op0 == XEXP (old, 0))
6812 op0 = gen_rtx_AND (0, op0, x);
6813 else
6814 op1 = gen_rtx_AND (0, op1, x);
6815 return gen_rtx_AND (0, op0, op1);
6816 }
6817 if (! add)
6818 return old;
6819
6820 /* If X is identical to one of the existing terms of the AND,
6821 then just return what we already have. */
6822 /* ??? There really should be some sort of recursive check here in
6823 case there are nested ANDs. */
6824 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6825 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6826 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6827 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6828 return old;
6829
6830 return gen_rtx_AND (0, old, x);
6831
6832 case NOT:
6833 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6834 if (op0 != XEXP (old, 0))
6835 return not_reg_cond (op0);
6836 if (! add)
6837 return old;
6838 return gen_rtx_AND (0, old, x);
6839
6840 default:
6841 abort ();
6842 }
6843 }
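
/* Examples, illustrative only, of the algebra above with complementary
   tests on the same predicate register:

	ior_reg_cond ((ne p6 0), (eq p6 0), 1)  ==>  const1_rtx
	and_reg_cond ((ne p6 0), (eq p6 0), 1)  ==>  const0_rtx

   while unrelated conditions simply combine:

	ior_reg_cond ((ne p6 0), (ne p7 0), 1)  ==>  (ior (ne p6 0) (ne p7 0))  */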
6844
6845 /* Given a condition X, remove references to reg REGNO and return the
6846 new condition. The removal will be done so that all conditions
6847 involving REGNO are considered to evaluate to false. This function
6848 is used when the value of REGNO changes. */
6849
6850 static rtx
6851 elim_reg_cond (x, regno)
6852 rtx x;
6853 unsigned int regno;
6854 {
6855 rtx op0, op1;
6856
6857 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6858 {
6859 if (REGNO (XEXP (x, 0)) == regno)
6860 return const0_rtx;
6861 return x;
6862 }
6863
6864 switch (GET_CODE (x))
6865 {
6866 case AND:
6867 op0 = elim_reg_cond (XEXP (x, 0), regno);
6868 op1 = elim_reg_cond (XEXP (x, 1), regno);
6869 if (op0 == const0_rtx || op1 == const0_rtx)
6870 return const0_rtx;
6871 if (op0 == const1_rtx)
6872 return op1;
6873 if (op1 == const1_rtx)
6874 return op0;
6875 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6876 return x;
6877 return gen_rtx_AND (0, op0, op1);
6878
6879 case IOR:
6880 op0 = elim_reg_cond (XEXP (x, 0), regno);
6881 op1 = elim_reg_cond (XEXP (x, 1), regno);
6882 if (op0 == const1_rtx || op1 == const1_rtx)
6883 return const1_rtx;
6884 if (op0 == const0_rtx)
6885 return op1;
6886 if (op1 == const0_rtx)
6887 return op0;
6888 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6889 return x;
6890 return gen_rtx_IOR (0, op0, op1);
6891
6892 case NOT:
6893 op0 = elim_reg_cond (XEXP (x, 0), regno);
6894 if (op0 == const0_rtx)
6895 return const1_rtx;
6896 if (op0 == const1_rtx)
6897 return const0_rtx;
6898 if (op0 != XEXP (x, 0))
6899 return not_reg_cond (op0);
6900 return x;
6901
6902 default:
6903 abort ();
6904 }
6905 }
6906 #endif /* HAVE_conditional_execution */
6907 \f
6908 #ifdef AUTO_INC_DEC
6909
6910 /* Try to substitute the auto-inc expression INC as the address inside
6911 MEM which occurs in INSN. Currently, the address of MEM is an expression
6912 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6913 that has a single set whose source is a PLUS of INCR_REG and something
6914 else. */
6915
6916 static void
6917 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6918 struct propagate_block_info *pbi;
6919 rtx inc, insn, mem, incr, incr_reg;
6920 {
6921 int regno = REGNO (incr_reg);
6922 rtx set = single_set (incr);
6923 rtx q = SET_DEST (set);
6924 rtx y = SET_SRC (set);
6925 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6926
6927 /* Make sure this reg appears only once in this insn. */
6928 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6929 return;
6930
6931 if (dead_or_set_p (incr, incr_reg)
6932 /* Mustn't autoinc an eliminable register. */
6933 && (regno >= FIRST_PSEUDO_REGISTER
6934 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6935 {
6936 /* This is the simple case. Try to make the auto-inc. If
6937 we can't, we are done. Otherwise, we will do any
6938 needed updates below. */
6939 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6940 return;
6941 }
6942 else if (GET_CODE (q) == REG
6943 /* PREV_INSN used here to check the semi-open interval
6944 [insn,incr). */
6945 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6946 /* We must also check for sets of q as q may be
6947 a call clobbered hard register and there may
6948 be a call between PREV_INSN (insn) and incr. */
6949 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6950 {
6951 /* We have *p followed sometime later by q = p+size.
6952 Both p and q must be live afterward,
6953 and q is not used between INSN and its assignment.
6954 Change it to q = p, ...*q..., q = q+size.
6955 Then fall into the usual case. */
6956 rtx insns, temp;
6957
6958 start_sequence ();
6959 emit_move_insn (q, incr_reg);
6960 insns = get_insns ();
6961 end_sequence ();
6962
6963 if (basic_block_for_insn)
6964 for (temp = insns; temp; temp = NEXT_INSN (temp))
6965 set_block_for_insn (temp, pbi->bb);
6966
6967 /* If we can't make the auto-inc, or can't make the
6968 replacement into Y, exit. There's no point in making
6969 the change below if we can't do the auto-inc and doing
6970 so is not correct in the pre-inc case. */
6971
6972 XEXP (inc, 0) = q;
6973 validate_change (insn, &XEXP (mem, 0), inc, 1);
6974 validate_change (incr, &XEXP (y, opnum), q, 1);
6975 if (! apply_change_group ())
6976 return;
6977
6978 /* We now know we'll be doing this change, so emit the
6979 new insn(s) and do the updates. */
6980 emit_insns_before (insns, insn);
6981
6982 if (pbi->bb->head == insn)
6983 pbi->bb->head = insns;
6984
6985 /* INCR will become a NOTE and INSN won't contain a
6986 use of INCR_REG. If a use of INCR_REG was just placed in
6987 the insn before INSN, make that the next use.
6988 Otherwise, invalidate it. */
6989 if (GET_CODE (PREV_INSN (insn)) == INSN
6990 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6991 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6992 pbi->reg_next_use[regno] = PREV_INSN (insn);
6993 else
6994 pbi->reg_next_use[regno] = 0;
6995
6996 incr_reg = q;
6997 regno = REGNO (q);
6998
6999 /* REGNO is now used in INCR which is below INSN, but
7000 it previously wasn't live here. If we don't mark
7001 it as live, we'll put a REG_DEAD note for it
7002 on this insn, which is incorrect. */
7003 SET_REGNO_REG_SET (pbi->reg_live, regno);
7004
7005 /* If there are any calls between INSN and INCR, show
7006 that REGNO now crosses them. */
7007 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7008 if (GET_CODE (temp) == CALL_INSN)
7009 REG_N_CALLS_CROSSED (regno)++;
7010 }
7011 else
7012 return;
7013
7014 /* If we haven't returned, it means we were able to make the
7015 auto-inc, so update the status. First, record that this insn
7016 has an implicit side effect. */
7017
7018 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7019
7020 /* Modify the old increment-insn to simply copy
7021 the already-incremented value of our register. */
7022 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7023 abort ();
7024
7025 /* If that makes it a no-op (copying the register into itself) delete
7026 it so it won't appear to be a "use" and a "set" of this
7027 register. */
7028 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7029 {
7030 /* If the original source was dead, it's dead now. */
7031 rtx note;
7032
7033 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7034 {
7035 remove_note (incr, note);
7036 if (XEXP (note, 0) != incr_reg)
7037 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7038 }
7039
7040 PUT_CODE (incr, NOTE);
7041 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7042 NOTE_SOURCE_FILE (incr) = 0;
7043 }
7044
7045 if (regno >= FIRST_PSEUDO_REGISTER)
7046 {
7047 /* Count an extra reference to the reg. When a reg is
7048 incremented, spilling it is worse, so we want to make
7049 that less likely. */
7050 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7051
7052 /* Count the increment as a setting of the register,
7053 even though it isn't a SET in rtl. */
7054 REG_N_SETS (regno)++;
7055 }
7056 }
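
/* Example, illustrative only, of the second case above (SImode, 4-byte
   increment, pseudo numbers invented):

	before:   ... (mem:SI (reg:SI 65)) ...			;; INSN
		  (set (reg:SI 70) (plus:SI (reg:SI 65) (const_int 4)))

	after:	  (set (reg:SI 70) (reg:SI 65))			;; emitted before INSN
		  ... (mem:SI (post_inc:SI (reg:SI 70))) ...	;; INSN
		  ;; INCR is now a deleted-insn note

   and INSN gains a REG_INC note for r70.  */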
7057
7058 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
7059 reference. */
7060
7061 static void
7062 find_auto_inc (pbi, x, insn)
7063 struct propagate_block_info *pbi;
7064 rtx x;
7065 rtx insn;
7066 {
7067 rtx addr = XEXP (x, 0);
7068 HOST_WIDE_INT offset = 0;
7069 rtx set, y, incr, inc_val;
7070 int regno;
7071 int size = GET_MODE_SIZE (GET_MODE (x));
7072
7073 if (GET_CODE (insn) == JUMP_INSN)
7074 return;
7075
7076 /* Here we detect use of an index register which might be good for
7077 postincrement, postdecrement, preincrement, or predecrement. */
7078
7079 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7080 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7081
7082 if (GET_CODE (addr) != REG)
7083 return;
7084
7085 regno = REGNO (addr);
7086
7087 /* Is the next use an increment that might make auto-increment? */
7088 incr = pbi->reg_next_use[regno];
7089 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7090 return;
7091 set = single_set (incr);
7092 if (set == 0 || GET_CODE (set) != SET)
7093 return;
7094 y = SET_SRC (set);
7095
7096 if (GET_CODE (y) != PLUS)
7097 return;
7098
7099 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7100 inc_val = XEXP (y, 1);
7101 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7102 inc_val = XEXP (y, 0);
7103 else
7104 return;
7105
7106 if (GET_CODE (inc_val) == CONST_INT)
7107 {
7108 if (HAVE_POST_INCREMENT
7109 && (INTVAL (inc_val) == size && offset == 0))
7110 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7111 incr, addr);
7112 else if (HAVE_POST_DECREMENT
7113 && (INTVAL (inc_val) == -size && offset == 0))
7114 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7115 incr, addr);
7116 else if (HAVE_PRE_INCREMENT
7117 && (INTVAL (inc_val) == size && offset == size))
7118 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7119 incr, addr);
7120 else if (HAVE_PRE_DECREMENT
7121 && (INTVAL (inc_val) == -size && offset == -size))
7122 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7123 incr, addr);
7124 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7125 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7126 gen_rtx_PLUS (Pmode,
7127 addr,
7128 inc_val)),
7129 insn, x, incr, addr);
7130 }
7131 else if (GET_CODE (inc_val) == REG
7132 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7133 NEXT_INSN (incr)))
7134
7135 {
7136 if (HAVE_POST_MODIFY_REG && offset == 0)
7137 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7138 gen_rtx_PLUS (Pmode,
7139 addr,
7140 inc_val)),
7141 insn, x, incr, addr);
7142 }
7143 }
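
/* Summary, illustrative only, of the forms recognized above for a MEM of
   SIZE bytes addressed through ADDR with constant displacement OFFSET,
   where the next use of ADDR adds INC_VAL:

	INC_VAL == SIZE,    OFFSET == 0      =>  POST_INC
	INC_VAL == -SIZE,   OFFSET == 0      =>  POST_DEC
	INC_VAL == SIZE,    OFFSET == SIZE   =>  PRE_INC
	INC_VAL == -SIZE,   OFFSET == -SIZE  =>  PRE_DEC
	other constant,     OFFSET == 0      =>  POST_MODIFY (displacement)
	INC_VAL a register, OFFSET == 0      =>  POST_MODIFY (register)

   each case also requires the corresponding HAVE_* macro for the target.  */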
7144
7145 #endif /* AUTO_INC_DEC */
7146 \f
7147 static void
7148 mark_used_reg (pbi, reg, cond, insn)
7149 struct propagate_block_info *pbi;
7150 rtx reg;
7151 rtx cond ATTRIBUTE_UNUSED;
7152 rtx insn;
7153 {
7154 unsigned int regno_first, regno_last, i;
7155 int some_was_live, some_was_dead, some_not_set;
7156
7157 regno_last = regno_first = REGNO (reg);
7158 if (regno_first < FIRST_PSEUDO_REGISTER)
7159 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7160
7161 /* Find out if any of this register is live after this instruction. */
7162 some_was_live = some_was_dead = 0;
7163 for (i = regno_first; i <= regno_last; ++i)
7164 {
7165 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7166 some_was_live |= needed_regno;
7167 some_was_dead |= ! needed_regno;
7168 }
7169
7170 /* Find out if any of the register was set this insn. */
7171 some_not_set = 0;
7172 for (i = regno_first; i <= regno_last; ++i)
7173 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7174
7175 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7176 {
7177 /* Record where each reg is used, so when the reg is set we know
7178 the next insn that uses it. */
7179 pbi->reg_next_use[regno_first] = insn;
7180 }
7181
7182 if (pbi->flags & PROP_REG_INFO)
7183 {
7184 if (regno_first < FIRST_PSEUDO_REGISTER)
7185 {
7186 /* If this is a register we are going to try to eliminate,
7187 don't mark it live here. If we are successful in
7188 eliminating it, it need not be live unless it is used for
7189 pseudos, in which case it will have been set live when it
7190 was allocated to the pseudos. If the register will not
7191 be eliminated, reload will set it live at that point.
7192
7193 Otherwise, record that this function uses this register. */
7194 	 /* ??? The PPC backend tries to "eliminate" the pic
7195 	    register to itself.  This should be fixed.  In the
7196 	    meantime, hack around it.  */
7197
7198 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7199 && (regno_first == FRAME_POINTER_REGNUM
7200 || regno_first == ARG_POINTER_REGNUM)))
7201 for (i = regno_first; i <= regno_last; ++i)
7202 regs_ever_live[i] = 1;
7203 }
7204 else
7205 {
7206 /* Keep track of which basic block each reg appears in. */
7207
7208 register int blocknum = pbi->bb->index;
7209 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7210 REG_BASIC_BLOCK (regno_first) = blocknum;
7211 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7212 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7213
7214 /* Count (weighted) number of uses of each reg. */
7215 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7216 REG_N_REFS (regno_first)++;
7217 }
7218 }
7219
7220 /* Record and count the insns in which a reg dies. If it is used in
7221 this insn and was dead below the insn then it dies in this insn.
7222 If it was set in this insn, we do not make a REG_DEAD note;
7223 likewise if we already made such a note. */
7224 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7225 && some_was_dead
7226 && some_not_set)
7227 {
7228 /* Check for the case where the register dying partially
7229 overlaps the register set by this insn. */
7230 if (regno_first != regno_last)
7231 for (i = regno_first; i <= regno_last; ++i)
7232 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7233
7234 /* If none of the words in X is needed, make a REG_DEAD note.
7235 Otherwise, we must make partial REG_DEAD notes. */
7236 if (! some_was_live)
7237 {
7238 if ((pbi->flags & PROP_DEATH_NOTES)
7239 && ! find_regno_note (insn, REG_DEAD, regno_first))
7240 REG_NOTES (insn)
7241 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7242
7243 if (pbi->flags & PROP_REG_INFO)
7244 REG_N_DEATHS (regno_first)++;
7245 }
7246 else
7247 {
7248 /* Don't make a REG_DEAD note for a part of a register
7249 that is set in the insn. */
7250 for (i = regno_first; i <= regno_last; ++i)
7251 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7252 && ! dead_or_set_regno_p (insn, i))
7253 REG_NOTES (insn)
7254 = alloc_EXPR_LIST (REG_DEAD,
7255 gen_rtx_REG (reg_raw_mode[i], i),
7256 REG_NOTES (insn));
7257 }
7258 }
7259
7260 /* Mark the register as being live. */
7261 for (i = regno_first; i <= regno_last; ++i)
7262 {
7263 SET_REGNO_REG_SET (pbi->reg_live, i);
7264
7265 #ifdef HAVE_conditional_execution
7266 /* If this is a conditional use, record that fact. If it is later
7267 conditionally set, we'll know to kill the register. */
7268 if (cond != NULL_RTX)
7269 {
7270 splay_tree_node node;
7271 struct reg_cond_life_info *rcli;
7272 rtx ncond;
7273
7274 if (some_was_live)
7275 {
7276 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7277 if (node == NULL)
7278 {
7279 /* The register was unconditionally live previously.
7280 No need to do anything. */
7281 }
7282 else
7283 {
7284 /* The register was conditionally live previously.
7285 Subtract the new life cond from the old death cond. */
7286 rcli = (struct reg_cond_life_info *) node->value;
7287 ncond = rcli->condition;
7288 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7289
7290 /* If the register is now unconditionally live,
7291 remove the entry in the splay_tree. */
7292 if (ncond == const0_rtx)
7293 splay_tree_remove (pbi->reg_cond_dead, i);
7294 else
7295 {
7296 rcli->condition = ncond;
7297 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7298 REGNO (XEXP (cond, 0)));
7299 }
7300 }
7301 }
7302 else
7303 {
7304 /* The register was not previously live at all. Record
7305 the condition under which it is still dead. */
7306 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7307 rcli->condition = not_reg_cond (cond);
7308 rcli->stores = const0_rtx;
7309 rcli->orig_condition = const0_rtx;
7310 splay_tree_insert (pbi->reg_cond_dead, i,
7311 (splay_tree_value) rcli);
7312
7313 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7314 }
7315 }
7316 else if (some_was_live)
7317 {
7318 /* The register may have been conditionally live previously, but
7319 is now unconditionally live. Remove it from the conditionally
7320 dead list, so that a conditional set won't cause us to think
7321 it dead. */
7322 splay_tree_remove (pbi->reg_cond_dead, i);
7323 }
7324 #endif
7325 }
7326 }
7327
7328 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7329 This is done assuming the registers needed from X are those that
7330 have 1-bits in PBI->REG_LIVE.
7331
7332 INSN is the containing instruction. If INSN is dead, this function
7333 is not called. */
7334
7335 static void
7336 mark_used_regs (pbi, x, cond, insn)
7337 struct propagate_block_info *pbi;
7338 rtx x, cond, insn;
7339 {
7340 register RTX_CODE code;
7341 register int regno;
7342 int flags = pbi->flags;
7343
7344 retry:
7345 code = GET_CODE (x);
7346 switch (code)
7347 {
7348 case LABEL_REF:
7349 case SYMBOL_REF:
7350 case CONST_INT:
7351 case CONST:
7352 case CONST_DOUBLE:
7353 case PC:
7354 case ADDR_VEC:
7355 case ADDR_DIFF_VEC:
7356 return;
7357
7358 #ifdef HAVE_cc0
7359 case CC0:
7360 pbi->cc0_live = 1;
7361 return;
7362 #endif
7363
7364 case CLOBBER:
7365 /* If we are clobbering a MEM, mark any registers inside the address
7366 as being used. */
7367 if (GET_CODE (XEXP (x, 0)) == MEM)
7368 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7369 return;
7370
7371 case MEM:
7372 /* Don't bother watching stores to mems if this is not the
7373 final pass. We'll not be deleting dead stores this round. */
7374 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7375 {
7376 /* Invalidate the data for the last MEM stored, but only if MEM is
7377 something that can be stored into. */
7378 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7379 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7380 /* Needn't clear the memory set list. */
7381 ;
7382 else
7383 {
7384 rtx temp = pbi->mem_set_list;
7385 rtx prev = NULL_RTX;
7386 rtx next;
7387
7388 while (temp)
7389 {
7390 next = XEXP (temp, 1);
7391 if (anti_dependence (XEXP (temp, 0), x))
7392 {
7393 /* Splice temp out of the list. */
7394 if (prev)
7395 XEXP (prev, 1) = next;
7396 else
7397 pbi->mem_set_list = next;
7398 free_EXPR_LIST_node (temp);
7399 pbi->mem_set_list_len--;
7400 }
7401 else
7402 prev = temp;
7403 temp = next;
7404 }
7405 }
7406
7407 /* If the memory reference had embedded side effects (autoincrement
7408 address modes), then we may need to kill some entries on the
7409 memory set list. */
7410 if (insn)
7411 invalidate_mems_from_autoinc (pbi, insn);
7412 }
7413
7414 #ifdef AUTO_INC_DEC
7415 if (flags & PROP_AUTOINC)
7416 find_auto_inc (pbi, x, insn);
7417 #endif
7418 break;
7419
7420 case SUBREG:
7421 #ifdef CLASS_CANNOT_CHANGE_MODE
7422 if (GET_CODE (SUBREG_REG (x)) == REG
7423 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7424 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7425 GET_MODE (SUBREG_REG (x))))
7426 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7427 #endif
7428
7429 /* While we're here, optimize this case. */
7430 x = SUBREG_REG (x);
7431 if (GET_CODE (x) != REG)
7432 goto retry;
7433 /* Fall through. */
7434
7435 case REG:
7436 /* We see a register being used (rather than set) => mark it as needed. */
7437 mark_used_reg (pbi, x, cond, insn);
7438 return;
7439
7440 case SET:
7441 {
7442 register rtx testreg = SET_DEST (x);
7443 int mark_dest = 0;
7444
7445 /* If storing into MEM, don't show it as being used. But do
7446 show the address as being used. */
7447 if (GET_CODE (testreg) == MEM)
7448 {
7449 #ifdef AUTO_INC_DEC
7450 if (flags & PROP_AUTOINC)
7451 find_auto_inc (pbi, testreg, insn);
7452 #endif
7453 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7454 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7455 return;
7456 }
7457
7458 /* Storing in STRICT_LOW_PART is like storing in a reg
7459 in that this SET might be dead, so ignore it in TESTREG,
7460 but in some other ways it is like using the reg.
7461
7462 Storing in a SUBREG or a bit field is like storing the entire
7463 register in that if the register's value is not used
7464 then this SET is not needed. */
7465 while (GET_CODE (testreg) == STRICT_LOW_PART
7466 || GET_CODE (testreg) == ZERO_EXTRACT
7467 || GET_CODE (testreg) == SIGN_EXTRACT
7468 || GET_CODE (testreg) == SUBREG)
7469 {
7470 #ifdef CLASS_CANNOT_CHANGE_MODE
7471 if (GET_CODE (testreg) == SUBREG
7472 && GET_CODE (SUBREG_REG (testreg)) == REG
7473 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7474 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7475 GET_MODE (testreg)))
7476 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7477 #endif
7478
7479 /* Modifying a single register in an alternate mode
7480 does not use any of the old value. But these other
7481 ways of storing in a register do use the old value. */
7482 if (GET_CODE (testreg) == SUBREG
7483 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7484 ;
7485 else
7486 mark_dest = 1;
7487
7488 testreg = XEXP (testreg, 0);
7489 }
7490
7491 /* If this is a store into a register or group of registers,
7492 recursively scan the value being stored. */
7493
7494 if ((GET_CODE (testreg) == PARALLEL
7495 && GET_MODE (testreg) == BLKmode)
7496 || (GET_CODE (testreg) == REG
7497 && (regno = REGNO (testreg),
7498 ! (regno == FRAME_POINTER_REGNUM
7499 && (! reload_completed || frame_pointer_needed)))
7500 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7501 && ! (regno == HARD_FRAME_POINTER_REGNUM
7502 && (! reload_completed || frame_pointer_needed))
7503 #endif
7504 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7505 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7506 #endif
7507 ))
7508 {
7509 if (mark_dest)
7510 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7511 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7512 return;
7513 }
7514 }
7515 break;
7516
7517 case ASM_OPERANDS:
7518 case UNSPEC_VOLATILE:
7519 case TRAP_IF:
7520 case ASM_INPUT:
7521 {
7522 /* Traditional and volatile asm instructions must be considered to use
7523 and clobber all hard registers, all pseudo-registers and all of
7524 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7525
7526 Consider for instance a volatile asm that changes the fpu rounding
7527 mode. An insn should not be moved across this even if it only uses
7528 pseudo-regs because it might give an incorrectly rounded result.
7529
7530 ?!? Unfortunately, marking all hard registers as live causes massive
7531 problems for the register allocator and marking all pseudos as live
7532 creates mountains of uninitialized variable warnings.
7533
7534 So for now, just clear the memory set list and mark any regs
7535 we can find in ASM_OPERANDS as used. */
7536 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7537 {
7538 free_EXPR_LIST_list (&pbi->mem_set_list);
7539 pbi->mem_set_list_len = 0;
7540 }
7541
7542 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7543 We cannot just fall through here, since then we would be confused
7544 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
7545 a traditional asm as it would in its normal usage. */
7546 if (code == ASM_OPERANDS)
7547 {
7548 int j;
7549
7550 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7551 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7552 }
7553 break;
7554 }
7555
7556 case COND_EXEC:
7557 if (cond != NULL_RTX)
7558 abort ();
7559
7560 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7561
7562 cond = COND_EXEC_TEST (x);
7563 x = COND_EXEC_CODE (x);
7564 goto retry;
7565
7566 case PHI:
7567 /* We _do_not_ want to scan operands of phi nodes. Operands of
7568 a phi function are evaluated only when control reaches this
7569 block along a particular edge. Therefore, regs that appear
7570 as arguments to phi should not be added to the global live at
7571 start. */
7572 return;
7573
7574 default:
7575 break;
7576 }
7577
7578 /* Recursively scan the operands of this expression. */
7579
7580 {
7581 register const char *fmt = GET_RTX_FORMAT (code);
7582 register int i;
7583
7584 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7585 {
7586 if (fmt[i] == 'e')
7587 {
7588 /* Tail recursive case: save a function call level. */
7589 if (i == 0)
7590 {
7591 x = XEXP (x, 0);
7592 goto retry;
7593 }
7594 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7595 }
7596 else if (fmt[i] == 'E')
7597 {
7598 register int j;
7599 for (j = 0; j < XVECLEN (x, i); j++)
7600 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7601 }
7602 }
7603 }
7604 }
7605 \f
7606 #ifdef AUTO_INC_DEC
7607
7608 static int
7609 try_pre_increment_1 (pbi, insn)
7610 struct propagate_block_info *pbi;
7611 rtx insn;
7612 {
7613 /* Find the next use of this reg. If in same basic block,
7614 make it do pre-increment or pre-decrement if appropriate. */
7615 rtx x = single_set (insn);
7616 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7617 * INTVAL (XEXP (SET_SRC (x), 1)));
7618 int regno = REGNO (SET_DEST (x));
7619 rtx y = pbi->reg_next_use[regno];
7620 if (y != 0
7621 && SET_DEST (x) != stack_pointer_rtx
7622 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7623 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7624 mode would be better. */
7625 && ! dead_or_set_p (y, SET_DEST (x))
7626 && try_pre_increment (y, SET_DEST (x), amount))
7627 {
7628 /* We have found a suitable auto-increment and already changed
7629 insn Y to do it. So flush this increment instruction. */
7630 propagate_block_delete_insn (pbi->bb, insn);
7631
7632 /* Count a reference to this reg for the increment insn we are
7633 deleting. When a reg is incremented, spilling it is worse,
7634 so we want to make that less likely. */
7635 if (regno >= FIRST_PSEUDO_REGISTER)
7636 {
7637 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7638 REG_N_SETS (regno)++;
7639 }
7640
7641 /* Flush any remembered memories depending on the value of
7642 the incremented register. */
7643 invalidate_mems_from_set (pbi, SET_DEST (x));
7644
7645 return 1;
7646 }
7647 return 0;
7648 }
7649
7650 /* Try to change INSN so that it does pre-increment or pre-decrement
7651 addressing on register REG in order to add AMOUNT to REG.
7652 AMOUNT is negative for pre-decrement.
7653 Returns 1 if the change could be made.
7654 This checks all aspects of the validity of the result of modifying INSN. */
7655
7656 static int
7657 try_pre_increment (insn, reg, amount)
7658 rtx insn, reg;
7659 HOST_WIDE_INT amount;
7660 {
7661 register rtx use;
7662
7663 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7664 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7665 int pre_ok = 0;
7666 /* Nonzero if we can try to make a post-increment or post-decrement.
7667 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7668 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7669 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7670 int post_ok = 0;
7671
7672 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7673 int do_post = 0;
7674
7675 /* From the sign of increment, see which possibilities are conceivable
7676 on this target machine. */
7677 if (HAVE_PRE_INCREMENT && amount > 0)
7678 pre_ok = 1;
7679 if (HAVE_POST_INCREMENT && amount > 0)
7680 post_ok = 1;
7681
7682 if (HAVE_PRE_DECREMENT && amount < 0)
7683 pre_ok = 1;
7684 if (HAVE_POST_DECREMENT && amount < 0)
7685 post_ok = 1;
7686
7687 if (! (pre_ok || post_ok))
7688 return 0;
7689
7690 /* It is not safe to add a side effect to a jump insn
7691 because if the incremented register is spilled and must be reloaded
7692 there would be no way to store the incremented value back in memory. */
7693
7694 if (GET_CODE (insn) == JUMP_INSN)
7695 return 0;
7696
7697 use = 0;
7698 if (pre_ok)
7699 use = find_use_as_address (PATTERN (insn), reg, 0);
7700 if (post_ok && (use == 0 || use == (rtx) 1))
7701 {
7702 use = find_use_as_address (PATTERN (insn), reg, -amount);
7703 do_post = 1;
7704 }
7705
7706 if (use == 0 || use == (rtx) 1)
7707 return 0;
7708
7709 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7710 return 0;
7711
7712 /* See if this combination of instruction and addressing mode exists. */
7713 if (! validate_change (insn, &XEXP (use, 0),
7714 gen_rtx_fmt_e (amount > 0
7715 ? (do_post ? POST_INC : PRE_INC)
7716 : (do_post ? POST_DEC : PRE_DEC),
7717 Pmode, reg), 0))
7718 return 0;
7719
7720 /* Record that this insn now has an implicit side effect on X. */
7721 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7722 return 1;
7723 }
7724
7725 #endif /* AUTO_INC_DEC */
7726 \f
7727 /* Find the place in the rtx X where REG is used as a memory address.
7728 Return the MEM rtx that so uses it.
7729 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7730 (plus REG (const_int PLUSCONST)).
7731
7732 If such an address does not appear, return 0.
7733 If REG appears more than once, or is used other than in such an address,
7734 return (rtx)1. */
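
/* As an illustration (a hypothetical sketch, not a quotation of any
   particular caller): a pass holding an insn INSN and a register REG
   might do

     rtx use = find_use_as_address (PATTERN (insn), reg, 0);
     if (use == 0 || use == (rtx) 1)
       ... REG is not used exactly once as an address; give up ...
     else
       ... USE is the single MEM whose address is REG ...

   which is essentially how try_pre_increment above uses it.  */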
7735
7736 rtx
7737 find_use_as_address (x, reg, plusconst)
7738 register rtx x;
7739 rtx reg;
7740 HOST_WIDE_INT plusconst;
7741 {
7742 enum rtx_code code = GET_CODE (x);
7743 const char *fmt = GET_RTX_FORMAT (code);
7744 register int i;
7745 register rtx value = 0;
7746 register rtx tem;
7747
7748 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7749 return x;
7750
7751 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7752 && XEXP (XEXP (x, 0), 0) == reg
7753 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7754 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7755 return x;
7756
7757 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7758 {
7759 /* If REG occurs inside a MEM used in a bit-field reference,
7760 that is unacceptable. */
7761 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7762 return (rtx) (HOST_WIDE_INT) 1;
7763 }
7764
7765 if (x == reg)
7766 return (rtx) (HOST_WIDE_INT) 1;
7767
7768 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7769 {
7770 if (fmt[i] == 'e')
7771 {
7772 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7773 if (value == 0)
7774 value = tem;
7775 else if (tem != 0)
7776 return (rtx) (HOST_WIDE_INT) 1;
7777 }
7778 else if (fmt[i] == 'E')
7779 {
7780 register int j;
7781 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7782 {
7783 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7784 if (value == 0)
7785 value = tem;
7786 else if (tem != 0)
7787 return (rtx) (HOST_WIDE_INT) 1;
7788 }
7789 }
7790 }
7791
7792 return value;
7793 }
7794 \f
7795 /* Write information about registers and basic blocks into FILE.
7796 This is part of making a debugging dump. */
7797
7798 void
7799 dump_regset (r, outf)
7800 regset r;
7801 FILE *outf;
7802 {
7803 int i;
7804 if (r == NULL)
7805 {
7806 fputs (" (nil)", outf);
7807 return;
7808 }
7809
7810 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7811 {
7812 fprintf (outf, " %d", i);
7813 if (i < FIRST_PSEUDO_REGISTER)
7814 fprintf (outf, " [%s]",
7815 reg_names[i]);
7816 });
7817 }
7818
7819 /* Print a human-readable representation of R on the standard error
7820 stream. This function is designed to be used from within the
7821 debugger. */
7822
7823 void
7824 debug_regset (r)
7825 regset r;
7826 {
7827 dump_regset (r, stderr);
7828 putc ('\n', stderr);
7829 }
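
/* For instance, while stopped inside a flow-related function one might
   type (a hypothetical debugger session; the variable names depend on
   the frame being examined):

     (gdb) call debug_regset (bb->global_live_at_start)

   to see the registers that are live on entry to the block BB.  */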
7830
7831 void
7832 dump_flow_info (file)
7833 FILE *file;
7834 {
7835 register int i;
7836 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7837
7838 fprintf (file, "%d registers.\n", max_regno);
7839 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7840 if (REG_N_REFS (i))
7841 {
7842 enum reg_class class, altclass;
7843 fprintf (file, "\nRegister %d used %d times across %d insns",
7844 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7845 if (REG_BASIC_BLOCK (i) >= 0)
7846 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7847 if (REG_N_SETS (i))
7848 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7849 (REG_N_SETS (i) == 1) ? "" : "s");
7850 if (REG_USERVAR_P (regno_reg_rtx[i]))
7851 fprintf (file, "; user var");
7852 if (REG_N_DEATHS (i) != 1)
7853 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7854 if (REG_N_CALLS_CROSSED (i) == 1)
7855 fprintf (file, "; crosses 1 call");
7856 else if (REG_N_CALLS_CROSSED (i))
7857 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7858 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7859 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7860 class = reg_preferred_class (i);
7861 altclass = reg_alternate_class (i);
7862 if (class != GENERAL_REGS || altclass != ALL_REGS)
7863 {
7864 if (altclass == ALL_REGS || class == ALL_REGS)
7865 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7866 else if (altclass == NO_REGS)
7867 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7868 else
7869 fprintf (file, "; pref %s, else %s",
7870 reg_class_names[(int) class],
7871 reg_class_names[(int) altclass]);
7872 }
7873 if (REG_POINTER (regno_reg_rtx[i]))
7874 fprintf (file, "; pointer");
7875 fprintf (file, ".\n");
7876 }
7877
7878 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7879 for (i = 0; i < n_basic_blocks; i++)
7880 {
7881 register basic_block bb = BASIC_BLOCK (i);
7882 register edge e;
7883
7884 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7885 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7886 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7887 fprintf (file, ", freq %i.\n", bb->frequency);
7888
7889 fprintf (file, "Predecessors: ");
7890 for (e = bb->pred; e; e = e->pred_next)
7891 dump_edge_info (file, e, 0);
7892
7893 fprintf (file, "\nSuccessors: ");
7894 for (e = bb->succ; e; e = e->succ_next)
7895 dump_edge_info (file, e, 1);
7896
7897 fprintf (file, "\nRegisters live at start:");
7898 dump_regset (bb->global_live_at_start, file);
7899
7900 fprintf (file, "\nRegisters live at end:");
7901 dump_regset (bb->global_live_at_end, file);
7902
7903 putc ('\n', file);
7904 }
7905
7906 putc ('\n', file);
7907 }
7908
7909 void
7910 debug_flow_info ()
7911 {
7912 dump_flow_info (stderr);
7913 }
7914
7915 void
7916 dump_edge_info (file, e, do_succ)
7917 FILE *file;
7918 edge e;
7919 int do_succ;
7920 {
7921 basic_block side = (do_succ ? e->dest : e->src);
7922
7923 if (side == ENTRY_BLOCK_PTR)
7924 fputs (" ENTRY", file);
7925 else if (side == EXIT_BLOCK_PTR)
7926 fputs (" EXIT", file);
7927 else
7928 fprintf (file, " %d", side->index);
7929
7930 if (e->probability)
7931 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7932
7933 if (e->count)
7934 {
7935 fprintf (file, " count:");
7936 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7937 }
7938
7939 if (e->flags)
7940 {
7941 static const char * const bitnames[] = {
7942 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7943 };
7944 int comma = 0;
7945 int i, flags = e->flags;
7946
7947 fputc (' ', file);
7948 fputc ('(', file);
7949 for (i = 0; flags; i++)
7950 if (flags & (1 << i))
7951 {
7952 flags &= ~(1 << i);
7953
7954 if (comma)
7955 fputc (',', file);
7956 if (i < (int) ARRAY_SIZE (bitnames))
7957 fputs (bitnames[i], file);
7958 else
7959 fprintf (file, "%d", i);
7960 comma = 1;
7961 }
7962 fputc (')', file);
7963 }
7964 }
7965 \f
7966 /* Print out one basic block with live information at start and end. */
7967
7968 void
7969 dump_bb (bb, outf)
7970 basic_block bb;
7971 FILE *outf;
7972 {
7973 rtx insn;
7974 rtx last;
7975 edge e;
7976
7977 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7978 bb->index, bb->loop_depth);
7979 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7980 putc ('\n', outf);
7981
7982 fputs (";; Predecessors: ", outf);
7983 for (e = bb->pred; e; e = e->pred_next)
7984 dump_edge_info (outf, e, 0);
7985 putc ('\n', outf);
7986
7987 fputs (";; Registers live at start:", outf);
7988 dump_regset (bb->global_live_at_start, outf);
7989 putc ('\n', outf);
7990
7991 for (insn = bb->head, last = NEXT_INSN (bb->end);
7992 insn != last;
7993 insn = NEXT_INSN (insn))
7994 print_rtl_single (outf, insn);
7995
7996 fputs (";; Registers live at end:", outf);
7997 dump_regset (bb->global_live_at_end, outf);
7998 putc ('\n', outf);
7999
8000 fputs (";; Successors: ", outf);
8001 for (e = bb->succ; e; e = e->succ_next)
8002 dump_edge_info (outf, e, 1);
8003 putc ('\n', outf);
8004 }
8005
8006 void
8007 debug_bb (bb)
8008 basic_block bb;
8009 {
8010 dump_bb (bb, stderr);
8011 }
8012
8013 void
8014 debug_bb_n (n)
8015 int n;
8016 {
8017 dump_bb (BASIC_BLOCK (n), stderr);
8018 }
8019
8020 /* Like print_rtl, but also print out live information for the start of each
8021 basic block. */
8022
8023 void
8024 print_rtl_with_bb (outf, rtx_first)
8025 FILE *outf;
8026 rtx rtx_first;
8027 {
8028 register rtx tmp_rtx;
8029
8030 if (rtx_first == 0)
8031 fprintf (outf, "(nil)\n");
8032 else
8033 {
8034 int i;
8035 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8036 int max_uid = get_max_uid ();
8037 basic_block *start = (basic_block *)
8038 xcalloc (max_uid, sizeof (basic_block));
8039 basic_block *end = (basic_block *)
8040 xcalloc (max_uid, sizeof (basic_block));
8041 enum bb_state *in_bb_p = (enum bb_state *)
8042 xcalloc (max_uid, sizeof (enum bb_state));
8043
8044 for (i = n_basic_blocks - 1; i >= 0; i--)
8045 {
8046 basic_block bb = BASIC_BLOCK (i);
8047 rtx x;
8048
8049 start[INSN_UID (bb->head)] = bb;
8050 end[INSN_UID (bb->end)] = bb;
8051 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8052 {
8053 enum bb_state state = IN_MULTIPLE_BB;
8054 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8055 state = IN_ONE_BB;
8056 in_bb_p[INSN_UID (x)] = state;
8057
8058 if (x == bb->end)
8059 break;
8060 }
8061 }
8062
8063 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8064 {
8065 int did_output;
8066 basic_block bb;
8067
8068 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8069 {
8070 fprintf (outf, ";; Start of basic block %d, registers live:",
8071 bb->index);
8072 dump_regset (bb->global_live_at_start, outf);
8073 putc ('\n', outf);
8074 }
8075
8076 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8077 && GET_CODE (tmp_rtx) != NOTE
8078 && GET_CODE (tmp_rtx) != BARRIER)
8079 fprintf (outf, ";; Insn is not within a basic block\n");
8080 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8081 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8082
8083 did_output = print_rtl_single (outf, tmp_rtx);
8084
8085 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8086 {
8087 fprintf (outf, ";; End of basic block %d, registers live:\n",
8088 bb->index);
8089 dump_regset (bb->global_live_at_end, outf);
8090 putc ('\n', outf);
8091 }
8092
8093 if (did_output)
8094 putc ('\n', outf);
8095 }
8096
8097 free (start);
8098 free (end);
8099 free (in_bb_p);
8100 }
8101
8102 if (current_function_epilogue_delay_list != 0)
8103 {
8104 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8105 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8106 tmp_rtx = XEXP (tmp_rtx, 1))
8107 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8108 }
8109 }
8110
8111 /* Dump the rtl into the current debugging dump file, then abort. */
8112
8113 static void
8114 print_rtl_and_abort_fcn (file, line, function)
8115 const char *file;
8116 int line;
8117 const char *function;
8118 {
8119 if (rtl_dump_file)
8120 {
8121 print_rtl_with_bb (rtl_dump_file, get_insns ());
8122 fclose (rtl_dump_file);
8123 }
8124
8125 fancy_abort (file, line, function);
8126 }
8127
8128 /* Recompute register set/reference counts immediately prior to register
8129 allocation.
8130
8131 This avoids problems with set/reference counts changing to/from values
8132 which have special meanings to the register allocators.
8133
8134 Additionally, the reference counts are the primary component used by the
8135 register allocators to prioritize pseudos for allocation to hard regs.
8136 More accurate reference counts generally lead to better register allocation.
8137
8138 F is the first insn to be scanned.
8139
8140 LOOP_STEP denotes how much loop_depth should be incremented per
8141 loop nesting level in order to increase the ref count more for
8142 references in a loop.
8143
8144 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8145 possibly other information which is used by the register allocators. */
8146
8147 void
8148 recompute_reg_usage (f, loop_step)
8149 rtx f ATTRIBUTE_UNUSED;
8150 int loop_step ATTRIBUTE_UNUSED;
8151 {
8152 allocate_reg_life_data ();
8153 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8154 }
8155
8156 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8157 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8158 of the number of registers that died. */
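
/* For example (a hypothetical sketch): a pass that merely wants to know
   how many registers die in the whole function, without disturbing the
   notes, could call

     int deaths = count_or_remove_death_notes (NULL, 0);

   whereas passing a nonzero KILL argument would also strip the REG_DEAD
   and REG_UNUSED notes from the selected blocks.  */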
8159
8160 int
8161 count_or_remove_death_notes (blocks, kill)
8162 sbitmap blocks;
8163 int kill;
8164 {
8165 int i, count = 0;
8166
8167 for (i = n_basic_blocks - 1; i >= 0; --i)
8168 {
8169 basic_block bb;
8170 rtx insn;
8171
8172 if (blocks && ! TEST_BIT (blocks, i))
8173 continue;
8174
8175 bb = BASIC_BLOCK (i);
8176
8177 for (insn = bb->head;; insn = NEXT_INSN (insn))
8178 {
8179 if (INSN_P (insn))
8180 {
8181 rtx *pprev = &REG_NOTES (insn);
8182 rtx link = *pprev;
8183
8184 while (link)
8185 {
8186 switch (REG_NOTE_KIND (link))
8187 {
8188 case REG_DEAD:
8189 if (GET_CODE (XEXP (link, 0)) == REG)
8190 {
8191 rtx reg = XEXP (link, 0);
8192 int n;
8193
8194 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8195 n = 1;
8196 else
8197 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8198 count += n;
8199 }
8200 /* Fall through. */
8201
8202 case REG_UNUSED:
8203 if (kill)
8204 {
8205 rtx next = XEXP (link, 1);
8206 free_EXPR_LIST_node (link);
8207 *pprev = link = next;
8208 break;
8209 }
8210 /* Fall through. */
8211
8212 default:
8213 pprev = &XEXP (link, 1);
8214 link = *pprev;
8215 break;
8216 }
8217 }
8218 }
8219
8220 if (insn == bb->end)
8221 break;
8222 }
8223 }
8224
8225 return count;
8226 }
8227
8228
8229 /* Update the basic block recorded for each insn within BB. */
8230
8231 void
8232 update_bb_for_insn (bb)
8233 basic_block bb;
8234 {
8235 rtx insn;
8236
8237 if (! basic_block_for_insn)
8238 return;
8239
8240 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8241 {
8242 set_block_for_insn (insn, bb);
8243
8244 if (insn == bb->end)
8245 break;
8246 }
8247 }
8248
8249
8250 /* Record INSN's block as BB. */
8251
8252 void
8253 set_block_for_insn (insn, bb)
8254 rtx insn;
8255 basic_block bb;
8256 {
8257 size_t uid = INSN_UID (insn);
8258 if (uid >= basic_block_for_insn->num_elements)
8259 {
8260 int new_size;
8261
8262 /* Add one-eighth the size so we don't keep calling xrealloc. */
8263 new_size = uid + (uid + 7) / 8;
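      /* Purely illustrative arithmetic: for a uid of 800 this grows the
         array to 800 + 807 / 8 = 900 elements, i.e. roughly one-eighth
         of headroom is added on each reallocation.  */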
8264
8265 VARRAY_GROW (basic_block_for_insn, new_size);
8266 }
8267 VARRAY_BB (basic_block_for_insn, uid) = bb;
8268 }
8269
8270 /* When a new insn has been inserted into an existing block, the insertion
8271 will sometimes have emitted more than a single insn. This routine will set the
8272 block number for the specified insn, and look backwards in the insn
8273 chain to see if there are any other uninitialized insns immediately
8274 previous to this one, and set the block number for them too. */
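
/* A typical use (a hypothetical sketch; SEQ is a multi-insn sequence the
   caller has just emitted inside BB) looks like

     last = emit_insn_after (seq, place);
     set_block_for_new_insns (last, bb);

   so that every insn of the new sequence ends up with its block set.  */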
8275
8276 void
8277 set_block_for_new_insns (insn, bb)
8278 rtx insn;
8279 basic_block bb;
8280 {
8281 set_block_for_insn (insn, bb);
8282
8283 /* Scan the previous instructions setting the block number until we find
8284 an instruction that has the block number set, or we find a note
8285 of any kind. */
8286 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8287 {
8288 if (GET_CODE (insn) == NOTE)
8289 break;
8290 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
8291 || BLOCK_FOR_INSN (insn) == 0)
8292 set_block_for_insn (insn, bb);
8293 else
8294 break;
8295 }
8296 }
8297 \f
8298 /* Verify CFG consistency. This function checks some CFG invariants and
8299 aborts when something is wrong. The hope is that it will help to
8300 convert many optimization passes to keep the CFG consistent.
8301
8302 Currently it does the following checks:
8303
8304 - test head/end pointers
8305 - overlapping of basic blocks
8306 - edge list correctness
8307 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8308 - tails of basic blocks (ensure that the boundary is necessary)
8309 - scans the body of the basic block for JUMP_INSN, CODE_LABEL
8310 and NOTE_INSN_BASIC_BLOCK
8311 - check that all insns are in the basic blocks
8312 (except the switch handling code, barriers and notes)
8313 - check that all returns are followed by barriers
8314
8315 In the future it can be extended to check a lot of other stuff as well
8316 (reachability of basic blocks, life information, etc.). */
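
/* A pass that rewrites the CFG would typically call this right after its
   transformation, often only when checking is enabled (a hypothetical
   sketch, not a quotation of any particular caller):

     #ifdef ENABLE_CHECKING
     verify_flow_info ();
     #endif
*/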
8317
8318 void
8319 verify_flow_info ()
8320 {
8321 const int max_uid = get_max_uid ();
8322 const rtx rtx_first = get_insns ();
8323 rtx last_head = get_last_insn ();
8324 basic_block *bb_info, *last_visited;
8325 rtx x;
8326 int i, last_bb_num_seen, num_bb_notes, err = 0;
8327
8328 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8329 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8330 sizeof (basic_block));
8331
8332 for (i = n_basic_blocks - 1; i >= 0; i--)
8333 {
8334 basic_block bb = BASIC_BLOCK (i);
8335 rtx head = bb->head;
8336 rtx end = bb->end;
8337
8338 /* Verify the end of the basic block is in the INSN chain. */
8339 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8340 if (x == end)
8341 break;
8342 if (!x)
8343 {
8344 error ("End insn %d for block %d not found in the insn stream.",
8345 INSN_UID (end), bb->index);
8346 err = 1;
8347 }
8348
8349 /* Work backwards from the end to the head of the basic block
8350 to verify the head is in the RTL chain. */
8351 for (; x != NULL_RTX; x = PREV_INSN (x))
8352 {
8353 /* While walking over the insn chain, verify insns appear
8354 in only one basic block and initialize the BB_INFO array
8355 used by other passes. */
8356 if (bb_info[INSN_UID (x)] != NULL)
8357 {
8358 error ("Insn %d is in multiple basic blocks (%d and %d)",
8359 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8360 err = 1;
8361 }
8362 bb_info[INSN_UID (x)] = bb;
8363
8364 if (x == head)
8365 break;
8366 }
8367 if (!x)
8368 {
8369 error ("Head insn %d for block %d not found in the insn stream.",
8370 INSN_UID (head), bb->index);
8371 err = 1;
8372 }
8373
8374 last_head = x;
8375 }
8376
8377 /* Now check the basic blocks (boundaries etc.) */
8378 for (i = n_basic_blocks - 1; i >= 0; i--)
8379 {
8380 basic_block bb = BASIC_BLOCK (i);
8381 /* Check correctness of edge lists. */
8382 edge e;
8383 int has_fallthru = 0;
8384
8385 e = bb->succ;
8386 while (e)
8387 {
8388 if (last_visited [e->dest->index + 2] == bb)
8389 {
8390 error ("verify_flow_info: Duplicate edge %i->%i",
8391 e->src->index, e->dest->index);
8392 err = 1;
8393 }
8394 last_visited [e->dest->index + 2] = bb;
8395
8396 if (e->flags & EDGE_FALLTHRU)
8397 has_fallthru = 1;
8398
8399 if ((e->flags & EDGE_FALLTHRU)
8400 && e->src != ENTRY_BLOCK_PTR
8401 && e->dest != EXIT_BLOCK_PTR)
8402 {
8403 rtx insn;
8404 if (e->src->index + 1 != e->dest->index)
8405 {
8406 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8407 e->src->index, e->dest->index);
8408 err = 1;
8409 }
8410 else
8411 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8412 insn = NEXT_INSN (insn))
8413 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8414 {
8415 error ("verify_flow_info: Incorrect fallthru %i->%i",
8416 e->src->index, e->dest->index);
8417 fatal_insn ("Wrong insn in the fallthru edge", insn);
8418 err = 1;
8419 }
8420 }
8421 if (e->src != bb)
8422 {
8423 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8424 bb->index);
8425 fprintf (stderr, "Predecessor: ");
8426 dump_edge_info (stderr, e, 0);
8427 fprintf (stderr, "\nSuccessor: ");
8428 dump_edge_info (stderr, e, 1);
8429 fprintf (stderr, "\n");
8430 err = 1;
8431 }
8432 if (e->dest != EXIT_BLOCK_PTR)
8433 {
8434 edge e2 = e->dest->pred;
8435 while (e2 && e2 != e)
8436 e2 = e2->pred_next;
8437 if (!e2)
8438 {
8439 error ("Basic block %i edge lists are corrupted", bb->index);
8440 err = 1;
8441 }
8442 }
8443 e = e->succ_next;
8444 }
8445 if (!has_fallthru)
8446 {
8447 rtx insn = bb->end;
8448
8449 /* Ensure existence of barrier in BB with no fallthru edges. */
8450 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8451 insn = NEXT_INSN (insn))
8452 if (!insn
8453 || (GET_CODE (insn) == NOTE
8454 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8455 {
8456 error ("Missing barrier after block %i", bb->index);
8457 err = 1;
8458 }
8459 }
8460
8461 e = bb->pred;
8462 while (e)
8463 {
8464 if (e->dest != bb)
8465 {
8466 error ("Basic block %d pred edge is corrupted", bb->index);
8467 fputs ("Predecessor: ", stderr);
8468 dump_edge_info (stderr, e, 0);
8469 fputs ("\nSuccessor: ", stderr);
8470 dump_edge_info (stderr, e, 1);
8471 fputc ('\n', stderr);
8472 err = 1;
8473 }
8474 if (e->src != ENTRY_BLOCK_PTR)
8475 {
8476 edge e2 = e->src->succ;
8477 while (e2 && e2 != e)
8478 e2 = e2->succ_next;
8479 if (!e2)
8480 {
8481 error ("Basic block %i edge lists are corrupted", bb->index);
8482 err = 1;
8483 }
8484 }
8485 e = e->pred_next;
8486 }
8487
8488 /* OK, the pointers are correct. Now check the header of the basic
8489 block. It ought to contain an optional CODE_LABEL followed
8490 by NOTE_INSN_BASIC_BLOCK. */
8491 x = bb->head;
8492 if (GET_CODE (x) == CODE_LABEL)
8493 {
8494 if (bb->end == x)
8495 {
8496 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8497 bb->index);
8498 err = 1;
8499 }
8500 x = NEXT_INSN (x);
8501 }
8502 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8503 {
8504 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8505 bb->index);
8506 err = 1;
8507 }
8508
8509 if (bb->end == x)
8510 {
8511 /* Do checks for empty blocks here */
8512 }
8513 else
8514 {
8515 x = NEXT_INSN (x);
8516 while (x)
8517 {
8518 if (NOTE_INSN_BASIC_BLOCK_P (x))
8519 {
8520 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8521 INSN_UID (x), bb->index);
8522 err = 1;
8523 }
8524
8525 if (x == bb->end)
8526 break;
8527
8528 if (GET_CODE (x) == JUMP_INSN
8529 || GET_CODE (x) == CODE_LABEL
8530 || GET_CODE (x) == BARRIER)
8531 {
8532 error ("In basic block %d:", bb->index);
8533 fatal_insn ("Flow control insn inside a basic block", x);
8534 }
8535
8536 x = NEXT_INSN (x);
8537 }
8538 }
8539 }
8540
8541 last_bb_num_seen = -1;
8542 num_bb_notes = 0;
8543 x = rtx_first;
8544 while (x)
8545 {
8546 if (NOTE_INSN_BASIC_BLOCK_P (x))
8547 {
8548 basic_block bb = NOTE_BASIC_BLOCK (x);
8549 num_bb_notes++;
8550 if (bb->index != last_bb_num_seen + 1)
8551 internal_error ("Basic blocks not numbered consecutively.");
8552
8553 last_bb_num_seen = bb->index;
8554 }
8555
8556 if (!bb_info[INSN_UID (x)])
8557 {
8558 switch (GET_CODE (x))
8559 {
8560 case BARRIER:
8561 case NOTE:
8562 break;
8563
8564 case CODE_LABEL:
8565 /* An addr_vec is placed outside any basic block. */
8566 if (NEXT_INSN (x)
8567 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8568 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8569 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8570 {
8571 x = NEXT_INSN (x);
8572 }
8573
8574 /* But in any case, non-deletable labels can appear anywhere. */
8575 break;
8576
8577 default:
8578 fatal_insn ("Insn outside basic block", x);
8579 }
8580 }
8581
8582 if (INSN_P (x)
8583 && GET_CODE (x) == JUMP_INSN
8584 && returnjump_p (x) && ! condjump_p (x)
8585 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8586 fatal_insn ("Return not followed by barrier", x);
8587
8588 x = NEXT_INSN (x);
8589 }
8590
8591 if (num_bb_notes != n_basic_blocks)
8592 internal_error
8593 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8594 num_bb_notes, n_basic_blocks);
8595
8596 if (err)
8597 internal_error ("verify_flow_info failed.");
8598
8599 /* Clean up. */
8600 free (bb_info);
8601 free (last_visited);
8602 }
8603 \f
8604 /* Functions to access an edge list with a vector representation.
8605 Enough data is kept such that given an index number, the
8606 pred and succ that edge represents can be determined, or
8607 given a pred and a succ, its index number can be returned.
8608 This allows algorithms which consume a lot of memory to
8609 represent the normally full matrix of edge (pred,succ) with a
8610 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8611 wasted space in the client code due to sparse flow graphs. */
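
/* As an illustration (a hypothetical sketch, not taken from any
   particular pass): a client that wants a dense numbering of the edges
   might do

     struct edge_list *elist = create_edge_list ();
     int ix = EDGE_INDEX (elist, pred_bb, succ_bb);
     if (ix != EDGE_INDEX_NO_EDGE)
       ... operate on INDEX_EDGE (elist, ix) ...
     free_edge_list (elist);

   where PRED_BB and SUCC_BB are basic blocks already known to the
   caller.  */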
8612
8613 /* This function initializes the edge list. Basically the entire
8614 flowgraph is processed, all edges are assigned a number,
8615 and the data structure is filled in. */
8616
8617 struct edge_list *
8618 create_edge_list ()
8619 {
8620 struct edge_list *elist;
8621 edge e;
8622 int num_edges;
8623 int x;
8624 int block_count;
8625
8626 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8627
8628 num_edges = 0;
8629
8630 /* Determine the number of edges in the flow graph by counting successor
8631 edges on each basic block. */
8632 for (x = 0; x < n_basic_blocks; x++)
8633 {
8634 basic_block bb = BASIC_BLOCK (x);
8635
8636 for (e = bb->succ; e; e = e->succ_next)
8637 num_edges++;
8638 }
8639 /* Don't forget successors of the entry block. */
8640 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8641 num_edges++;
8642
8643 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8644 elist->num_blocks = block_count;
8645 elist->num_edges = num_edges;
8646 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8647
8648 num_edges = 0;
8649
8650 /* Follow successors of the entry block, and register these edges. */
8651 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8652 {
8653 elist->index_to_edge[num_edges] = e;
8654 num_edges++;
8655 }
8656
8657 for (x = 0; x < n_basic_blocks; x++)
8658 {
8659 basic_block bb = BASIC_BLOCK (x);
8660
8661 /* Follow all successors of blocks, and register these edges. */
8662 for (e = bb->succ; e; e = e->succ_next)
8663 {
8664 elist->index_to_edge[num_edges] = e;
8665 num_edges++;
8666 }
8667 }
8668 return elist;
8669 }
8670
8671 /* This function frees the memory associated with an edge list. */
8672
8673 void
8674 free_edge_list (elist)
8675 struct edge_list *elist;
8676 {
8677 if (elist)
8678 {
8679 free (elist->index_to_edge);
8680 free (elist);
8681 }
8682 }
8683
8684 /* This function provides debug output showing an edge list. */
8685
8686 void
8687 print_edge_list (f, elist)
8688 FILE *f;
8689 struct edge_list *elist;
8690 {
8691 int x;
8692 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8693 elist->num_blocks - 2, elist->num_edges);
8694
8695 for (x = 0; x < elist->num_edges; x++)
8696 {
8697 fprintf (f, " %-4d - edge(", x);
8698 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8699 fprintf (f, "entry,");
8700 else
8701 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8702
8703 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8704 fprintf (f, "exit)\n");
8705 else
8706 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8707 }
8708 }
8709
8710 /* This function provides an internal consistency check of an edge list,
8711 verifying that all edges are present, and that there are no
8712 extra edges. */
8713
8714 void
8715 verify_edge_list (f, elist)
8716 FILE *f;
8717 struct edge_list *elist;
8718 {
8719 int x, pred, succ, index;
8720 edge e;
8721
8722 for (x = 0; x < n_basic_blocks; x++)
8723 {
8724 basic_block bb = BASIC_BLOCK (x);
8725
8726 for (e = bb->succ; e; e = e->succ_next)
8727 {
8728 pred = e->src->index;
8729 succ = e->dest->index;
8730 index = EDGE_INDEX (elist, e->src, e->dest);
8731 if (index == EDGE_INDEX_NO_EDGE)
8732 {
8733 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8734 continue;
8735 }
8736 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8737 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8738 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8739 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8740 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8741 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8742 }
8743 }
8744 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8745 {
8746 pred = e->src->index;
8747 succ = e->dest->index;
8748 index = EDGE_INDEX (elist, e->src, e->dest);
8749 if (index == EDGE_INDEX_NO_EDGE)
8750 {
8751 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8752 continue;
8753 }
8754 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8755 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8756 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8757 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8758 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8759 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8760 }
8761 /* We've verified that all the edges are in the list; now let's make sure
8762 there are no spurious edges in the list. */
8763
8764 for (pred = 0; pred < n_basic_blocks; pred++)
8765 for (succ = 0; succ < n_basic_blocks; succ++)
8766 {
8767 basic_block p = BASIC_BLOCK (pred);
8768 basic_block s = BASIC_BLOCK (succ);
8769
8770 int found_edge = 0;
8771
8772 for (e = p->succ; e; e = e->succ_next)
8773 if (e->dest == s)
8774 {
8775 found_edge = 1;
8776 break;
8777 }
8778 for (e = s->pred; e; e = e->pred_next)
8779 if (e->src == p)
8780 {
8781 found_edge = 1;
8782 break;
8783 }
8784 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8785 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8786 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8787 pred, succ);
8788 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8789 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8790 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8791 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8792 BASIC_BLOCK (succ)));
8793 }
8794 for (succ = 0; succ < n_basic_blocks; succ++)
8795 {
8796 basic_block p = ENTRY_BLOCK_PTR;
8797 basic_block s = BASIC_BLOCK (succ);
8798
8799 int found_edge = 0;
8800
8801 for (e = p->succ; e; e = e->succ_next)
8802 if (e->dest == s)
8803 {
8804 found_edge = 1;
8805 break;
8806 }
8807 for (e = s->pred; e; e = e->pred_next)
8808 if (e->src == p)
8809 {
8810 found_edge = 1;
8811 break;
8812 }
8813 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8814 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8815 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8816 succ);
8817 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8818 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8819 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8820 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8821 BASIC_BLOCK (succ)));
8822 }
8823 for (pred = 0; pred < n_basic_blocks; pred++)
8824 {
8825 basic_block p = BASIC_BLOCK (pred);
8826 basic_block s = EXIT_BLOCK_PTR;
8827
8828 int found_edge = 0;
8829
8830 for (e = p->succ; e; e = e->succ_next)
8831 if (e->dest == s)
8832 {
8833 found_edge = 1;
8834 break;
8835 }
8836 for (e = s->pred; e; e = e->pred_next)
8837 if (e->src == p)
8838 {
8839 found_edge = 1;
8840 break;
8841 }
8842 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8843 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8844 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8845 pred);
8846 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8847 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8848 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8849 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8850 EXIT_BLOCK_PTR));
8851 }
8852 }
8853
8854 /* This routine will determine what, if any, edge there is between
8855 a specified predecessor and successor. */
8856
8857 int
8858 find_edge_index (edge_list, pred, succ)
8859 struct edge_list *edge_list;
8860 basic_block pred, succ;
8861 {
8862 int x;
8863 for (x = 0; x < NUM_EDGES (edge_list); x++)
8864 {
8865 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8866 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8867 return x;
8868 }
8869 return (EDGE_INDEX_NO_EDGE);
8870 }
8871
8872 /* This function will remove an edge from the flow graph. */
8873
8874 void
8875 remove_edge (e)
8876 edge e;
8877 {
8878 edge last_pred = NULL;
8879 edge last_succ = NULL;
8880 edge tmp;
8881 basic_block src, dest;
8882 src = e->src;
8883 dest = e->dest;
8884 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8885 last_succ = tmp;
8886
8887 if (!tmp)
8888 abort ();
8889 if (last_succ)
8890 last_succ->succ_next = e->succ_next;
8891 else
8892 src->succ = e->succ_next;
8893
8894 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8895 last_pred = tmp;
8896
8897 if (!tmp)
8898 abort ();
8899 if (last_pred)
8900 last_pred->pred_next = e->pred_next;
8901 else
8902 dest->pred = e->pred_next;
8903
8904 n_edges--;
8905 free (e);
8906 }
8907
8908 /* This routine will remove any fake successor edges for a basic block.
8909 When the edge is removed, it is also removed from whatever predecessor
8910 list it is in. */
8911
8912 static void
8913 remove_fake_successors (bb)
8914 basic_block bb;
8915 {
8916 edge e;
8917 for (e = bb->succ; e;)
8918 {
8919 edge tmp = e;
8920 e = e->succ_next;
8921 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8922 remove_edge (tmp);
8923 }
8924 }
8925
8926 /* This routine will remove all fake edges from the flow graph. If
8927 we remove all fake successors, it will automatically remove all
8928 fake predecessors. */
8929
8930 void
8931 remove_fake_edges ()
8932 {
8933 int x;
8934
8935 for (x = 0; x < n_basic_blocks; x++)
8936 remove_fake_successors (BASIC_BLOCK (x));
8937
8938 /* We've handled all successors except the entry block's. */
8939 remove_fake_successors (ENTRY_BLOCK_PTR);
8940 }
8941
8942 /* This function will add a fake edge between any block which has no
8943 successors, and the exit block. Some data flow equations require these
8944 edges to exist. */
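
/* A dataflow client that needs every block to have a path to the exit
   would typically bracket its solver with these calls (a hypothetical
   sketch):

     add_noreturn_fake_exit_edges ();
     ... set up and solve the dataflow equations ...
     remove_fake_edges ();

   relying on the fact that the fake edges carry EDGE_FAKE and are
   therefore removed again by remove_fake_edges.  */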
8945
8946 void
8947 add_noreturn_fake_exit_edges ()
8948 {
8949 int x;
8950
8951 for (x = 0; x < n_basic_blocks; x++)
8952 if (BASIC_BLOCK (x)->succ == NULL)
8953 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8954 }
8955
8956 /* This function adds a fake edge from any infinite loop to the
8957 exit block. Some optimizations require a path from each node to
8958 the exit node.
8959
8960 See also Morgan, Figure 3.10, pp. 82-83.
8961
8962 The current implementation is ugly, not attempting to minimize the
8963 number of inserted fake edges. To reduce the number of fake edges
8964 to insert, add fake edges from _innermost_ loops containing only
8965 nodes not reachable from the exit block. */
8966
8967 void
8968 connect_infinite_loops_to_exit ()
8969 {
8970 basic_block unvisited_block;
8971
8972 /* Perform depth-first search in the reverse graph to find nodes
8973 reachable from the exit block. */
8974 struct depth_first_search_dsS dfs_ds;
8975
8976 flow_dfs_compute_reverse_init (&dfs_ds);
8977 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8978
8979 /* Repeatedly add fake edges, updating the unreachable nodes. */
8980 while (1)
8981 {
8982 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8983 if (!unvisited_block)
8984 break;
8985 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8986 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8987 }
8988
8989 flow_dfs_compute_reverse_finish (&dfs_ds);
8990
8991 return;
8992 }
8993
8994 /* Redirect an edge's successor from one block to another. */
8995
8996 void
8997 redirect_edge_succ (e, new_succ)
8998 edge e;
8999 basic_block new_succ;
9000 {
9001 edge *pe;
9002
9003 /* Disconnect the edge from the old successor block. */
9004 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
9005 continue;
9006 *pe = (*pe)->pred_next;
9007
9008 /* Reconnect the edge to the new successor block. */
9009 e->pred_next = new_succ->pred;
9010 new_succ->pred = e;
9011 e->dest = new_succ;
9012 }
9013
9014 /* Like the previous function, but avoid a possible duplicate edge. */
9015
9016 void
9017 redirect_edge_succ_nodup (e, new_succ)
9018 edge e;
9019 basic_block new_succ;
9020 {
9021 edge s;
9022 /* Check whether the edge is already present. */
9023 for (s = e->src->succ; s; s = s->succ_next)
9024 if (s->dest == new_succ && s != e)
9025 break;
9026 if (s)
9027 {
9028 s->flags |= e->flags;
9029 s->probability += e->probability;
9030 s->count += e->count;
9031 remove_edge (e);
9032 }
9033 else
9034 redirect_edge_succ (e, new_succ);
9035 }
9036
9037 /* Redirect an edge's predecessor from one block to another. */
9038
9039 void
9040 redirect_edge_pred (e, new_pred)
9041 edge e;
9042 basic_block new_pred;
9043 {
9044 edge *pe;
9045
9046 /* Disconnect the edge from the old predecessor block. */
9047 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
9048 continue;
9049 *pe = (*pe)->succ_next;
9050
9051 /* Reconnect the edge to the new predecessor block. */
9052 e->succ_next = new_pred->succ;
9053 new_pred->succ = e;
9054 e->src = new_pred;
9055 }
9056 \f
9057 /* Dump the list of basic blocks in the bitmap NODES. */
9058
9059 static void
9060 flow_nodes_print (str, nodes, file)
9061 const char *str;
9062 const sbitmap nodes;
9063 FILE *file;
9064 {
9065 int node;
9066
9067 if (! nodes)
9068 return;
9069
9070 fprintf (file, "%s { ", str);
9071 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
9072 fputs ("}\n", file);
9073 }
9074
9075
9076 /* Dump the list of edges in the array EDGE_LIST. */
9077
9078 static void
9079 flow_edge_list_print (str, edge_list, num_edges, file)
9080 const char *str;
9081 const edge *edge_list;
9082 int num_edges;
9083 FILE *file;
9084 {
9085 int i;
9086
9087 if (! edge_list)
9088 return;
9089
9090 fprintf (file, "%s { ", str);
9091 for (i = 0; i < num_edges; i++)
9092 fprintf (file, "%d->%d ", edge_list[i]->src->index,
9093 edge_list[i]->dest->index);
9094 fputs ("}\n", file);
9095 }
9096
9097
9098 /* Dump loop related CFG information. */
9099
9100 static void
9101 flow_loops_cfg_dump (loops, file)
9102 const struct loops *loops;
9103 FILE *file;
9104 {
9105 int i;
9106
9107 if (! loops->num || ! file || ! loops->cfg.dom)
9108 return;
9109
9110 for (i = 0; i < n_basic_blocks; i++)
9111 {
9112 edge succ;
9113
9114 fprintf (file, ";; %d succs { ", i);
9115 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
9116 fprintf (file, "%d ", succ->dest->index);
9117 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
9118 }
9119
9120 /* Dump the DFS node order. */
9121 if (loops->cfg.dfs_order)
9122 {
9123 fputs (";; DFS order: ", file);
9124 for (i = 0; i < n_basic_blocks; i++)
9125 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
9126 fputs ("\n", file);
9127 }
9128 /* Dump the reverse completion node order. */
9129 if (loops->cfg.rc_order)
9130 {
9131 fputs (";; RC order: ", file);
9132 for (i = 0; i < n_basic_blocks; i++)
9133 fprintf (file, "%d ", loops->cfg.rc_order[i]);
9134 fputs ("\n", file);
9135 }
9136 }
9137
9138 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
9139
9140 static int
9141 flow_loop_nested_p (outer, loop)
9142 struct loop *outer;
9143 struct loop *loop;
9144 {
9145 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
9146 }
9147
9148
9149 /* Dump the loop information specified by LOOP to the stream FILE
9150 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9151 void
9152 flow_loop_dump (loop, file, loop_dump_aux, verbose)
9153 const struct loop *loop;
9154 FILE *file;
9155 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9156 int verbose;
9157 {
9158 if (! loop || ! loop->header)
9159 return;
9160
9161 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9162 loop->num, INSN_UID (loop->first->head),
9163 INSN_UID (loop->last->end),
9164 loop->shared ? " shared" : "",
9165 loop->invalid ? " invalid" : "");
9166 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9167 loop->header->index, loop->latch->index,
9168 loop->pre_header ? loop->pre_header->index : -1,
9169 loop->first->index, loop->last->index);
9170 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9171 loop->depth, loop->level,
9172 (long) (loop->outer ? loop->outer->num : -1));
9173
9174 if (loop->pre_header_edges)
9175 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9176 loop->num_pre_header_edges, file);
9177 flow_edge_list_print (";; entry edges", loop->entry_edges,
9178 loop->num_entries, file);
9179 fprintf (file, ";; %d", loop->num_nodes);
9180 flow_nodes_print (" nodes", loop->nodes, file);
9181 flow_edge_list_print (";; exit edges", loop->exit_edges,
9182 loop->num_exits, file);
9183 if (loop->exits_doms)
9184 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9185 if (loop_dump_aux)
9186 loop_dump_aux (loop, file, verbose);
9187 }
9188
9189
9190 /* Dump the loop information specified by LOOPS to the stream FILE,
9191 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9192 void
9193 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9194 const struct loops *loops;
9195 FILE *file;
9196 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9197 int verbose;
9198 {
9199 int i;
9200 int num_loops;
9201
9202 num_loops = loops->num;
9203 if (! num_loops || ! file)
9204 return;
9205
9206 fprintf (file, ";; %d loops found, %d levels\n",
9207 num_loops, loops->levels);
9208
9209 for (i = 0; i < num_loops; i++)
9210 {
9211 struct loop *loop = &loops->array[i];
9212
9213 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9214
9215 if (loop->shared)
9216 {
9217 int j;
9218
9219 for (j = 0; j < i; j++)
9220 {
9221 struct loop *oloop = &loops->array[j];
9222
9223 if (loop->header == oloop->header)
9224 {
9225 int disjoint;
9226 int smaller;
9227
9228 smaller = loop->num_nodes < oloop->num_nodes;
9229
9230 /* If the union of LOOP and OLOOP is different from
9231 the larger of LOOP and OLOOP, then LOOP and OLOOP
9232 must be disjoint. */
9233 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9234 smaller ? oloop : loop);
9235 fprintf (file,
9236 ";; loop header %d shared by loops %d, %d %s\n",
9237 loop->header->index, i, j,
9238 disjoint ? "disjoint" : "nested");
9239 }
9240 }
9241 }
9242 }
9243
9244 if (verbose)
9245 flow_loops_cfg_dump (loops, file);
9246 }
9247
9248
9249 /* Free all the memory allocated for LOOPS. */
9250
9251 void
9252 flow_loops_free (loops)
9253 struct loops *loops;
9254 {
9255 if (loops->array)
9256 {
9257 int i;
9258
9259 if (! loops->num)
9260 abort ();
9261
9262 /* Free the loop descriptors. */
9263 for (i = 0; i < loops->num; i++)
9264 {
9265 struct loop *loop = &loops->array[i];
9266
9267 if (loop->pre_header_edges)
9268 free (loop->pre_header_edges);
9269 if (loop->nodes)
9270 sbitmap_free (loop->nodes);
9271 if (loop->entry_edges)
9272 free (loop->entry_edges);
9273 if (loop->exit_edges)
9274 free (loop->exit_edges);
9275 if (loop->exits_doms)
9276 sbitmap_free (loop->exits_doms);
9277 }
9278 free (loops->array);
9279 loops->array = NULL;
9280
9281 if (loops->cfg.dom)
9282 sbitmap_vector_free (loops->cfg.dom);
9283 if (loops->cfg.dfs_order)
9284 free (loops->cfg.dfs_order);
9285
9286 if (loops->shared_headers)
9287 sbitmap_free (loops->shared_headers);
9288 }
9289 }
9290
9291
9292 /* Find the entry edges into the loop with header HEADER and nodes
9293 NODES and store them in the ENTRY_EDGES array. Return the number of
9294 entry edges into the loop. */
9295
9296 static int
9297 flow_loop_entry_edges_find (header, nodes, entry_edges)
9298 basic_block header;
9299 const sbitmap nodes;
9300 edge **entry_edges;
9301 {
9302 edge e;
9303 int num_entries;
9304
9305 *entry_edges = NULL;
9306
9307 num_entries = 0;
9308 for (e = header->pred; e; e = e->pred_next)
9309 {
9310 basic_block src = e->src;
9311
9312 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9313 num_entries++;
9314 }
9315
9316 if (! num_entries)
9317 abort ();
9318
9319 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9320
9321 num_entries = 0;
9322 for (e = header->pred; e; e = e->pred_next)
9323 {
9324 basic_block src = e->src;
9325
9326 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9327 (*entry_edges)[num_entries++] = e;
9328 }
9329
9330 return num_entries;
9331 }
9332
9333
9334 /* Find the exit edges from the loop using the bitmap of loop nodes
9335 NODES and store in EXIT_EDGES array. Return the number of
9336 exit edges from the loop. */
9337
9338 static int
9339 flow_loop_exit_edges_find (nodes, exit_edges)
9340 const sbitmap nodes;
9341 edge **exit_edges;
9342 {
9343 edge e;
9344 int node;
9345 int num_exits;
9346
9347 *exit_edges = NULL;
9348
9349 /* Check all nodes within the loop to see if there are any
9350 successors not in the loop. Note that a node may have multiple
9351 exiting edges: a node ending in a conditional jump has a jumping edge
9352 and a fallthru edge, but at most one of these can exit the loop. */
9353 num_exits = 0;
9354 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9355 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9356 {
9357 basic_block dest = e->dest;
9358
9359 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9360 num_exits++;
9361 }
9362 });
9363
9364 if (! num_exits)
9365 return 0;
9366
9367 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));
9368
9369 /* Store all exiting edges into an array. */
9370 num_exits = 0;
9371 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9372 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9373 {
9374 basic_block dest = e->dest;
9375
9376 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9377 (*exit_edges)[num_exits++] = e;
9378 }
9379 });
9380
9381 return num_exits;
9382 }
9383
9384
9385 /* Find the nodes contained within the loop with header HEADER and
9386 latch LATCH and store them in NODES. Return the number of nodes
9387 within the loop. */
9388
9389 static int
9390 flow_loop_nodes_find (header, latch, nodes)
9391 basic_block header;
9392 basic_block latch;
9393 sbitmap nodes;
9394 {
9395 basic_block *stack;
9396 int sp;
9397 int num_nodes = 0;
9398
9399 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9400 sp = 0;
9401
9402 /* Start with only the loop header in the set of loop nodes. */
9403 sbitmap_zero (nodes);
9404 SET_BIT (nodes, header->index);
9405 num_nodes++;
9406 header->loop_depth++;
9407
9408 /* Push the loop latch on to the stack. */
9409 if (! TEST_BIT (nodes, latch->index))
9410 {
9411 SET_BIT (nodes, latch->index);
9412 latch->loop_depth++;
9413 num_nodes++;
9414 stack[sp++] = latch;
9415 }
9416
9417 while (sp)
9418 {
9419 basic_block node;
9420 edge e;
9421
9422 node = stack[--sp];
9423 for (e = node->pred; e; e = e->pred_next)
9424 {
9425 basic_block ancestor = e->src;
9426
9427 /* If this ancestor has not been marked as part of the loop, add it
9428 to the set of loop nodes and push it onto the stack. */
9429 if (ancestor != ENTRY_BLOCK_PTR
9430 && ! TEST_BIT (nodes, ancestor->index))
9431 {
9432 SET_BIT (nodes, ancestor->index);
9433 ancestor->loop_depth++;
9434 num_nodes++;
9435 stack[sp++] = ancestor;
9436 }
9437 }
9438 }
9439 free (stack);
9440 return num_nodes;
9441 }
9442
9443 /* Compute the depth first search order and store it in the array
9444 DFS_ORDER if that is non-zero, marking visited nodes in an internal
9445 bitmap. If RC_ORDER is non-zero, store the reverse completion number
9446 of each node in it. Returns the number of nodes visited. A depth
9447 first search tries to get as far away from the starting point as
9448 quickly as possible. */
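
/* An illustrative caller sketch (hedged; the variable names here are
   hypothetical, but flow_loops_find below follows the same pattern):

       int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
       int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

       flow_depth_first_order_compute (dfs_order, rc_order);
       ... use dfs_order and rc_order ...
       free (rc_order);
       free (dfs_order);

   Either argument may be passed as NULL if the corresponding ordering
   is not needed.  */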
9449
9450 int
9451 flow_depth_first_order_compute (dfs_order, rc_order)
9452 int *dfs_order;
9453 int *rc_order;
9454 {
9455 edge *stack;
9456 int sp;
9457 int dfsnum = 0;
9458 int rcnum = n_basic_blocks - 1;
9459 sbitmap visited;
9460
9461 /* Allocate stack for back-tracking up CFG. */
9462 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9463 sp = 0;
9464
9465 /* Allocate bitmap to track nodes that have been visited. */
9466 visited = sbitmap_alloc (n_basic_blocks);
9467
9468 /* None of the nodes in the CFG have been visited yet. */
9469 sbitmap_zero (visited);
9470
9471 /* Push the first edge on to the stack. */
9472 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9473
9474 while (sp)
9475 {
9476 edge e;
9477 basic_block src;
9478 basic_block dest;
9479
9480 /* Look at the edge on the top of the stack. */
9481 e = stack[sp - 1];
9482 src = e->src;
9483 dest = e->dest;
9484
9485 /* Check if the edge destination has been visited yet. */
9486 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9487 {
9488 /* Mark that we have visited the destination. */
9489 SET_BIT (visited, dest->index);
9490
9491 if (dfs_order)
9492 dfs_order[dfsnum++] = dest->index;
9493
9494 if (dest->succ)
9495 {
9496 /* Since the DEST node has been visited for the first
9497 time, check its successors. */
9498 stack[sp++] = dest->succ;
9499 }
9500 else
9501 {
9502 /* There are no successors for the DEST node so assign
9503 its reverse completion number. */
9504 if (rc_order)
9505 rc_order[rcnum--] = dest->index;
9506 }
9507 }
9508 else
9509 {
9510 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9511 {
9512 /* There are no more successors for the SRC node
9513 so assign its reverse completion number. */
9514 if (rc_order)
9515 rc_order[rcnum--] = src->index;
9516 }
9517
9518 if (e->succ_next)
9519 stack[sp - 1] = e->succ_next;
9520 else
9521 sp--;
9522 }
9523 }
9524
9525 free (stack);
9526 sbitmap_free (visited);
9527
9528 /* The number of nodes visited should not be greater than
9529 n_basic_blocks. */
9530 if (dfsnum > n_basic_blocks)
9531 abort ();
9532
9533 /* Some nodes in the CFG were not visited; they must be unreachable, which should not happen here. */
9534 if (dfsnum < n_basic_blocks)
9535 abort ();
9536 return dfsnum;
9537 }
9538
9539 /* Perform a depth first search on the _reverse_ graph (following
9540 predecessor rather than successor edges), marking the nodes visited
9541 in the bitmap held in the search context.
9542
9543 The computation is split into four parts:
9544
9545 flow_dfs_compute_reverse_init () creates the necessary data
9546 structures.
9547
9548 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9549 structures. The block will start the search.
9550
9551 flow_dfs_compute_reverse_execute () continues (or starts) the
9552 search using the block on the top of the stack, stopping when the
9553 stack is empty.
9554
9555 flow_dfs_compute_reverse_finish () destroys the necessary data
9556 structures.
9557
9558 Thus, the user will probably call ..._init(), call ..._add_bb() to
9559 add a beginning basic block to the stack, call ..._execute(),
9560 possibly add another bb to the stack and again call ..._execute(),
9561 ..., and finally call _finish(). */
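
/* A minimal usage sketch (illustrative only; the automatic variable
   names are hypothetical, and the context type is assumed to be the
   structure behind the depth_first_search_ds pointer typedef):

       struct depth_first_search_dsS dfs;
       basic_block unvisited;

       flow_dfs_compute_reverse_init (&dfs);
       flow_dfs_compute_reverse_add_bb (&dfs, EXIT_BLOCK_PTR);
       while ((unvisited = flow_dfs_compute_reverse_execute (&dfs)) != NULL)
         flow_dfs_compute_reverse_add_bb (&dfs, unvisited);
       flow_dfs_compute_reverse_finish (&dfs);

   Each time ..._execute () returns a block that the search has not yet
   reached, that block is used to seed another search, so the loop only
   terminates once every basic block has been visited.  */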
9562
9563 /* Initialize the data structures used for depth-first search on the
9564 reverse graph. DATA is the depth-first search context to set up;
9565 its stack and visited-block bitmap are allocated here and start out
9566 empty. Blocks from which the search should start are added
9567 afterwards with flow_dfs_compute_reverse_add_bb. */
9568
9569 static void
9570 flow_dfs_compute_reverse_init (data)
9571 depth_first_search_ds data;
9572 {
9573 /* Allocate stack for back-tracking up CFG. */
9574 data->stack =
9575 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9576 * sizeof (basic_block));
9577 data->sp = 0;
9578
9579 /* Allocate bitmap to track nodes that have been visited. */
9580 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9581
9582 /* None of the nodes in the CFG have been visited yet. */
9583 sbitmap_zero (data->visited_blocks);
9584
9585 return;
9586 }
9587
9588 /* Add the specified basic block to the top of the dfs data
9589 structures. When the search continues, it will start at the
9590 block. */
9591
9592 static void
9593 flow_dfs_compute_reverse_add_bb (data, bb)
9594 depth_first_search_ds data;
9595 basic_block bb;
9596 {
9597 data->stack[data->sp++] = bb;
9598 return;
9599 }
9600
9601 /* Continue the depth-first search through the reverse graph starting
9602 with the block at the stack's top and ending when the stack is
9603 empty. Visited nodes are marked. Returns an unvisited basic
9604 block, or NULL if there is none available. */
9605
9606 static basic_block
9607 flow_dfs_compute_reverse_execute (data)
9608 depth_first_search_ds data;
9609 {
9610 basic_block bb;
9611 edge e;
9612 int i;
9613
9614 while (data->sp > 0)
9615 {
9616 bb = data->stack[--data->sp];
9617
9618 /* Mark that we have visited this node. */
9619 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9620 {
9621 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9622
9623 /* Perform depth-first search on adjacent vertices. */
9624 for (e = bb->pred; e; e = e->pred_next)
9625 flow_dfs_compute_reverse_add_bb (data, e->src);
9626 }
9627 }
9628
9629 /* Determine if there are unvisited basic blocks. */
9630 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9631 if (!TEST_BIT (data->visited_blocks, i))
9632 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9633 return NULL;
9634 }
9635
9636 /* Destroy the data structures needed for depth-first search on the
9637 reverse graph. */
9638
9639 static void
9640 flow_dfs_compute_reverse_finish (data)
9641 depth_first_search_ds data;
9642 {
9643 free (data->stack);
9644 sbitmap_free (data->visited_blocks);
9645 return;
9646 }
9647
9648
9649 /* Find the root node of the loop pre-header extended basic block and
9650 the edges along the trace from the root node to the loop header. */
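
/* A hedged illustration with a hypothetical block layout: if the
   loop's single entry edge comes from block B2, B2's only predecessor
   is B1, and B1's predecessor is the entry block, then the scan sets
   num_pre_header_edges to 2, pre_header_edges[0] to the edge B1->B2
   and pre_header_edges[1] to the edge B2->header; B1 is the root of
   the pre-header extended basic block.  */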
9651
9652 static void
9653 flow_loop_pre_header_scan (loop)
9654 struct loop *loop;
9655 {
9656 int num = 0;
9657 basic_block ebb;
9658
9659 loop->num_pre_header_edges = 0;
9660
9661 if (loop->num_entries != 1)
9662 return;
9663
9664 ebb = loop->entry_edges[0]->src;
9665
9666 if (ebb != ENTRY_BLOCK_PTR)
9667 {
9668 edge e;
9669
9670 /* Count number of edges along trace from loop header to
9671 root of pre-header extended basic block. Usually this is
9672 only one or two edges. */
9673 num++;
9674 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9675 {
9676 ebb = ebb->pred->src;
9677 num++;
9678 }
9679
9680 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
9681 loop->num_pre_header_edges = num;
9682
9683 /* Store the edges in the order that they are followed. The source
9684 of the first edge is the root node of the pre-header extended
9685 basic block and the destination of the last edge is
9686 the loop header. */
9687 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9688 {
9689 loop->pre_header_edges[--num] = e;
9690 }
9691 }
9692 }
9693
9694
9695 /* Return the block for the pre-header of the loop with header
9696 HEADER where DOM specifies the dominator information. Return NULL if
9697 there is no pre-header. */
9698
9699 static basic_block
9700 flow_loop_pre_header_find (header, dom)
9701 basic_block header;
9702 const sbitmap *dom;
9703 {
9704 basic_block pre_header;
9705 edge e;
9706
9707 /* If a block is the only predecessor of the header that is not
9708 dominated by the header, then it is the pre-header. */
9709 pre_header = NULL;
9710 for (e = header->pred; e; e = e->pred_next)
9711 {
9712 basic_block node = e->src;
9713
9714 if (node != ENTRY_BLOCK_PTR
9715 && ! TEST_BIT (dom[node->index], header->index))
9716 {
9717 if (pre_header == NULL)
9718 pre_header = node;
9719 else
9720 {
9721 /* There are multiple edges into the header from outside
9722 the loop so there is no pre-header block. */
9723 pre_header = NULL;
9724 break;
9725 }
9726 }
9727 }
9728 return pre_header;
9729 }
9730
9731 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9732 previously added. The insertion algorithm assumes that the loops
9733 are added in the order found by a depth first search of the CFG. */
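
/* A hedged illustration with a hypothetical nest: suppose the loops are
   discovered in the order A, B, C, D, where B is nested in A, C is
   nested in B, and D is nested in A but disjoint from B.  The calls
   flow_loop_tree_node_add (A, B), then (B, C), then (C, D) leave
   A->inner == B, B->outer == A, B->inner == C and C->outer == B; the
   last call walks outward from C until it finds A enclosing D, giving
   B->next == D and D->outer == A.  */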
9734
9735 static void
9736 flow_loop_tree_node_add (prevloop, loop)
9737 struct loop *prevloop;
9738 struct loop *loop;
9739 {
9740
9741 if (flow_loop_nested_p (prevloop, loop))
9742 {
9743 prevloop->inner = loop;
9744 loop->outer = prevloop;
9745 return;
9746 }
9747
9748 while (prevloop->outer)
9749 {
9750 if (flow_loop_nested_p (prevloop->outer, loop))
9751 {
9752 prevloop->next = loop;
9753 loop->outer = prevloop->outer;
9754 return;
9755 }
9756 prevloop = prevloop->outer;
9757 }
9758
9759 prevloop->next = loop;
9760 loop->outer = NULL;
9761 }
9762
9763 /* Build the loop hierarchy tree for LOOPS. */
9764
9765 static void
9766 flow_loops_tree_build (loops)
9767 struct loops *loops;
9768 {
9769 int i;
9770 int num_loops;
9771
9772 num_loops = loops->num;
9773 if (! num_loops)
9774 return;
9775
9776 /* Root the loop hierarchy tree with the first loop found.
9777 Since we used a depth first search this should be the
9778 outermost loop. */
9779 loops->tree_root = &loops->array[0];
9780 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9781
9782 /* Add the remaining loops to the tree. */
9783 for (i = 1; i < num_loops; i++)
9784 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9785 }
9786
9787 /* Helper function to compute loop nesting depth and enclosed loop level
9788 for the natural loop specified by LOOP at the loop depth DEPTH.
9789 Returns the loop level. */
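
/* For example (an illustration with a hypothetical nest): if loop A
   contains loop B, which in turn contains loop C, then
   flow_loop_level_compute (A, 1) assigns depth 1, 2, 3 and level 3, 2, 1
   to A, B, C respectively, and returns 3.  */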
9790
9791 static int
9792 flow_loop_level_compute (loop, depth)
9793 struct loop *loop;
9794 int depth;
9795 {
9796 struct loop *inner;
9797 int level = 1;
9798
9799 if (! loop)
9800 return 0;
9801
9802 /* Traverse loop tree assigning depth and computing level as the
9803 maximum level of all the inner loops of this loop. The loop
9804 level is equivalent to the height of the loop in the loop tree
9805 and corresponds to the number of enclosed loop levels (including
9806 itself). */
9807 for (inner = loop->inner; inner; inner = inner->next)
9808 {
9809 int ilevel;
9810
9811 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9812
9813 if (ilevel > level)
9814 level = ilevel;
9815 }
9816 loop->level = level;
9817 loop->depth = depth;
9818 return level;
9819 }
9820
9821 /* Compute the loop nesting depth and enclosed loop level for the loop
9822 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
9823 level. */
9824
9825 static int
9826 flow_loops_level_compute (loops)
9827 struct loops *loops;
9828 {
9829 struct loop *loop;
9830 int level;
9831 int levels = 0;
9832
9833 /* Traverse all the outer level loops. */
9834 for (loop = loops->tree_root; loop; loop = loop->next)
9835 {
9836 level = flow_loop_level_compute (loop, 1);
9837 if (level > levels)
9838 levels = level;
9839 }
9840 return levels;
9841 }
9842
9843
9844 /* Scan a single natural loop specified by LOOP, collecting the
9845 information selected by FLAGS. Return non-zero on success. */
9846
9847 int
9848 flow_loop_scan (loops, loop, flags)
9849 struct loops *loops;
9850 struct loop *loop;
9851 int flags;
9852 {
9853 /* Determine prerequisites. */
9854 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9855 flags |= LOOP_EXIT_EDGES;
9856
9857 if (flags & LOOP_ENTRY_EDGES)
9858 {
9859 /* Find edges which enter the loop header.
9860 Note that the entry edges should only
9861 enter the header of a natural loop. */
9862 loop->num_entries
9863 = flow_loop_entry_edges_find (loop->header,
9864 loop->nodes,
9865 &loop->entry_edges);
9866 }
9867
9868 if (flags & LOOP_EXIT_EDGES)
9869 {
9870 /* Find edges which exit the loop. */
9871 loop->num_exits
9872 = flow_loop_exit_edges_find (loop->nodes,
9873 &loop->exit_edges);
9874 }
9875
9876 if (flags & LOOP_EXITS_DOMS)
9877 {
9878 int j;
9879
9880 /* Determine which loop nodes dominate all the exits
9881 of the loop. */
9882 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9883 sbitmap_copy (loop->exits_doms, loop->nodes);
9884 for (j = 0; j < loop->num_exits; j++)
9885 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9886 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9887
9888 /* The header of a natural loop must dominate
9889 all exits. */
9890 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9891 abort ();
9892 }
9893
9894 if (flags & LOOP_PRE_HEADER)
9895 {
9896 /* Look to see if the loop has a pre-header node. */
9897 loop->pre_header
9898 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9899
9900 /* Find the blocks within the extended basic block of
9901 the loop pre-header. */
9902 flow_loop_pre_header_scan (loop);
9903 }
9904 return 1;
9905 }
9906
9907
9908 /* Find all the natural loops in the function and save in LOOPS structure
9909 and recalculate loop_depth information in basic block structures.
9910 FLAGS controls which loop information is collected.
9911 Return the number of natural loops found. */
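
/* A minimal usage sketch (illustrative only; the particular flag
   combination is hypothetical, but it must include LOOP_TREE or this
   function aborts):

       struct loops loops;
       int num_loops;

       num_loops = flow_loops_find (&loops, LOOP_TREE | LOOP_ENTRY_EDGES
                                            | LOOP_EXIT_EDGES);
       if (rtl_dump_file)
         flow_loops_dump (&loops, rtl_dump_file, NULL, 1);
       flow_loops_free (&loops);  */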
9912
9913 int
9914 flow_loops_find (loops, flags)
9915 struct loops *loops;
9916 int flags;
9917 {
9918 int i;
9919 int b;
9920 int num_loops;
9921 edge e;
9922 sbitmap headers;
9923 sbitmap *dom;
9924 int *dfs_order;
9925 int *rc_order;
9926
9927 /* This function cannot be repeatedly called with different
9928 flags to build up the loop information. The loop tree
9929 must always be built if this function is called. */
9930 if (! (flags & LOOP_TREE))
9931 abort ();
9932
9933 memset (loops, 0, sizeof (*loops));
9934
9935 /* Taking care of this degenerate case makes the rest of
9936 this code simpler. */
9937 if (n_basic_blocks == 0)
9938 return 0;
9939
9940 dfs_order = NULL;
9941 rc_order = NULL;
9942
9943 /* Compute the dominators. */
9944 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9945 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9946
9947 /* Count the number of loop edges (back edges). This should be the
9948 same as the number of natural loops. */
9949
9950 num_loops = 0;
9951 for (b = 0; b < n_basic_blocks; b++)
9952 {
9953 basic_block header;
9954
9955 header = BASIC_BLOCK (b);
9956 header->loop_depth = 0;
9957
9958 for (e = header->pred; e; e = e->pred_next)
9959 {
9960 basic_block latch = e->src;
9961
9962 /* Look for back edges where a predecessor is dominated
9963 by this block. A natural loop has a single entry
9964 node (header) that dominates all the nodes in the
9965 loop. It also has a single back edge to the header
9966 from a latch node. Note that multiple natural loops
9967 may share the same header. */
9968 if (b != header->index)
9969 abort ();
9970
9971 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9972 num_loops++;
9973 }
9974 }
9975
9976 if (num_loops)
9977 {
9978 /* Compute depth first search order of the CFG so that outer
9979 natural loops will be found before inner natural loops. */
9980 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9981 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9982 flow_depth_first_order_compute (dfs_order, rc_order);
9983
9984 /* Save CFG derived information to avoid recomputing it. */
9985 loops->cfg.dom = dom;
9986 loops->cfg.dfs_order = dfs_order;
9987 loops->cfg.rc_order = rc_order;
9988
9989 /* Allocate loop structures. */
9990 loops->array
9991 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9992
9993 headers = sbitmap_alloc (n_basic_blocks);
9994 sbitmap_zero (headers);
9995
9996 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9997 sbitmap_zero (loops->shared_headers);
9998
9999 /* Find and record information about all the natural loops
10000 in the CFG. */
10001 num_loops = 0;
10002 for (b = 0; b < n_basic_blocks; b++)
10003 {
10004 basic_block header;
10005
10006 /* Search the nodes of the CFG in reverse completion order
10007 so that we can find outer loops first. */
10008 header = BASIC_BLOCK (rc_order[b]);
10009
10010 /* Look for all the possible latch blocks for this header. */
10011 for (e = header->pred; e; e = e->pred_next)
10012 {
10013 basic_block latch = e->src;
10014
10015 /* Look for back edges where a predecessor is dominated
10016 by this block. A natural loop has a single entry
10017 node (header) that dominates all the nodes in the
10018 loop. It also has a single back edge to the header
10019 from a latch node. Note that multiple natural loops
10020 may share the same header. */
10021 if (latch != ENTRY_BLOCK_PTR
10022 && TEST_BIT (dom[latch->index], header->index))
10023 {
10024 struct loop *loop;
10025
10026 loop = loops->array + num_loops;
10027
10028 loop->header = header;
10029 loop->latch = latch;
10030 loop->num = num_loops;
10031
10032 num_loops++;
10033 }
10034 }
10035 }
10036
10037 for (i = 0; i < num_loops; i++)
10038 {
10039 struct loop *loop = &loops->array[i];
10040
10041 /* Keep track of blocks that are loop headers so
10042 that we can tell which loops should be merged. */
10043 if (TEST_BIT (headers, loop->header->index))
10044 SET_BIT (loops->shared_headers, loop->header->index);
10045 SET_BIT (headers, loop->header->index);
10046
10047 /* Find nodes contained within the loop. */
10048 loop->nodes = sbitmap_alloc (n_basic_blocks);
10049 loop->num_nodes
10050 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
10051
10052 /* Compute first and last blocks within the loop.
10053 These are often the same as the loop header and
10054 loop latch respectively, but this is not always
10055 the case. */
10056 loop->first
10057 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
10058 loop->last
10059 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
10060
10061 flow_loop_scan (loops, loop, flags);
10062 }
10063
10064 /* Natural loops with shared headers may either be disjoint or
10065 nested. Disjoint loops with shared headers cannot be inner
10066 loops and should be merged. For now just mark loops that share
10067 headers. */
10068 for (i = 0; i < num_loops; i++)
10069 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
10070 loops->array[i].shared = 1;
10071
10072 sbitmap_free (headers);
10073 }
10074 else
10075 {
10076 sbitmap_vector_free (dom);
10077 }
10078
10079 loops->num = num_loops;
10080
10081 /* Build the loop hierarchy tree. */
10082 flow_loops_tree_build (loops);
10083
10084 /* Assign the loop nesting depth and enclosed loop level for each
10085 loop. */
10086 loops->levels = flow_loops_level_compute (loops);
10087
10088 return num_loops;
10089 }
10090
10091
10092 /* Update the information regarding the loops in the CFG
10093 specified by LOOPS. */
10094 int
10095 flow_loops_update (loops, flags)
10096 struct loops *loops;
10097 int flags;
10098 {
10099 /* One day we may want to update the current loop data. For now
10100 throw away the old stuff and rebuild what we need. */
10101 if (loops->array)
10102 flow_loops_free (loops);
10103
10104 return flow_loops_find (loops, flags);
10105 }
10106
10107
10108 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
10109
10110 int
10111 flow_loop_outside_edge_p (loop, e)
10112 const struct loop *loop;
10113 edge e;
10114 {
10115 if (e->dest != loop->header)
10116 abort ();
10117 return (e->src == ENTRY_BLOCK_PTR)
10118 || ! TEST_BIT (loop->nodes, e->src->index);
10119 }
10120
10121 /* Clear LOG_LINKS fields of insns in a chain.
10122 Also clear the global_live_at_{start,end} fields of the basic block
10123 structures. */
10124
10125 void
10126 clear_log_links (insns)
10127 rtx insns;
10128 {
10129 rtx i;
10130 int b;
10131
10132 for (i = insns; i; i = NEXT_INSN (i))
10133 if (INSN_P (i))
10134 LOG_LINKS (i) = 0;
10135
10136 for (b = 0; b < n_basic_blocks; b++)
10137 {
10138 basic_block bb = BASIC_BLOCK (b);
10139
10140 bb->global_live_at_start = NULL;
10141 bb->global_live_at_end = NULL;
10142 }
10143
10144 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
10145 EXIT_BLOCK_PTR->global_live_at_start = NULL;
10146 }
10147
10148 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
10149 correspond to the hard registers, if any, set in that map. This
10150 could be done far more efficiently by having all sorts of special-cases
10151 with moving single words, but probably isn't worth the trouble. */
10152
10153 void
10154 reg_set_to_hard_reg_set (to, from)
10155 HARD_REG_SET *to;
10156 bitmap from;
10157 {
10158 int i;
10159
10160 EXECUTE_IF_SET_IN_BITMAP
10161 (from, 0, i,
10162 {
10163 if (i >= FIRST_PSEUDO_REGISTER)
10164 return;
10165 SET_HARD_REG_BIT (*to, i);
10166 });
10167 }
10168
10169 /* Called at initialization time. The first call sets up the flow obstack; later calls release everything previously allocated from it. */
10170
10171 void
10172 init_flow ()
10173 {
10174 static int initialized;
10175
10176 if (!initialized)
10177 {
10178 gcc_obstack_init (&flow_obstack);
10179 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10180 initialized = 1;
10181 }
10182 else
10183 {
10184 obstack_free (&flow_obstack, flow_firstobj);
10185 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10186 }
10187 }
10188
10189 /* Assume that the preceding pass has possibly eliminated jump instructions
10190 or converted the unconditional jumps. Remove the edges of block BB that have become dead as a result. */
10191
10192 void
10193 purge_dead_edges (bb)
10194 basic_block bb;
10195 {
10196 edge e, next;
10197 rtx insn = bb->end;
10198 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
10199 return;
10200 if (GET_CODE (insn) == JUMP_INSN)
10201 {
10202 int removed = 0;
10203 rtx note;
10204 edge b,f;
10205 /* We only care about conditional jumps, return jumps and simplejumps. */
10206 if (!any_condjump_p (insn)
10207 && !returnjump_p (insn)
10208 && !simplejump_p (insn))
10209 return;
10210 for (e = bb->succ; e; e = next)
10211 {
10212 next = e->succ_next;
10213
10214 /* Check whether this edge is one the jump can still take; if not, it will be removed. */
10215 if ((e->flags & EDGE_FALLTHRU)
10216 && any_condjump_p (insn))
10217 continue;
10218 if (e->dest != EXIT_BLOCK_PTR
10219 && e->dest->head == JUMP_LABEL (insn))
10220 continue;
10221 if (e->dest == EXIT_BLOCK_PTR
10222 && returnjump_p (insn))
10223 continue;
10224 removed = 1;
10225 remove_edge (e);
10226 }
10227 if (!bb->succ || !removed)
10228 return;
10229 if (rtl_dump_file)
10230 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
10231 if (!optimize)
10232 return;
10233
10234 /* Redistribute probabilities. */
10235 if (!bb->succ->succ_next)
10236 {
10237 bb->succ->probability = REG_BR_PROB_BASE;
10238 bb->succ->count = bb->count;
10239 }
10240 else
10241 {
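/* A hedged numeric illustration (assuming REG_BR_PROB_BASE is 10000):
   a REG_BR_PROB note value of 7500 on a block with count 400 gives the
   branch edge probability 7500 and count 300, and the fallthru edge
   probability 2500 and count 100. */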
10242 note = find_reg_note (insn, REG_BR_PROB, NULL);
10243 if (!note)
10244 return;
10245 b = BRANCH_EDGE (bb);
10246 f = FALLTHRU_EDGE (bb);
10247 b->probability = INTVAL (XEXP (note, 0));
10248 f->probability = REG_BR_PROB_BASE - b->probability;
10249 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
10250 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
10251 }
10252 return;
10253 }
10254 /* If we don't see a jump insn, we don't know exactly why the block would
10255 have been broken at this point. Look for a simple, non-fallthru edge,
10256 as these are only created by conditional branches. If we find such an
10257 edge we know that there used to be a jump here and can then safely
10258 remove all non-fallthru edges. */
10259 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
10260 e = e->succ_next);
10261 if (!e)
10262 return;
10263 for (e = bb->succ; e; e = next)
10264 {
10265 next = e->succ_next;
10266 if (!(e->flags & EDGE_FALLTHRU))
10267 remove_edge (e);
10268 }
10269 if (!bb->succ || bb->succ->succ_next)
10270 abort ();
10271 bb->succ->probability = REG_BR_PROB_BASE;
10272 bb->succ->count = bb->count;
10273
10274 if (rtl_dump_file)
10275 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
10276 bb->index);
10277 return;
10278 }
10279
10280 /* Search all basic blocks for potentially dead edges and purge them. */
10281
10282 void
10283 purge_all_dead_edges ()
10284 {
10285 int i;
10286 for (i = 0; i < n_basic_blocks; i++)
10287 purge_dead_edges (BASIC_BLOCK (i));
10288 }