gcc/flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
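/* A minimal illustrative sketch (not used by the pass): given the data
   described above, the set of registers live just after a particular insn
   can be recovered by starting from bb->global_live_at_start and scanning
   forward, applying REG_DEAD notes, the insn's stores, and REG_UNUSED notes.
   The helper names below are made up for illustration; multi-word hard
   registers and conditional execution are ignored for simplicity.  */
#if 0
static void
example_record_set (x, set, data)
     rtx x;
     rtx set ATTRIBUTE_UNUSED;
     void *data;
{
  /* Only plain REG destinations are handled in this sketch.  */
  if (GET_CODE (x) == REG)
    SET_REGNO_REG_SET ((regset) data, REGNO (x));
}

static void
example_live_after_insn (bb, insn, live)
     basic_block bb;
     rtx insn;
     regset live;
{
  rtx i, link;

  COPY_REG_SET (live, bb->global_live_at_start);
  for (i = bb->head; ; i = NEXT_INSN (i))
    {
      if (INSN_P (i))
	{
	  /* Registers with a REG_DEAD note die here.  */
	  for (link = REG_NOTES (i); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) == REG_DEAD
		&& GET_CODE (XEXP (link, 0)) == REG)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));

	  /* Registers stored by this insn become live ...  */
	  note_stores (PATTERN (i), example_record_set, live);

	  /* ... unless a REG_UNUSED note says the value is never needed.  */
	  for (link = REG_NOTES (i); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) == REG_UNUSED
		&& GET_CODE (XEXP (link, 0)) == REG)
	      CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));
	}
      if (i == insn || i == bb->end)
	break;
    }
}
#endif
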
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
176
177 /* The obstack on which the flow graph components are allocated. */
178
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
181
182 /* Number of basic blocks in the current function. */
183
184 int n_basic_blocks;
185
186 /* Number of edges in the current function. */
187
188 int n_edges;
189
190 /* The basic block array. */
191
192 varray_type basic_block_info;
193
194 /* The special entry and exit blocks. */
195
196 struct basic_block_def entry_exit_blocks[2]
197 = {{NULL, /* head */
198 NULL, /* end */
199 NULL, /* head_tree */
200 NULL, /* end_tree */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 ENTRY_BLOCK, /* index */
209 0, /* loop_depth */
210 0, /* count */
211 0 /* frequency */
212 },
213 {
214 NULL, /* head */
215 NULL, /* end */
216 NULL, /* head_tree */
217 NULL, /* end_tree */
218 NULL, /* pred */
219 NULL, /* succ */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
224 NULL, /* aux */
225 EXIT_BLOCK, /* index */
226 0, /* loop_depth */
227 0, /* count */
228 0 /* frequency */
229 }
230 };
231
232 /* Nonzero if the second flow pass has completed. */
233 int flow2_completed;
234
235 /* Maximum register number used in this function, plus one. */
236
237 int max_regno;
238
239 /* Indexed by n, giving various register information */
240
241 varray_type reg_n_info;
242
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
245
246 int regset_bytes;
247 int regset_size;
248
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
251
252 regset regs_live_at_setjmp;
253
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
258 rtx regs_may_share;
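
/* A minimal illustrative sketch (not used anywhere): walking the
   regs_may_share list two elements at a time to visit each pair, per the
   layout described above.  The function name is made up for illustration.  */
#if 0
static void
example_walk_regs_may_share ()
{
  rtx x;

  for (x = regs_may_share; x && XEXP (x, 1); x = XEXP (XEXP (x, 1), 1))
    {
      rtx first = XEXP (x, 0);
      rtx second = XEXP (XEXP (x, 1), 0);

      /* FIRST and SECOND are a pair of pseudos that must be assigned
	 the same hard register.  */
    }
}
#endif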
259
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
263
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
266
267 static HARD_REG_SET elim_reg_set;
268
269 /* The basic block structure for every insn, indexed by uid. */
270
271 varray_type basic_block_for_insn;
272
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
276
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
279
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
282 {
283 /* A boolean expression of conditions under which a register is dead. */
284 rtx condition;
285 /* Conditions under which a register is dead at the basic block end. */
286 rtx orig_condition;
287
288 /* A boolean expression of conditions under which a register has been
289 stored into. */
290 rtx stores;
291
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
294 };
295
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
298
299 struct propagate_block_info
300 {
301 /* The basic block we're considering. */
302 basic_block bb;
303
304 /* Bit N is set if register N is conditionally or unconditionally live. */
305 regset reg_live;
306
307 /* Bit N is set if register N is set this insn. */
308 regset new_set;
309
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
312 rtx *reg_next_use;
313
314 /* Contains a list of all the MEMs we are tracking for dead store
315 elimination. */
316 rtx mem_set_list;
317
318 /* If non-null, record the set of registers set unconditionally in the
319 basic block. */
320 regset local_set;
321
322 /* If non-null, record the set of registers set conditionally in the
323 basic block. */
324 regset cond_local_set;
325
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
330
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
332 regset reg_cond_reg;
333 #endif
334
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
337
338 /* Non-zero if the value of CC0 is live. */
339 int cc0_live;
340
 341   /* Flags controlling the set of information propagate_block collects.  */
342 int flags;
343 };
344
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
348
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
352 basic_block *stack;
353
 354   /* number of basic blocks on the stack.  That is, positions 0, ..., sp-1
 355      have blocks.  */
356 unsigned int sp;
357
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
360 };
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
362
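/* A hedged usage sketch (not part of the pass): one plausible way to drive
   the reverse depth-first search routines declared further below, assuming
   the usual contract that flow_dfs_compute_reverse_execute returns a block
   not yet reached by the reverse walk, or NULL once every block has been
   seen.  Seeded with the exit block, each block returned cannot reach the
   exit.  The function name is made up for illustration.  */
#if 0
static void
example_reverse_dfs ()
{
  struct depth_first_search_dsS dfs;
  basic_block unreached;

  flow_dfs_compute_reverse_init (&dfs);
  flow_dfs_compute_reverse_add_bb (&dfs, EXIT_BLOCK_PTR);

  /* Each call walks predecessor edges from the seeds and then returns a
     block that was not visited, or NULL when all blocks have been seen.  */
  while ((unreached = flow_dfs_compute_reverse_execute (&dfs)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs, unreached);

  flow_dfs_compute_reverse_finish (&dfs);
}
#endif
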
363 /* Have print_rtl_and_abort give the same information that fancy_abort
364 does. */
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
367
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
373 rtx *, rtx *));
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx, int, int, int));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
379 rtx, int));
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
381
382 static void commit_one_edge_insertion PARAMS ((edge));
383
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
390 basic_block));
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
392 basic_block));
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
394 int));
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool can_fallthru PARAMS ((basic_block, basic_block));
397 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
398 static bool try_simplify_condjump PARAMS ((basic_block));
399 static bool try_forward_edges PARAMS ((int, basic_block));
400 static void tidy_fallthru_edges PARAMS ((void));
401 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
402 static void verify_wide_reg PARAMS ((int, rtx, rtx));
403 static void verify_local_live_at_start PARAMS ((regset, basic_block));
404 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
405 static void notice_stack_pointer_modification PARAMS ((rtx));
406 static void mark_reg PARAMS ((rtx, void *));
407 static void mark_regs_live_at_end PARAMS ((regset));
408 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
409 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
410 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
411 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
412 static int insn_dead_p PARAMS ((struct propagate_block_info *,
413 rtx, int, rtx));
414 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
415 rtx, rtx));
416 static void mark_set_regs PARAMS ((struct propagate_block_info *,
417 rtx, rtx));
418 static void mark_set_1 PARAMS ((struct propagate_block_info *,
419 enum rtx_code, rtx, rtx,
420 rtx, int));
421 #ifdef HAVE_conditional_execution
422 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
423 int, rtx));
424 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
425 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
426 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
427 int));
428 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
429 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
430 static rtx not_reg_cond PARAMS ((rtx));
431 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
432 #endif
433 #ifdef AUTO_INC_DEC
434 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
435 rtx, rtx, rtx, rtx, rtx));
436 static void find_auto_inc PARAMS ((struct propagate_block_info *,
437 rtx, rtx));
438 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
439 rtx));
440 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
441 #endif
442 static void mark_used_reg PARAMS ((struct propagate_block_info *,
443 rtx, rtx, rtx));
444 static void mark_used_regs PARAMS ((struct propagate_block_info *,
445 rtx, rtx, rtx));
446 void dump_flow_info PARAMS ((FILE *));
447 void debug_flow_info PARAMS ((void));
448 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
449 const char *))
450 ATTRIBUTE_NORETURN;
451
452 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
453 rtx));
454 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
455 rtx));
456 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
457 rtx));
458 static void remove_fake_successors PARAMS ((basic_block));
459 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
460 FILE *));
461 static void flow_edge_list_print PARAMS ((const char *, const edge *,
462 int, FILE *));
463 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
464 FILE *));
465 static int flow_loop_nested_p PARAMS ((struct loop *,
466 struct loop *));
467 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
468 edge **));
469 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
470 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
471 static void flow_dfs_compute_reverse_init
472 PARAMS ((depth_first_search_ds));
473 static void flow_dfs_compute_reverse_add_bb
474 PARAMS ((depth_first_search_ds, basic_block));
475 static basic_block flow_dfs_compute_reverse_execute
476 PARAMS ((depth_first_search_ds));
477 static void flow_dfs_compute_reverse_finish
478 PARAMS ((depth_first_search_ds));
479 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
480 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
481 const sbitmap *));
482 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
483 static void flow_loops_tree_build PARAMS ((struct loops *));
484 static int flow_loop_level_compute PARAMS ((struct loop *, int));
485 static int flow_loops_level_compute PARAMS ((struct loops *));
486 static void delete_dead_jumptables PARAMS ((void));
487 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
488 static bool need_fake_edge_p PARAMS ((rtx));
489 \f
490 /* Find basic blocks of the current function.
491 F is the first insn of the function and NREGS the number of register
492 numbers in use. */
493
494 void
495 find_basic_blocks (f, nregs, file)
496 rtx f;
497 int nregs ATTRIBUTE_UNUSED;
498 FILE *file ATTRIBUTE_UNUSED;
499 {
500 int max_uid;
501 timevar_push (TV_CFG);
502
503 /* Flush out existing data. */
504 if (basic_block_info != NULL)
505 {
506 int i;
507
508 clear_edges ();
509
510 /* Clear bb->aux on all extant basic blocks. We'll use this as a
511 tag for reuse during create_basic_block, just in case some pass
512 copies around basic block notes improperly. */
513 for (i = 0; i < n_basic_blocks; ++i)
514 BASIC_BLOCK (i)->aux = NULL;
515
516 VARRAY_FREE (basic_block_info);
517 }
518
519 n_basic_blocks = count_basic_blocks (f);
520
521 /* Size the basic block table. The actual structures will be allocated
522 by find_basic_blocks_1, since we want to keep the structure pointers
523 stable across calls to find_basic_blocks. */
524 /* ??? This whole issue would be much simpler if we called find_basic_blocks
525 exactly once, and thereafter we don't have a single long chain of
526 instructions at all until close to the end of compilation when we
527 actually lay them out. */
528
529 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
530
531 find_basic_blocks_1 (f);
532
533 /* Record the block to which an insn belongs. */
534 /* ??? This should be done another way, by which (perhaps) a label is
535 tagged directly with the basic block that it starts. It is used for
536 more than that currently, but IMO that is the only valid use. */
537
538 max_uid = get_max_uid ();
539 #ifdef AUTO_INC_DEC
540 /* Leave space for insns life_analysis makes in some cases for auto-inc.
541 These cases are rare, so we don't need too much space. */
542 max_uid += max_uid / 10;
543 #endif
544
545 compute_bb_for_insn (max_uid);
546
547 /* Discover the edges of our cfg. */
548 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
549
550 /* Do very simple cleanup now, for the benefit of code that runs between
551 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
552 tidy_fallthru_edges ();
553
554 mark_critical_edges ();
555
556 #ifdef ENABLE_CHECKING
557 verify_flow_info ();
558 #endif
559 timevar_pop (TV_CFG);
560 }
561
562 void
563 check_function_return_warnings ()
564 {
565 if (warn_missing_noreturn
566 && !TREE_THIS_VOLATILE (cfun->decl)
567 && EXIT_BLOCK_PTR->pred == NULL
568 && (lang_missing_noreturn_ok_p
569 && !lang_missing_noreturn_ok_p (cfun->decl)))
570 warning ("function might be possible candidate for attribute `noreturn'");
571
572 /* If we have a path to EXIT, then we do return. */
573 if (TREE_THIS_VOLATILE (cfun->decl)
574 && EXIT_BLOCK_PTR->pred != NULL)
575 warning ("`noreturn' function does return");
576
577 /* If the clobber_return_insn appears in some basic block, then we
578 do reach the end without returning a value. */
579 else if (warn_return_type
580 && cfun->x_clobber_return_insn != NULL
581 && EXIT_BLOCK_PTR->pred != NULL)
582 {
583 int max_uid = get_max_uid ();
584
585 /* If clobber_return_insn was excised by jump1, then renumber_insns
586 can make max_uid smaller than the number still recorded in our rtx.
587 That's fine, since this is a quick way of verifying that the insn
588 is no longer in the chain. */
589 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
590 {
591 /* Recompute insn->block mapping, since the initial mapping is
592 set before we delete unreachable blocks. */
593 compute_bb_for_insn (max_uid);
594
595 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
596 warning ("control reaches end of non-void function");
597 }
598 }
599 }
600
601 /* Count the basic blocks of the function. */
602
603 static int
604 count_basic_blocks (f)
605 rtx f;
606 {
607 register rtx insn;
608 register RTX_CODE prev_code;
609 register int count = 0;
610 int saw_abnormal_edge = 0;
611
612 prev_code = JUMP_INSN;
613 for (insn = f; insn; insn = NEXT_INSN (insn))
614 {
615 enum rtx_code code = GET_CODE (insn);
616
617 if (code == CODE_LABEL
618 || (GET_RTX_CLASS (code) == 'i'
619 && (prev_code == JUMP_INSN
620 || prev_code == BARRIER
621 || saw_abnormal_edge)))
622 {
623 saw_abnormal_edge = 0;
624 count++;
625 }
626
627 /* Record whether this insn created an edge. */
628 if (code == CALL_INSN)
629 {
630 rtx note;
631
632 /* If there is a nonlocal goto label and the specified
633 region number isn't -1, we have an edge. */
634 if (nonlocal_goto_handler_labels
635 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
636 || INTVAL (XEXP (note, 0)) >= 0))
637 saw_abnormal_edge = 1;
638
639 else if (can_throw_internal (insn))
640 saw_abnormal_edge = 1;
641 }
642 else if (flag_non_call_exceptions
643 && code == INSN
644 && can_throw_internal (insn))
645 saw_abnormal_edge = 1;
646
647 if (code != NOTE)
648 prev_code = code;
649 }
650
 651   /* The rest of the compiler works a bit more smoothly when we don't have to
 652      check for the edge case of do-nothing functions with no basic blocks.  */
653 if (count == 0)
654 {
655 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
656 count = 1;
657 }
658
659 return count;
660 }
661
662 /* Scan a list of insns for labels referred to other than by jumps.
663 This is used to scan the alternatives of a call placeholder. */
664 static rtx
665 find_label_refs (f, lvl)
666 rtx f;
667 rtx lvl;
668 {
669 rtx insn;
670
671 for (insn = f; insn; insn = NEXT_INSN (insn))
672 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
673 {
674 rtx note;
675
676 /* Make a list of all labels referred to other than by jumps
677 (which just don't have the REG_LABEL notes).
678
679 Make a special exception for labels followed by an ADDR*VEC,
680 as this would be a part of the tablejump setup code.
681
682 Make a special exception to registers loaded with label
683 values just before jump insns that use them. */
684
685 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
686 if (REG_NOTE_KIND (note) == REG_LABEL)
687 {
688 rtx lab = XEXP (note, 0), next;
689
690 if ((next = next_nonnote_insn (lab)) != NULL
691 && GET_CODE (next) == JUMP_INSN
692 && (GET_CODE (PATTERN (next)) == ADDR_VEC
693 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
694 ;
695 else if (GET_CODE (lab) == NOTE)
696 ;
697 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
698 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
699 ;
700 else
701 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
702 }
703 }
704
705 return lvl;
706 }
707
 708 /* Assume that someone emitted code with control flow instructions into the
 709    basic block.  Update the CFG data structures accordingly.  */
710 void
711 find_sub_basic_blocks (bb)
712 basic_block bb;
713 {
714 rtx insn = bb->head;
715 rtx end = bb->end;
716 rtx jump_insn = NULL_RTX;
717 edge falltru = 0;
718 basic_block first_bb = bb;
719 int i;
720
721 if (insn == bb->end)
722 return;
723
724 if (GET_CODE (insn) == CODE_LABEL)
725 insn = NEXT_INSN (insn);
726
727 /* Scan insn chain and try to find new basic block boundaries. */
728 while (1)
729 {
730 enum rtx_code code = GET_CODE (insn);
731 switch (code)
732 {
733 case BARRIER:
734 if (!jump_insn)
735 abort ();
736 break;
737 /* On code label, split current basic block. */
738 case CODE_LABEL:
739 falltru = split_block (bb, PREV_INSN (insn));
740 if (jump_insn)
741 bb->end = jump_insn;
742 bb = falltru->dest;
743 remove_edge (falltru);
744 jump_insn = 0;
745 if (LABEL_ALTERNATE_NAME (insn))
746 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
747 break;
748 case INSN:
749 case JUMP_INSN:
 750 	  /* If we've previously seen a JUMP_INSN in this block, split the block
 751 	     here and move the old block's end back to that jump.  */
752 if (jump_insn)
753 {
754 falltru = split_block (bb, PREV_INSN (insn));
755 bb->end = jump_insn;
756 bb = falltru->dest;
757 remove_edge (falltru);
758 jump_insn = 0;
759 }
760 /* We need some special care for those expressions. */
761 if (GET_CODE (insn) == JUMP_INSN)
762 {
763 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
764 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
765 abort();
766 jump_insn = insn;
767 }
768 break;
769 default:
770 break;
771 }
772 if (insn == end)
773 break;
774 insn = NEXT_INSN (insn);
775 }
776
 777   /* In case the expander replaced a normal insn by a sequence ending with a
 778      return and a barrier, or possibly some other sequence not behaving like an
 779      ordinary jump, we need to take care to move the basic block boundary.  */
780 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
781 bb->end = jump_insn;
782
 783   /* We've possibly replaced the conditional jump by a conditional jump
 784      followed by cleanup code on the fallthru edge, so the outgoing edges
 785      may be dead.  */
786 purge_dead_edges (bb);
787
 788   /* Now re-scan and wire in all edges.  This expects simple (conditional)
 789      jumps at the end of each new basic block.  */
790 make_edges (NULL, first_bb->index, bb->index, 1);
791
 792   /* Update branch probabilities.  Expect only (un)conditional jumps
 793      to have been created, with only forward edges.  */
794 for (i = first_bb->index; i <= bb->index; i++)
795 {
796 edge e,f;
797 basic_block b = BASIC_BLOCK (i);
798 if (b != first_bb)
799 {
800 b->count = 0;
801 b->frequency = 0;
802 for (e = b->pred; e; e=e->pred_next)
803 {
804 b->count += e->count;
805 b->frequency += EDGE_FREQUENCY (e);
806 }
807 }
808 if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
809 {
810 rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
811 int probability;
812
813 if (!note)
814 continue;
815 probability = INTVAL (XEXP (find_reg_note (b->end,
816 REG_BR_PROB,
817 NULL), 0));
818 e = BRANCH_EDGE (b);
819 e->probability = probability;
820 e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
821 / REG_BR_PROB_BASE);
822 f = FALLTHRU_EDGE (b);
823 f->probability = REG_BR_PROB_BASE - probability;
824 f->count = b->count - e->count;
825 }
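 	  /* Worked example of the computation above (illustrative only):
 	     with REG_BR_PROB_BASE == 10000, a REG_BR_PROB note of 9000 and
 	     b->count == 100, the branch edge gets probability 9000 and
 	     count (100 * 9000 + 5000) / 10000 == 90, while the fallthru
 	     edge gets probability 1000 and count 100 - 90 == 10.  */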
826 if (b->succ && !b->succ->succ_next)
827 {
828 e = b->succ;
829 e->probability = REG_BR_PROB_BASE;
830 e->count = b->count;
831 }
832 }
833 }
834
835 /* Find all basic blocks of the function whose first insn is F.
836
 837    Collect into label_value_list the labels whose addresses are taken.  This
 838    will be used in make_edges for use with computed gotos.  */
839
840 static void
841 find_basic_blocks_1 (f)
842 rtx f;
843 {
844 register rtx insn, next;
845 int i = 0;
846 rtx bb_note = NULL_RTX;
847 rtx lvl = NULL_RTX;
848 rtx trll = NULL_RTX;
849 rtx head = NULL_RTX;
850 rtx end = NULL_RTX;
851
852 /* We process the instructions in a slightly different way than we did
853 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
854 closed out the previous block, so that it gets attached at the proper
855 place. Since this form should be equivalent to the previous,
856 count_basic_blocks continues to use the old form as a check. */
857
858 for (insn = f; insn; insn = next)
859 {
860 enum rtx_code code = GET_CODE (insn);
861
862 next = NEXT_INSN (insn);
863
864 switch (code)
865 {
866 case NOTE:
867 {
868 int kind = NOTE_LINE_NUMBER (insn);
869
870 /* Look for basic block notes with which to keep the
871 basic_block_info pointers stable. Unthread the note now;
872 we'll put it back at the right place in create_basic_block.
873 Or not at all if we've already found a note in this block. */
874 if (kind == NOTE_INSN_BASIC_BLOCK)
875 {
876 if (bb_note == NULL_RTX)
877 bb_note = insn;
878 else
879 next = flow_delete_insn (insn);
880 }
881 break;
882 }
883
884 case CODE_LABEL:
885 /* A basic block starts at a label. If we've closed one off due
886 to a barrier or some such, no need to do it again. */
887 if (head != NULL_RTX)
888 {
889 create_basic_block (i++, head, end, bb_note);
890 bb_note = NULL_RTX;
891 }
892
893 head = end = insn;
894 break;
895
896 case JUMP_INSN:
897 /* A basic block ends at a jump. */
898 if (head == NULL_RTX)
899 head = insn;
900 else
901 {
902 /* ??? Make a special check for table jumps. The way this
903 happens is truly and amazingly gross. We are about to
904 create a basic block that contains just a code label and
905 an addr*vec jump insn. Worse, an addr_diff_vec creates
906 its own natural loop.
907
908 Prevent this bit of brain damage, pasting things together
909 correctly in make_edges.
910
911 The correct solution involves emitting the table directly
912 on the tablejump instruction as a note, or JUMP_LABEL. */
913
914 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
915 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
916 {
917 head = end = NULL;
918 n_basic_blocks--;
919 break;
920 }
921 }
922 end = insn;
923 goto new_bb_inclusive;
924
925 case BARRIER:
926 /* A basic block ends at a barrier. It may be that an unconditional
927 jump already closed the basic block -- no need to do it again. */
928 if (head == NULL_RTX)
929 break;
930 goto new_bb_exclusive;
931
932 case CALL_INSN:
933 {
934 /* Record whether this call created an edge. */
935 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
936 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
937
938 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
939 {
940 /* Scan each of the alternatives for label refs. */
941 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
942 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
943 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
944 /* Record its tail recursion label, if any. */
945 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
946 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
947 }
948
949 /* A basic block ends at a call that can either throw or
950 do a non-local goto. */
951 if ((nonlocal_goto_handler_labels && region >= 0)
952 || can_throw_internal (insn))
953 {
954 new_bb_inclusive:
955 if (head == NULL_RTX)
956 head = insn;
957 end = insn;
958
959 new_bb_exclusive:
960 create_basic_block (i++, head, end, bb_note);
961 head = end = NULL_RTX;
962 bb_note = NULL_RTX;
963 break;
964 }
965 }
966 /* Fall through. */
967
968 case INSN:
969 /* Non-call exceptions generate new blocks just like calls. */
970 if (flag_non_call_exceptions && can_throw_internal (insn))
971 goto new_bb_inclusive;
972
973 if (head == NULL_RTX)
974 head = insn;
975 end = insn;
976 break;
977
978 default:
979 abort ();
980 }
981
982 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
983 {
984 rtx note;
985
986 /* Make a list of all labels referred to other than by jumps.
987
988 Make a special exception for labels followed by an ADDR*VEC,
989 as this would be a part of the tablejump setup code.
990
991 Make a special exception to registers loaded with label
992 values just before jump insns that use them. */
993
994 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
995 if (REG_NOTE_KIND (note) == REG_LABEL)
996 {
997 rtx lab = XEXP (note, 0), next;
998
999 if ((next = next_nonnote_insn (lab)) != NULL
1000 && GET_CODE (next) == JUMP_INSN
1001 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1002 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1003 ;
1004 else if (GET_CODE (lab) == NOTE)
1005 ;
1006 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1007 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
1008 ;
1009 else
1010 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
1011 }
1012 }
1013 }
1014
1015 if (head != NULL_RTX)
1016 create_basic_block (i++, head, end, bb_note);
1017 else if (bb_note)
1018 flow_delete_insn (bb_note);
1019
1020 if (i != n_basic_blocks)
1021 abort ();
1022
1023 label_value_list = lvl;
1024 tail_recursion_label_list = trll;
1025 }
1026
1027 /* Tidy the CFG by deleting unreachable code and whatnot. */
1028
1029 void
1030 cleanup_cfg (mode)
1031 int mode;
1032 {
1033 timevar_push (TV_CLEANUP_CFG);
1034 delete_unreachable_blocks ();
1035 if (try_optimize_cfg (mode))
1036 delete_unreachable_blocks ();
1037 mark_critical_edges ();
1038
1039 /* Kill the data we won't maintain. */
1040 free_EXPR_LIST_list (&label_value_list);
1041 free_EXPR_LIST_list (&tail_recursion_label_list);
1042 timevar_pop (TV_CLEANUP_CFG);
1043 }
1044
1045 /* Create a new basic block consisting of the instructions between
1046 HEAD and END inclusive. Reuses the note and basic block struct
1047 in BB_NOTE, if any. */
1048
1049 void
1050 create_basic_block (index, head, end, bb_note)
1051 int index;
1052 rtx head, end, bb_note;
1053 {
1054 basic_block bb;
1055
1056 if (bb_note
1057 && ! RTX_INTEGRATED_P (bb_note)
1058 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1059 && bb->aux == NULL)
1060 {
1061 /* If we found an existing note, thread it back onto the chain. */
1062
1063 rtx after;
1064
1065 if (GET_CODE (head) == CODE_LABEL)
1066 after = head;
1067 else
1068 {
1069 after = PREV_INSN (head);
1070 head = bb_note;
1071 }
1072
1073 if (after != bb_note && NEXT_INSN (after) != bb_note)
1074 reorder_insns (bb_note, bb_note, after);
1075 }
1076 else
1077 {
1078 /* Otherwise we must create a note and a basic block structure.
1079 Since we allow basic block structs in rtl, give the struct
1080 the same lifetime by allocating it off the function obstack
1081 rather than using malloc. */
1082
1083 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1084 memset (bb, 0, sizeof (*bb));
1085
1086 if (GET_CODE (head) == CODE_LABEL)
1087 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1088 else
1089 {
1090 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1091 head = bb_note;
1092 }
1093 NOTE_BASIC_BLOCK (bb_note) = bb;
1094 }
1095
1096 /* Always include the bb note in the block. */
1097 if (NEXT_INSN (end) == bb_note)
1098 end = bb_note;
1099
1100 bb->head = head;
1101 bb->end = end;
1102 bb->index = index;
1103 BASIC_BLOCK (index) = bb;
1104
1105 /* Tag the block so that we know it has been used when considering
1106 other basic block notes. */
1107 bb->aux = bb;
1108 }
1109 \f
1110 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1111 note associated with the BLOCK. */
1112
1113 rtx
1114 first_insn_after_basic_block_note (block)
1115 basic_block block;
1116 {
1117 rtx insn;
1118
1119 /* Get the first instruction in the block. */
1120 insn = block->head;
1121
1122 if (insn == NULL_RTX)
1123 return NULL_RTX;
1124 if (GET_CODE (insn) == CODE_LABEL)
1125 insn = NEXT_INSN (insn);
1126 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1127 abort ();
1128
1129 return NEXT_INSN (insn);
1130 }
1131
1132 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1133 indexed by INSN_UID. MAX is the size of the array. */
1134
1135 void
1136 compute_bb_for_insn (max)
1137 int max;
1138 {
1139 int i;
1140
1141 if (basic_block_for_insn)
1142 VARRAY_FREE (basic_block_for_insn);
1143 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1144
1145 for (i = 0; i < n_basic_blocks; ++i)
1146 {
1147 basic_block bb = BASIC_BLOCK (i);
1148 rtx insn, end;
1149
1150 end = bb->end;
1151 insn = bb->head;
1152 while (1)
1153 {
1154 int uid = INSN_UID (insn);
1155 if (uid < max)
1156 VARRAY_BB (basic_block_for_insn, uid) = bb;
1157 if (insn == end)
1158 break;
1159 insn = NEXT_INSN (insn);
1160 }
1161 }
1162 }
1163
1164 /* Free the memory associated with the edge structures. */
1165
1166 void
1167 clear_edges ()
1168 {
1169 int i;
1170 edge n, e;
1171
1172 for (i = 0; i < n_basic_blocks; ++i)
1173 {
1174 basic_block bb = BASIC_BLOCK (i);
1175
1176 for (e = bb->succ; e; e = n)
1177 {
1178 n = e->succ_next;
1179 free (e);
1180 }
1181
1182 bb->succ = 0;
1183 bb->pred = 0;
1184 }
1185
1186 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1187 {
1188 n = e->succ_next;
1189 free (e);
1190 }
1191
1192 ENTRY_BLOCK_PTR->succ = 0;
1193 EXIT_BLOCK_PTR->pred = 0;
1194
1195 n_edges = 0;
1196 }
1197
1198 /* Identify the edges between basic blocks MIN to MAX.
1199
 1200    LABEL_VALUE_LIST is a list of labels referred to other than by jumps;
 1201    blocks that are otherwise unreachable may be reached through computed
 1202    gotos to these labels.
 1203 
 1204    If UPDATE_P is nonzero, the edges already present for blocks MIN to MAX are entered into the edge cache so that duplicate edges are not created.  */
1205
1206 static void
1207 make_edges (label_value_list, min, max, update_p)
1208 rtx label_value_list;
1209 int min, max, update_p;
1210 {
1211 int i;
1212 sbitmap *edge_cache = NULL;
1213
1214 /* Assume no computed jump; revise as we create edges. */
1215 current_function_has_computed_jump = 0;
1216
1217 /* Heavy use of computed goto in machine-generated code can lead to
1218 nearly fully-connected CFGs. In that case we spend a significant
1219 amount of time searching the edge lists for duplicates. */
1220 if (forced_labels || label_value_list)
1221 {
1222 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1223 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1224
1225 if (update_p)
1226 for (i = min; i <= max; ++i)
1227 {
1228 edge e;
1229 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1230 if (e->dest != EXIT_BLOCK_PTR)
1231 SET_BIT (edge_cache[i], e->dest->index);
1232 }
1233 }
1234
1235 /* By nature of the way these get numbered, block 0 is always the entry. */
1236 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1237
1238 for (i = min; i <= max; ++i)
1239 {
1240 basic_block bb = BASIC_BLOCK (i);
1241 rtx insn, x;
1242 enum rtx_code code;
1243 int force_fallthru = 0;
1244
1245 if (GET_CODE (bb->head) == CODE_LABEL
1246 && LABEL_ALTERNATE_NAME (bb->head))
1247 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1248
1249 /* Examine the last instruction of the block, and discover the
1250 ways we can leave the block. */
1251
1252 insn = bb->end;
1253 code = GET_CODE (insn);
1254
1255 /* A branch. */
1256 if (code == JUMP_INSN)
1257 {
1258 rtx tmp;
1259
1260 /* Recognize exception handling placeholders. */
1261 if (GET_CODE (PATTERN (insn)) == RESX)
1262 make_eh_edge (edge_cache, bb, insn);
1263
1264 /* Recognize a non-local goto as a branch outside the
1265 current function. */
1266 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1267 ;
1268
1269 /* ??? Recognize a tablejump and do the right thing. */
1270 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1271 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1272 && GET_CODE (tmp) == JUMP_INSN
1273 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1274 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1275 {
1276 rtvec vec;
1277 int j;
1278
1279 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1280 vec = XVEC (PATTERN (tmp), 0);
1281 else
1282 vec = XVEC (PATTERN (tmp), 1);
1283
1284 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1285 make_label_edge (edge_cache, bb,
1286 XEXP (RTVEC_ELT (vec, j), 0), 0);
1287
1288 /* Some targets (eg, ARM) emit a conditional jump that also
1289 contains the out-of-range target. Scan for these and
1290 add an edge if necessary. */
1291 if ((tmp = single_set (insn)) != NULL
1292 && SET_DEST (tmp) == pc_rtx
1293 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1294 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1295 make_label_edge (edge_cache, bb,
1296 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1297
1298 #ifdef CASE_DROPS_THROUGH
1299 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1300 us naturally detecting fallthru into the next block. */
1301 force_fallthru = 1;
1302 #endif
1303 }
1304
1305 /* If this is a computed jump, then mark it as reaching
1306 everything on the label_value_list and forced_labels list. */
1307 else if (computed_jump_p (insn))
1308 {
1309 current_function_has_computed_jump = 1;
1310
1311 for (x = label_value_list; x; x = XEXP (x, 1))
1312 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1313
1314 for (x = forced_labels; x; x = XEXP (x, 1))
1315 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1316 }
1317
 1318 	  /* Return insns create an edge out to the exit block.  */
1319 else if (returnjump_p (insn))
1320 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1321
1322 /* Otherwise, we have a plain conditional or unconditional jump. */
1323 else
1324 {
1325 if (! JUMP_LABEL (insn))
1326 abort ();
1327 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1328 }
1329 }
1330
1331 /* If this is a sibling call insn, then this is in effect a
1332 combined call and return, and so we need an edge to the
1333 exit block. No need to worry about EH edges, since we
1334 wouldn't have created the sibling call in the first place. */
1335
1336 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1337 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1338 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1339
1340 /* If this is a CALL_INSN, then mark it as reaching the active EH
1341 handler for this CALL_INSN. If we're handling non-call
1342 exceptions then any insn can reach any of the active handlers.
1343
1344 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1345
1346 else if (code == CALL_INSN || flag_non_call_exceptions)
1347 {
1348 /* Add any appropriate EH edges. */
1349 make_eh_edge (edge_cache, bb, insn);
1350
1351 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1352 {
1353 /* ??? This could be made smarter: in some cases it's possible
1354 to tell that certain calls will not do a nonlocal goto.
1355
1356 For example, if the nested functions that do the nonlocal
1357 gotos do not have their addresses taken, then only calls to
1358 those functions or to other nested functions that use them
1359 could possibly do nonlocal gotos. */
1360 /* We do know that a REG_EH_REGION note with a value less
1361 than 0 is guaranteed not to perform a non-local goto. */
1362 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1363 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1364 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1365 make_label_edge (edge_cache, bb, XEXP (x, 0),
1366 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1367 }
1368 }
1369
1370 /* Find out if we can drop through to the next block. */
1371 insn = next_nonnote_insn (insn);
1372 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1373 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1374 else if (i + 1 < n_basic_blocks)
1375 {
1376 rtx tmp = BLOCK_HEAD (i + 1);
1377 if (GET_CODE (tmp) == NOTE)
1378 tmp = next_nonnote_insn (tmp);
1379 if (force_fallthru || insn == tmp)
1380 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1381 }
1382 }
1383
1384 if (edge_cache)
1385 sbitmap_vector_free (edge_cache);
1386 }
1387
1388 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1389 about the edge that is accumulated between calls. */
1390
1391 void
1392 make_edge (edge_cache, src, dst, flags)
1393 sbitmap *edge_cache;
1394 basic_block src, dst;
1395 int flags;
1396 {
1397 int use_edge_cache;
1398 edge e;
1399
1400 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1401 many edges to them, and we didn't allocate memory for it. */
1402 use_edge_cache = (edge_cache
1403 && src != ENTRY_BLOCK_PTR
1404 && dst != EXIT_BLOCK_PTR);
1405
1406 /* Make sure we don't add duplicate edges. */
1407 switch (use_edge_cache)
1408 {
1409 default:
 1410       /* Quick test for non-existence of the edge.  */
1411 if (! TEST_BIT (edge_cache[src->index], dst->index))
1412 break;
1413
1414 /* The edge exists; early exit if no work to do. */
1415 if (flags == 0)
1416 return;
1417
1418 /* FALLTHRU */
1419 case 0:
1420 for (e = src->succ; e; e = e->succ_next)
1421 if (e->dest == dst)
1422 {
1423 e->flags |= flags;
1424 return;
1425 }
1426 break;
1427 }
1428
1429 e = (edge) xcalloc (1, sizeof (*e));
1430 n_edges++;
1431
1432 e->succ_next = src->succ;
1433 e->pred_next = dst->pred;
1434 e->src = src;
1435 e->dest = dst;
1436 e->flags = flags;
1437
1438 src->succ = e;
1439 dst->pred = e;
1440
1441 if (use_edge_cache)
1442 SET_BIT (edge_cache[src->index], dst->index);
1443 }
1444
1445 /* Create an edge from a basic block to a label. */
1446
1447 static void
1448 make_label_edge (edge_cache, src, label, flags)
1449 sbitmap *edge_cache;
1450 basic_block src;
1451 rtx label;
1452 int flags;
1453 {
1454 if (GET_CODE (label) != CODE_LABEL)
1455 abort ();
1456
1457 /* If the label was never emitted, this insn is junk, but avoid a
1458 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1459 as a result of a syntax error and a diagnostic has already been
1460 printed. */
1461
1462 if (INSN_UID (label) == 0)
1463 return;
1464
1465 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1466 }
1467
 1468 /* Create the exception handling edges generated by INSN in basic block SRC.  */
1469
1470 static void
1471 make_eh_edge (edge_cache, src, insn)
1472 sbitmap *edge_cache;
1473 basic_block src;
1474 rtx insn;
1475 {
1476 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1477 rtx handlers, i;
1478
1479 handlers = reachable_handlers (insn);
1480
1481 for (i = handlers; i; i = XEXP (i, 1))
1482 make_label_edge (edge_cache, src, XEXP (i, 0),
1483 EDGE_ABNORMAL | EDGE_EH | is_call);
1484
1485 free_INSN_LIST_list (&handlers);
1486 }
1487
1488 /* Identify critical edges and set the bits appropriately. */
1489
1490 void
1491 mark_critical_edges ()
1492 {
1493 int i, n = n_basic_blocks;
1494 basic_block bb;
1495
1496 /* We begin with the entry block. This is not terribly important now,
1497 but could be if a front end (Fortran) implemented alternate entry
1498 points. */
1499 bb = ENTRY_BLOCK_PTR;
1500 i = -1;
1501
1502 while (1)
1503 {
1504 edge e;
1505
1506 /* (1) Critical edges must have a source with multiple successors. */
1507 if (bb->succ && bb->succ->succ_next)
1508 {
1509 for (e = bb->succ; e; e = e->succ_next)
1510 {
1511 /* (2) Critical edges must have a destination with multiple
1512 predecessors. Note that we know there is at least one
1513 predecessor -- the edge we followed to get here. */
1514 if (e->dest->pred->pred_next)
1515 e->flags |= EDGE_CRITICAL;
1516 else
1517 e->flags &= ~EDGE_CRITICAL;
1518 }
1519 }
1520 else
1521 {
1522 for (e = bb->succ; e; e = e->succ_next)
1523 e->flags &= ~EDGE_CRITICAL;
1524 }
1525
1526 if (++i >= n)
1527 break;
1528 bb = BASIC_BLOCK (i);
1529 }
1530 }
1531 \f
1532 /* Mark the back edges in DFS traversal.
1533 Return non-zero if a loop (natural or otherwise) is present.
1534 Inspired by Depth_First_Search_PP described in:
1535
1536 Advanced Compiler Design and Implementation
1537 Steven Muchnick
1538 Morgan Kaufmann, 1997
1539
1540 and heavily borrowed from flow_depth_first_order_compute. */
1541
1542 bool
1543 mark_dfs_back_edges ()
1544 {
1545 edge *stack;
1546 int *pre;
1547 int *post;
1548 int sp;
1549 int prenum = 1;
1550 int postnum = 1;
1551 sbitmap visited;
1552 bool found = false;
1553
1554 /* Allocate the preorder and postorder number arrays. */
1555 pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
1556 post = (int *) xcalloc (n_basic_blocks, sizeof (int));
1557
1558 /* Allocate stack for back-tracking up CFG. */
1559 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
1560 sp = 0;
1561
1562 /* Allocate bitmap to track nodes that have been visited. */
1563 visited = sbitmap_alloc (n_basic_blocks);
1564
1565 /* None of the nodes in the CFG have been visited yet. */
1566 sbitmap_zero (visited);
1567
1568 /* Push the first edge on to the stack. */
1569 stack[sp++] = ENTRY_BLOCK_PTR->succ;
1570
1571 while (sp)
1572 {
1573 edge e;
1574 basic_block src;
1575 basic_block dest;
1576
1577 /* Look at the edge on the top of the stack. */
1578 e = stack[sp - 1];
1579 src = e->src;
1580 dest = e->dest;
1581 e->flags &= ~EDGE_DFS_BACK;
1582
1583 /* Check if the edge destination has been visited yet. */
1584 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
1585 {
1586 /* Mark that we have visited the destination. */
1587 SET_BIT (visited, dest->index);
1588
1589 pre[dest->index] = prenum++;
1590
1591 if (dest->succ)
1592 {
1593 /* Since the DEST node has been visited for the first
1594 time, check its successors. */
1595 stack[sp++] = dest->succ;
1596 }
1597 else
1598 post[dest->index] = postnum++;
1599 }
1600 else
1601 {
1602 if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
1603 && pre[src->index] >= pre[dest->index]
1604 && post[dest->index] == 0)
1605 e->flags |= EDGE_DFS_BACK, found = true;
1606
1607 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
1608 post[src->index] = postnum++;
1609
1610 if (e->succ_next)
1611 stack[sp - 1] = e->succ_next;
1612 else
1613 sp--;
1614 }
1615 }
1616
1617 free (pre);
1618 free (post);
1619 free (stack);
1620 sbitmap_free (visited);
1621
1622 return found;
1623 }
1624 \f
1625 /* Split a block BB after insn INSN creating a new fallthru edge.
1626 Return the new edge. Note that to keep other parts of the compiler happy,
1627 this function renumbers all the basic blocks so that the new
1628 one has a number one greater than the block split. */
1629
1630 edge
1631 split_block (bb, insn)
1632 basic_block bb;
1633 rtx insn;
1634 {
1635 basic_block new_bb;
1636 edge new_edge;
1637 edge e;
1638 rtx bb_note;
1639 int i, j;
1640
1641 /* There is no point splitting the block after its end. */
1642 if (bb->end == insn)
1643 return 0;
1644
1645 /* Create the new structures. */
1646 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1647 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1648 n_edges++;
1649
1650 memset (new_bb, 0, sizeof (*new_bb));
1651
1652 new_bb->head = NEXT_INSN (insn);
1653 new_bb->end = bb->end;
1654 bb->end = insn;
1655
1656 new_bb->succ = bb->succ;
1657 bb->succ = new_edge;
1658 new_bb->pred = new_edge;
1659 new_bb->count = bb->count;
1660 new_bb->frequency = bb->frequency;
1661 new_bb->loop_depth = bb->loop_depth;
1662
1663 new_edge->src = bb;
1664 new_edge->dest = new_bb;
1665 new_edge->flags = EDGE_FALLTHRU;
1666 new_edge->probability = REG_BR_PROB_BASE;
1667 new_edge->count = bb->count;
1668
1669 /* Redirect the src of the successor edges of bb to point to new_bb. */
1670 for (e = new_bb->succ; e; e = e->succ_next)
1671 e->src = new_bb;
1672
1673 /* Place the new block just after the block being split. */
1674 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1675
 1676 	  /* Some parts of the compiler expect blocks to be numbered in
 1677 	     sequential order, so insert the new block immediately after the
 1678 	     block being split.  */
1679 j = bb->index;
1680 for (i = n_basic_blocks - 1; i > j + 1; --i)
1681 {
1682 basic_block tmp = BASIC_BLOCK (i - 1);
1683 BASIC_BLOCK (i) = tmp;
1684 tmp->index = i;
1685 }
1686
1687 BASIC_BLOCK (i) = new_bb;
1688 new_bb->index = i;
1689
1690 if (GET_CODE (new_bb->head) == CODE_LABEL)
1691 {
1692 /* Create the basic block note. */
1693 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1694 new_bb->head);
1695 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1696
1697 /* If the only thing in this new block was the label, make sure
1698 the block note gets included. */
1699 if (new_bb->head == new_bb->end)
1700 new_bb->end = bb_note;
1701 }
1702 else
1703 {
1704 /* Create the basic block note. */
1705 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1706 new_bb->head);
1707 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1708 new_bb->head = bb_note;
1709 }
1710
1711 update_bb_for_insn (new_bb);
1712
1713 if (bb->global_live_at_start)
1714 {
1715 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1716 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1717 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1718
1719 /* We now have to calculate which registers are live at the end
1720 of the split basic block and at the start of the new basic
1721 block. Start with those registers that are known to be live
1722 at the end of the original basic block and get
1723 propagate_block to determine which registers are live. */
1724 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1725 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1726 COPY_REG_SET (bb->global_live_at_end,
1727 new_bb->global_live_at_start);
1728 }
1729
1730 return new_edge;
1731 }
1732
 1733 /* Return the label in the head of basic block BLOCK.  Create one if it doesn't exist.  */
1734 rtx
1735 block_label (block)
1736 basic_block block;
1737 {
1738 if (block == EXIT_BLOCK_PTR)
1739 return NULL_RTX;
1740 if (GET_CODE (block->head) != CODE_LABEL)
1741 {
1742 block->head = emit_label_before (gen_label_rtx (), block->head);
1743 if (basic_block_for_insn)
1744 set_block_for_insn (block->head, block);
1745 }
1746 return block->head;
1747 }
1748
1749 /* Return true if the block has no effect and only forwards control flow to
1750 its single destination. */
1751 bool
1752 forwarder_block_p (bb)
1753 basic_block bb;
1754 {
1755 rtx insn = bb->head;
1756 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1757 || !bb->succ || bb->succ->succ_next)
1758 return false;
1759
1760 while (insn != bb->end)
1761 {
1762 if (active_insn_p (insn))
1763 return false;
1764 insn = NEXT_INSN (insn);
1765 }
1766 return (!active_insn_p (insn)
1767 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1768 }
1769
 1770 /* Return nonzero if we can reach TARGET from SRC by falling through.  */
1771 static bool
1772 can_fallthru (src, target)
1773 basic_block src, target;
1774 {
1775 rtx insn = src->end;
1776 rtx insn2 = target->head;
1777
1778 if (src->index + 1 == target->index && !active_insn_p (insn2))
1779 insn2 = next_active_insn (insn2);
1780 /* ??? Later we may add code to move jump tables offline. */
1781 return next_active_insn (insn) == insn2;
1782 }
1783
 1784 /* Attempt to perform edge redirection by replacing a possibly complex jump
 1785    instruction by an unconditional jump, or by removing the jump completely.
1786 This can apply only if all edges now point to the same block.
1787
1788 The parameters and return values are equivalent to redirect_edge_and_branch.
1789 */
1790 static bool
1791 try_redirect_by_replacing_jump (e, target)
1792 edge e;
1793 basic_block target;
1794 {
1795 basic_block src = e->src;
1796 rtx insn = src->end, kill_from;
1797 edge tmp;
1798 rtx set;
1799 int fallthru = 0;
1800
1801 /* Verify that all targets will be TARGET. */
1802 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1803 if (tmp->dest != target && tmp != e)
1804 break;
1805 if (tmp || !onlyjump_p (insn))
1806 return false;
1807
1808 /* Avoid removing branch with side effects. */
1809 set = single_set (insn);
1810 if (!set || side_effects_p (set))
1811 return false;
1812
1813 /* In case we zap a conditional jump, we'll need to kill
1814 the cc0 setter too. */
1815 kill_from = insn;
1816 #ifdef HAVE_cc0
1817 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1818 kill_from = PREV_INSN (insn);
1819 #endif
1820
1821 /* See if we can create the fallthru edge. */
1822 if (can_fallthru (src, target))
1823 {
1824 src->end = PREV_INSN (kill_from);
1825 if (rtl_dump_file)
1826 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1827 fallthru = 1;
1828
1829 /* Selectively unlink the whole insn chain. */
1830 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1831 }
1832 /* If this is already a simplejump, redirect it. */
1833 else if (simplejump_p (insn))
1834 {
1835 if (e->dest == target)
1836 return false;
1837 if (rtl_dump_file)
1838 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1839 INSN_UID (insn), e->dest->index, target->index);
1840 redirect_jump (insn, block_label (target), 0);
1841 }
1842 /* Or replace a possibly complicated jump insn by a simple jump insn. */
1843 else
1844 {
1845 rtx target_label = block_label (target);
1846 rtx barrier;
1847
1848 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1849 JUMP_LABEL (src->end) = target_label;
1850 LABEL_NUSES (target_label)++;
1851 if (basic_block_for_insn)
1852 set_block_for_new_insns (src->end, src);
1853 if (rtl_dump_file)
1854 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1855 INSN_UID (insn), INSN_UID (src->end));
1856
1857 flow_delete_insn_chain (kill_from, insn);
1858
1859 barrier = next_nonnote_insn (src->end);
1860 if (!barrier || GET_CODE (barrier) != BARRIER)
1861 emit_barrier_after (src->end);
1862 }
1863
1864 /* Keep only one edge out and set proper flags. */
1865 while (src->succ->succ_next)
1866 remove_edge (src->succ);
1867 e = src->succ;
1868 if (fallthru)
1869 e->flags = EDGE_FALLTHRU;
1870 else
1871 e->flags = 0;
1872 e->probability = REG_BR_PROB_BASE;
1873 e->count = src->count;
1874
1875 /* We don't want a block to end on a line-number note since that has
1876 the potential of changing the code between -g and not -g. */
1877 while (GET_CODE (e->src->end) == NOTE
1878 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1879 {
1880 rtx prev = PREV_INSN (e->src->end);
1881 flow_delete_insn (e->src->end);
1882 e->src->end = prev;
1883 }
1884
1885 if (e->dest != target)
1886 redirect_edge_succ (e, target);
1887 return true;
1888 }
1889
1890 /* Return the last LOOP_BEG note appearing after INSN, before the start
1891 of the next basic block. Return INSN if there are no such notes.
1892
1893 When emitting a jump to redirect a fallthru edge, it should always
1894 appear after the LOOP_BEG notes, as the loop optimizer expects a loop
1895 to either start with a fallthru edge or with a jump following the
1896 LOOP_BEG note that jumps to the loop exit test. */
1897 rtx
1898 last_loop_beg_note (insn)
1899 rtx insn;
1900 {
1901 rtx last = insn;
1902 insn = NEXT_INSN (insn);
1903 while (GET_CODE (insn) == NOTE
1904 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
1905 {
1906 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1907 last = insn;
1908 insn = NEXT_INSN (insn);
1909 }
1910 return last;
1911 }
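
/* Illustration (not part of the original source): given an insn stream
   such as

	INSN
	(note NOTE_INSN_LOOP_BEG)
	(note NOTE_INSN_LOOP_BEG)
	(note NOTE_INSN_BASIC_BLOCK)

   last_loop_beg_note (INSN) returns the second LOOP_BEG note, so a jump
   emitted after the returned insn ends up below every LOOP_BEG note that
   precedes the next basic block.  */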
1912
1913 /* Attempt to change the code to redirect edge E to TARGET. Do not do
1914 that at the expense of adding new instructions or reordering basic
1915 blocks.
1916
1917 The function can also be called with the edge destination already equal
1918 to TARGET. Then it should try the simplifications and do nothing if
1919 none is possible.
1920
1921 Return true if the transformation succeeded. We still return false in
1922 case E already pointed to TARGET and we did not manage to simplify the
1923 instruction stream. */
1924 bool
1925 redirect_edge_and_branch (e, target)
1926 edge e;
1927 basic_block target;
1928 {
1929 rtx tmp;
1930 rtx old_label = e->dest->head;
1931 basic_block src = e->src;
1932 rtx insn = src->end;
1933
1934 if (e->flags & EDGE_COMPLEX)
1935 return false;
1936
1937 if (try_redirect_by_replacing_jump (e, target))
1938 return true;
1939 /* Do this fast path late, as we want the above code to simplify for cases
1940 where this is called on the single edge leaving a basic block containing a
1941 nontrivial jump insn. */
1942 else if (e->dest == target)
1943 return false;
1944
1945 /* We can only redirect the non-fallthru edges of a jump insn. */
1946 if (e->flags & EDGE_FALLTHRU)
1947 return false;
1948 if (GET_CODE (insn) != JUMP_INSN)
1949 return false;
1950
1951 /* Recognize a tablejump and adjust all matching cases. */
1952 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1953 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1954 && GET_CODE (tmp) == JUMP_INSN
1955 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1956 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1957 {
1958 rtvec vec;
1959 int j;
1960 rtx new_label = block_label (target);
1961
1962 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1963 vec = XVEC (PATTERN (tmp), 0);
1964 else
1965 vec = XVEC (PATTERN (tmp), 1);
1966
1967 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1968 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1969 {
1970 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1971 --LABEL_NUSES (old_label);
1972 ++LABEL_NUSES (new_label);
1973 }
1974
1975 /* Handle casesi dispatch insns. */
1976 if ((tmp = single_set (insn)) != NULL
1977 && SET_DEST (tmp) == pc_rtx
1978 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1979 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1980 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1981 {
1982 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1983 new_label);
1984 --LABEL_NUSES (old_label);
1985 ++LABEL_NUSES (new_label);
1986 }
1987 }
1988 else
1989 {
1990 /* ??? We may play games with moving the named labels from
1991 one basic block to the other in case only one computed_jump is
1992 available. */
1993 if (computed_jump_p (insn))
1994 return false;
1995
1996 /* A return instruction can't be redirected. */
1997 if (returnjump_p (insn))
1998 return false;
1999
2000 /* If the insn doesn't go where we think, we're confused. */
2001 if (JUMP_LABEL (insn) != old_label)
2002 abort ();
2003 redirect_jump (insn, block_label (target), 0);
2004 }
2005
2006 if (rtl_dump_file)
2007 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
2008 e->src->index, e->dest->index, target->index);
2009 if (e->dest != target)
2010 redirect_edge_succ_nodup (e, target);
2011 return true;
2012 }
2013
2014 /* Redirect edge E even at the expense of creating a new jump insn or
2015 basic block. Return the new basic block if one was created, NULL otherwise.
2016 Abort if the conversion is impossible. */
2017 basic_block
2018 redirect_edge_and_branch_force (e, target)
2019 edge e;
2020 basic_block target;
2021 {
2022 basic_block new_bb;
2023 edge new_edge;
2024 rtx label;
2025 rtx bb_note;
2026 int i, j;
2027
2028 if (redirect_edge_and_branch (e, target))
2029 return NULL;
2030 if (e->dest == target)
2031 return NULL;
2032 if (e->flags & EDGE_ABNORMAL)
2033 abort ();
2034 if (!(e->flags & EDGE_FALLTHRU))
2035 abort ();
2036
2037 e->flags &= ~EDGE_FALLTHRU;
2038 label = block_label (target);
2039 /* Case of the fallthru block. */
2040 if (!e->src->succ->succ_next)
2041 {
2042 e->src->end = emit_jump_insn_after (gen_jump (label),
2043 last_loop_beg_note (e->src->end));
2044 JUMP_LABEL (e->src->end) = label;
2045 LABEL_NUSES (label)++;
2046 if (basic_block_for_insn)
2047 set_block_for_new_insns (e->src->end, e->src);
2048 emit_barrier_after (e->src->end);
2049 if (rtl_dump_file)
2050 fprintf (rtl_dump_file,
2051 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
2052 INSN_UID (e->src->end), e->src->index, e->dest->index,
2053 target->index);
2054 redirect_edge_succ (e, target);
2055 return NULL;
2056 }
2057 /* Redirecting fallthru edge of the conditional needs extra work. */
2058
2059 if (rtl_dump_file)
2060 fprintf (rtl_dump_file,
2061 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
2062 INSN_UID (e->src->end), e->src->index, e->dest->index,
2063 target->index);
2064
2065 /* Create the new structures. */
2066 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
2067 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
2068 n_edges++;
2069
2070 memset (new_bb, 0, sizeof (*new_bb));
2071
2072 new_bb->end = new_bb->head = last_loop_beg_note (e->src->end);
2073 new_bb->succ = NULL;
2074 new_bb->pred = new_edge;
2075 new_bb->count = e->count;
2076 new_bb->frequency = EDGE_FREQUENCY (e);
2077 new_bb->loop_depth = e->dest->loop_depth;
2078
2079 new_edge->flags = EDGE_FALLTHRU;
2080 new_edge->probability = e->probability;
2081 new_edge->count = e->count;
2082
2083 if (target->global_live_at_start)
2084 {
2085 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2086 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2087 COPY_REG_SET (new_bb->global_live_at_start,
2088 target->global_live_at_start);
2089 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
2090 }
2091
2092 /* Wire edge in. */
2093 new_edge->src = e->src;
2094 new_edge->dest = new_bb;
2095 new_edge->succ_next = e->src->succ;
2096 e->src->succ = new_edge;
2097 new_edge->pred_next = NULL;
2098
2099 /* Redirect old edge. */
2100 redirect_edge_succ (e, target);
2101 redirect_edge_pred (e, new_bb);
2102 e->probability = REG_BR_PROB_BASE;
2103
2104 /* Place the new block just after the block being split. */
2105 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2106
2107 /* Some parts of the compiler expect blocks to be numbered in
2108 sequential order, so insert the new block immediately after the
2109 block being split. */
2110 j = new_edge->src->index;
2111 for (i = n_basic_blocks - 1; i > j + 1; --i)
2112 {
2113 basic_block tmp = BASIC_BLOCK (i - 1);
2114 BASIC_BLOCK (i) = tmp;
2115 tmp->index = i;
2116 }
2117
2118 BASIC_BLOCK (i) = new_bb;
2119 new_bb->index = i;
2120
2121 /* Create the basic block note. */
2122 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
2123 NOTE_BASIC_BLOCK (bb_note) = new_bb;
2124 new_bb->head = bb_note;
2125
2126 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
2127 JUMP_LABEL (new_bb->end) = label;
2128 LABEL_NUSES (label)++;
2129 if (basic_block_for_insn)
2130 set_block_for_new_insns (new_bb->end, new_bb);
2131 emit_barrier_after (new_bb->end);
2132 return new_bb;
2133 }
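
/* Illustrative note (not part of the original source):
   redirect_edge_and_branch_force first retries the cheap
   redirect_edge_and_branch path above; only when that fails does it pay
   the cost of emitting a new jump insn, and possibly a new basic block,
   as in the merge_blocks code later in this file.  */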
2134
2135 /* Helper function for split_edge. Return true if the edge from BB2 to BB1
2136 is a back edge of a syntactic loop. */
2137 static bool
2138 back_edge_of_syntactic_loop_p (bb1, bb2)
2139 basic_block bb1, bb2;
2140 {
2141 rtx insn;
2142 int count = 0;
2143
2144 if (bb1->index > bb2->index)
2145 return false;
2146
2147 if (bb1->index == bb2->index)
2148 return true;
2149
2150 for (insn = bb1->end; insn != bb2->head && count >= 0;
2151 insn = NEXT_INSN (insn))
2152 if (GET_CODE (insn) == NOTE)
2153 {
2154 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2155 count++;
2156 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2157 count--;
2158 }
2159
2160 return count >= 0;
2161 }
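
/* Illustration (not part of the original source): the walk above treats
   LOOP_BEG/LOOP_END notes between BB1->end and BB2->head like nested
   parentheses.  Roughly, if the running count ever goes negative, a loop
   closes between the two blocks without having opened there, so the edge
   from BB2 back to BB1 does not stay inside one syntactic loop and the
   function returns false.  */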
2162
2163 /* Split a (typically critical) edge. Return the new block.
2164 Abort on abnormal edges.
2165
2166 ??? The code generally expects to be called on critical edges.
2167 The case of a block ending in an unconditional jump to a
2168 block with multiple predecessors is not handled optimally. */
2169
2170 basic_block
2171 split_edge (edge_in)
2172 edge edge_in;
2173 {
2174 basic_block old_pred, bb, old_succ;
2175 edge edge_out;
2176 rtx bb_note;
2177 int i, j;
2178
2179 /* Abnormal edges cannot be split. */
2180 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2181 abort ();
2182
2183 old_pred = edge_in->src;
2184 old_succ = edge_in->dest;
2185
2186 /* Create the new structures. */
2187 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2188 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2189 n_edges++;
2190
2191 memset (bb, 0, sizeof (*bb));
2192
2193 /* ??? This info is likely going to be out of date very soon. */
2194 if (old_succ->global_live_at_start)
2195 {
2196 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2197 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2198 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2199 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2200 }
2201
2202 /* Wire them up. */
2203 bb->succ = edge_out;
2204 bb->count = edge_in->count;
2205 bb->frequency = EDGE_FREQUENCY (edge_in);
2206
2207 edge_in->flags &= ~EDGE_CRITICAL;
2208
2209 edge_out->pred_next = old_succ->pred;
2210 edge_out->succ_next = NULL;
2211 edge_out->src = bb;
2212 edge_out->dest = old_succ;
2213 edge_out->flags = EDGE_FALLTHRU;
2214 edge_out->probability = REG_BR_PROB_BASE;
2215 edge_out->count = edge_in->count;
2216
2217 old_succ->pred = edge_out;
2218
2219 /* Tricky case -- if there existed a fallthru into the successor
2220 (and we're not it) we must add a new unconditional jump around
2221 the new block we're actually interested in.
2222
2223 Further, if that edge is critical, this means a second new basic
2224 block must be created to hold it. In order to simplify correct
2225 insn placement, do this before we touch the existing basic block
2226 ordering for the block we were really wanting. */
2227 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2228 {
2229 edge e;
2230 for (e = edge_out->pred_next; e; e = e->pred_next)
2231 if (e->flags & EDGE_FALLTHRU)
2232 break;
2233
2234 if (e)
2235 {
2236 basic_block jump_block;
2237 rtx pos;
2238
2239 if ((e->flags & EDGE_CRITICAL) == 0
2240 && e->src != ENTRY_BLOCK_PTR)
2241 {
2242 /* Non critical -- we can simply add a jump to the end
2243 of the existing predecessor. */
2244 jump_block = e->src;
2245 }
2246 else
2247 {
2248 /* We need a new block to hold the jump. The simplest
2249 way to do the bulk of the work here is to recursively
2250 call ourselves. */
2251 jump_block = split_edge (e);
2252 e = jump_block->succ;
2253 }
2254
2255 /* Now add the jump insn ... */
2256 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2257 last_loop_beg_note (jump_block->end));
2258 jump_block->end = pos;
2259 if (basic_block_for_insn)
2260 set_block_for_new_insns (pos, jump_block);
2261 emit_barrier_after (pos);
2262
2263 /* ... let jump know that label is in use, ... */
2264 JUMP_LABEL (pos) = old_succ->head;
2265 ++LABEL_NUSES (old_succ->head);
2266
2267 /* ... and clear fallthru on the outgoing edge. */
2268 e->flags &= ~EDGE_FALLTHRU;
2269
2270 /* Continue splitting the interesting edge. */
2271 }
2272 }
2273
2274 /* Place the new block just in front of the successor. */
2275 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2276 if (old_succ == EXIT_BLOCK_PTR)
2277 j = n_basic_blocks - 1;
2278 else
2279 j = old_succ->index;
2280 for (i = n_basic_blocks - 1; i > j; --i)
2281 {
2282 basic_block tmp = BASIC_BLOCK (i - 1);
2283 BASIC_BLOCK (i) = tmp;
2284 tmp->index = i;
2285 }
2286 BASIC_BLOCK (i) = bb;
2287 bb->index = i;
2288
2289 /* Create the basic block note.
2290
2291 Where we place the note can have a noticeable impact on the generated
2292 code. Consider this cfg:
2293
2294 E
2295 |
2296 0
2297 / \
2298 +->1-->2--->E
2299 | |
2300 +--+
2301
2302 If we need to insert an insn on the edge from block 0 to block 1,
2303 we want to ensure the instructions we insert are outside of any
2304 loop notes that physically sit between block 0 and block 1. Otherwise
2305 we confuse the loop optimizer into thinking the loop is a phony. */
2306 if (old_succ != EXIT_BLOCK_PTR
2307 && PREV_INSN (old_succ->head)
2308 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2309 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2310 && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
2311 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2312 PREV_INSN (old_succ->head));
2313 else if (old_succ != EXIT_BLOCK_PTR)
2314 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2315 else
2316 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2317 NOTE_BASIC_BLOCK (bb_note) = bb;
2318 bb->head = bb->end = bb_note;
2319
2320 /* For non-fallthru edges, we must adjust the predecessor's
2321 jump instruction to target our new block. */
2322 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2323 {
2324 if (!redirect_edge_and_branch (edge_in, bb))
2325 abort ();
2326 }
2327 else
2328 redirect_edge_succ (edge_in, bb);
2329
2330 return bb;
2331 }
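
/* Illustration (not part of the original source): splitting an edge A->B
   turns

	A ---> B	into	A ---> N ---> B

   where N is the freshly created block and N->B is the new fallthru edge
   wired up above.  Insns queued on the original edge can then be placed
   safely inside N.  */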
2332
2333 /* Queue instructions for insertion on an edge between two basic blocks.
2334 The new instructions and basic blocks (if any) will not appear in the
2335 CFG until commit_edge_insertions is called. */
2336
2337 void
2338 insert_insn_on_edge (pattern, e)
2339 rtx pattern;
2340 edge e;
2341 {
2342 /* We cannot insert instructions on an abnormal critical edge.
2343 It will be easier to find the culprit if we die now. */
2344 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2345 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2346 abort ();
2347
2348 if (e->insns == NULL_RTX)
2349 start_sequence ();
2350 else
2351 push_to_sequence (e->insns);
2352
2353 emit_insn (pattern);
2354
2355 e->insns = get_insns ();
2356 end_sequence ();
2357 }
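
/* Illustrative usage (not part of the original source): insns are queued
   on edges and then flushed in one pass, e.g.

	insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
	...
	commit_edge_insertions ();

   which is exactly the pattern used for the dummy insn in
   flow_call_edges_add below.  */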
2358
2359 /* Update the CFG for the instructions queued on edge E. */
2360
2361 static void
2362 commit_one_edge_insertion (e)
2363 edge e;
2364 {
2365 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2366 basic_block bb;
2367
2368 /* Pull the insns off the edge now since the edge might go away. */
2369 insns = e->insns;
2370 e->insns = NULL_RTX;
2371
2372 /* Figure out where to put these things. If the destination has
2373 one predecessor, insert there. Except for the exit block. */
2374 if (e->dest->pred->pred_next == NULL
2375 && e->dest != EXIT_BLOCK_PTR)
2376 {
2377 bb = e->dest;
2378
2379 /* Get the location correct wrt a code label, and "nice" wrt
2380 a basic block note, and before everything else. */
2381 tmp = bb->head;
2382 if (GET_CODE (tmp) == CODE_LABEL)
2383 tmp = NEXT_INSN (tmp);
2384 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2385 tmp = NEXT_INSN (tmp);
2386 if (tmp == bb->head)
2387 before = tmp;
2388 else
2389 after = PREV_INSN (tmp);
2390 }
2391
2392 /* If the source has one successor and the edge is not abnormal,
2393 insert there. Except for the entry block. */
2394 else if ((e->flags & EDGE_ABNORMAL) == 0
2395 && e->src->succ->succ_next == NULL
2396 && e->src != ENTRY_BLOCK_PTR)
2397 {
2398 bb = e->src;
2399 /* It is possible to have a non-simple jump here. Consider a target
2400 where some forms of unconditional jumps clobber a register. This
2401 happens on the fr30 for example.
2402
2403 We know this block has a single successor, so we can just emit
2404 the queued insns before the jump. */
2405 if (GET_CODE (bb->end) == JUMP_INSN)
2406 {
2407 before = bb->end;
2408 }
2409 else
2410 {
2411 /* We'd better be fallthru, or we've lost track of what's what. */
2412 if ((e->flags & EDGE_FALLTHRU) == 0)
2413 abort ();
2414
2415 after = bb->end;
2416 }
2417 }
2418
2419 /* Otherwise we must split the edge. */
2420 else
2421 {
2422 bb = split_edge (e);
2423 after = bb->end;
2424 }
2425
2426 /* Now that we've found the spot, do the insertion. */
2427
2428 /* Set the new block number for these insns, if structure is allocated. */
2429 if (basic_block_for_insn)
2430 {
2431 rtx i;
2432 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2433 set_block_for_insn (i, bb);
2434 }
2435
2436 if (before)
2437 {
2438 emit_insns_before (insns, before);
2439 if (before == bb->head)
2440 bb->head = insns;
2441
2442 last = prev_nonnote_insn (before);
2443 }
2444 else
2445 {
2446 last = emit_insns_after (insns, after);
2447 if (after == bb->end)
2448 bb->end = last;
2449 }
2450
2451 if (returnjump_p (last))
2452 {
2453 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2454 This is not currently a problem because this only happens
2455 for the (single) epilogue, which already has a fallthru edge
2456 to EXIT. */
2457
2458 e = bb->succ;
2459 if (e->dest != EXIT_BLOCK_PTR
2460 || e->succ_next != NULL
2461 || (e->flags & EDGE_FALLTHRU) == 0)
2462 abort ();
2463 e->flags &= ~EDGE_FALLTHRU;
2464
2465 emit_barrier_after (last);
2466 bb->end = last;
2467
2468 if (before)
2469 flow_delete_insn (before);
2470 }
2471 else if (GET_CODE (last) == JUMP_INSN)
2472 abort ();
2473 find_sub_basic_blocks (bb);
2474 }
2475
2476 /* Update the CFG for all queued instructions. */
2477
2478 void
2479 commit_edge_insertions ()
2480 {
2481 int i;
2482 basic_block bb;
2483 compute_bb_for_insn (get_max_uid ());
2484
2485 #ifdef ENABLE_CHECKING
2486 verify_flow_info ();
2487 #endif
2488
2489 i = -1;
2490 bb = ENTRY_BLOCK_PTR;
2491 while (1)
2492 {
2493 edge e, next;
2494
2495 for (e = bb->succ; e; e = next)
2496 {
2497 next = e->succ_next;
2498 if (e->insns)
2499 commit_one_edge_insertion (e);
2500 }
2501
2502 if (++i >= n_basic_blocks)
2503 break;
2504 bb = BASIC_BLOCK (i);
2505 }
2506 }
2507
2508 /* Return true if we need to add a fake edge to the exit block.
2509 Helper function for flow_call_edges_add. */
2510 static bool
2511 need_fake_edge_p (insn)
2512 rtx insn;
2513 {
2514 if (!INSN_P (insn))
2515 return false;
2516
2517 if ((GET_CODE (insn) == CALL_INSN
2518 && !SIBLING_CALL_P (insn)
2519 && !find_reg_note (insn, REG_NORETURN, NULL)
2520 && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
2521 && !CONST_OR_PURE_CALL_P (insn)))
2522 return true;
2523
2524 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2525 && MEM_VOLATILE_P (PATTERN (insn)))
2526 || (GET_CODE (PATTERN (insn)) == PARALLEL
2527 && asm_noperands (insn) != -1
2528 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2529 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2530 }
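
/* Illustration (not part of the original source): the insns flagged above
   are CALL_INSNs that are not sibling calls, not marked REG_NORETURN or
   REG_ALWAYS_RETURN, and not const/pure, together with volatile asm
   statements -- the cases where control might leave the block before
   reaching its end.  */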
2531
2532 /* Add fake edges to the function exit for any non-constant and non-noreturn
2533 calls, or volatile inline assembly, in the bitmap of blocks specified by
2534 BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of blocks
2535 that were split.
2536
2537 The goal is to expose cases in which entering a basic block does not imply
2538 that all subsequent instructions must be executed. */
2539
2540 int
2541 flow_call_edges_add (blocks)
2542 sbitmap blocks;
2543 {
2544 int i;
2545 int blocks_split = 0;
2546 int bb_num = 0;
2547 basic_block *bbs;
2548 bool check_last_block = false;
2549
2550 /* Map bb indices into basic block pointers since split_block
2551 will renumber the basic blocks. */
2552
2553 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2554
2555 if (! blocks)
2556 {
2557 for (i = 0; i < n_basic_blocks; i++)
2558 bbs[bb_num++] = BASIC_BLOCK (i);
2559 check_last_block = true;
2560 }
2561 else
2562 {
2563 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2564 {
2565 bbs[bb_num++] = BASIC_BLOCK (i);
2566 if (i == n_basic_blocks - 1)
2567 check_last_block = true;
2568 });
2569 }
2570
2571 /* In the last basic block, before epilogue generation, there will be
2572 a fallthru edge to EXIT. Special care is required if the last insn
2573 of the last basic block is a call because make_edge folds duplicate
2574 edges, which would result in the fallthru edge also being marked
2575 fake, which would result in the fallthru edge being removed by
2576 remove_fake_edges, which would result in an invalid CFG.
2577
2578 Moreover, we can't elide the outgoing fake edge, since the block
2579 profiler needs to take this into account in order to solve the minimal
2580 spanning tree in the case that the call doesn't return.
2581
2582 Handle this by adding a dummy instruction in a new last basic block. */
2583 if (check_last_block
2584 && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
2585 {
2586 edge e;
2587 for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
2588 if (e->dest == EXIT_BLOCK_PTR)
2589 break;
2590 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2591 commit_edge_insertions ();
2592 }
2593
2594
2595 /* Now add fake edges to the function exit for any non-constant
2596 calls since there is no way that we can determine if they will
2597 return or not... */
2598
2599 for (i = 0; i < bb_num; i++)
2600 {
2601 basic_block bb = bbs[i];
2602 rtx insn;
2603 rtx prev_insn;
2604
2605 for (insn = bb->end; ; insn = prev_insn)
2606 {
2607 prev_insn = PREV_INSN (insn);
2608 if (need_fake_edge_p (insn))
2609 {
2610 edge e;
2611
2612 /* The above condition should be enough to verify that there is
2613 no edge to the exit block in the CFG already. Calling make_edge in
2614 such a case would make us mark that edge as fake and remove it
2615 later. */
2616 #ifdef ENABLE_CHECKING
2617 if (insn == bb->end)
2618 for (e = bb->succ; e; e = e->succ_next)
2619 if (e->dest == EXIT_BLOCK_PTR)
2620 abort ();
2621 #endif
2622
2623 /* Note that the following may create a new basic block
2624 and renumber the existing basic blocks. */
2625 e = split_block (bb, insn);
2626 if (e)
2627 blocks_split++;
2628
2629 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2630 }
2631 if (insn == bb->head)
2632 break;
2633 }
2634 }
2635
2636 if (blocks_split)
2637 verify_flow_info ();
2638
2639 free (bbs);
2640 return blocks_split;
2641 }
2642 \f
2643 /* Find unreachable blocks. An unreachable block will have NULL in
2644 block->aux; a non-NULL value indicates the block is reachable. */
2645
2646 void
2647 find_unreachable_blocks ()
2648 {
2649 edge e;
2650 int i, n;
2651 basic_block *tos, *worklist;
2652
2653 n = n_basic_blocks;
2654 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2655
2656 /* Use basic_block->aux as a marker. Clear them all. */
2657
2658 for (i = 0; i < n; ++i)
2659 BASIC_BLOCK (i)->aux = NULL;
2660
2661 /* Add our starting points to the worklist. Almost always there will
2662 be only one. It isn't inconceivable that we might one day directly
2663 support Fortran alternate entry points. */
2664
2665 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2666 {
2667 *tos++ = e->dest;
2668
2669 /* Mark the block with a handy non-null value. */
2670 e->dest->aux = e;
2671 }
2672
2673 /* Iterate: find everything reachable from what we've already seen. */
2674
2675 while (tos != worklist)
2676 {
2677 basic_block b = *--tos;
2678
2679 for (e = b->succ; e; e = e->succ_next)
2680 if (!e->dest->aux)
2681 {
2682 *tos++ = e->dest;
2683 e->dest->aux = e;
2684 }
2685 }
2686
2687 free (worklist);
2688 }
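
/* Illustration (not part of the original source): the code above is a
   plain worklist reachability walk seeded from ENTRY's successors.  When
   it terminates, any block whose ->aux field is still NULL was never
   reached and will be removed by delete_unreachable_blocks below.  */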
2689
2690 /* Delete all unreachable basic blocks. */
2691 static void
2692 delete_unreachable_blocks ()
2693 {
2694 int i;
2695
2696 find_unreachable_blocks ();
2697
2698 /* Delete all unreachable basic blocks. Count down so that we
2699 don't interfere with the block renumbering that happens in
2700 flow_delete_block. */
2701
2702 for (i = n_basic_blocks - 1; i >= 0; --i)
2703 {
2704 basic_block b = BASIC_BLOCK (i);
2705
2706 if (b->aux != NULL)
2707 /* This block was found. Tidy up the mark. */
2708 b->aux = NULL;
2709 else
2710 flow_delete_block (b);
2711 }
2712
2713 tidy_fallthru_edges ();
2714 }
2715
2716 /* Return true if NOTE is not one of the ones that must be kept paired,
2717 so that we may simply delete them. */
2718
2719 static int
2720 can_delete_note_p (note)
2721 rtx note;
2722 {
2723 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2724 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2725 }
2726
2727 /* Unlink a chain of insns between START and FINISH, leaving notes
2728 that must be paired. */
2729
2730 void
2731 flow_delete_insn_chain (start, finish)
2732 rtx start, finish;
2733 {
2734 /* Unchain the insns one by one. It would be quicker to delete all
2735 of these with a single unchaining, rather than one at a time, but
2736 we need to keep the NOTEs. */
2737
2738 rtx next;
2739
2740 while (1)
2741 {
2742 next = NEXT_INSN (start);
2743 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2744 ;
2745 else if (GET_CODE (start) == CODE_LABEL
2746 && ! can_delete_label_p (start))
2747 {
2748 const char *name = LABEL_NAME (start);
2749 PUT_CODE (start, NOTE);
2750 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2751 NOTE_SOURCE_FILE (start) = name;
2752 }
2753 else
2754 next = flow_delete_insn (start);
2755
2756 if (start == finish)
2757 break;
2758 start = next;
2759 }
2760 }
2761
2762 /* Delete the insns in a (non-live) block. We physically delete every
2763 non-deleted-note insn, and update the flow graph appropriately.
2764
2765 Return nonzero if we deleted an exception handler. */
2766
2767 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2768 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2769
2770 int
2771 flow_delete_block (b)
2772 basic_block b;
2773 {
2774 int deleted_handler = 0;
2775 rtx insn, end, tmp;
2776
2777 /* If the head of this block is a CODE_LABEL, then it might be the
2778 label for an exception handler which can't be reached.
2779
2780 We need to remove the label from the exception_handler_label list
2781 and remove the associated NOTE_INSN_EH_REGION_BEG and
2782 NOTE_INSN_EH_REGION_END notes. */
2783
2784 insn = b->head;
2785
2786 never_reached_warning (insn);
2787
2788 if (GET_CODE (insn) == CODE_LABEL)
2789 maybe_remove_eh_handler (insn);
2790
2791 /* Include any jump table following the basic block. */
2792 end = b->end;
2793 if (GET_CODE (end) == JUMP_INSN
2794 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2795 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2796 && GET_CODE (tmp) == JUMP_INSN
2797 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2798 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2799 end = tmp;
2800
2801 /* Include any barrier that may follow the basic block. */
2802 tmp = next_nonnote_insn (end);
2803 if (tmp && GET_CODE (tmp) == BARRIER)
2804 end = tmp;
2805
2806 /* Selectively delete the entire chain. */
2807 flow_delete_insn_chain (insn, end);
2808
2809 /* Remove the edges into and out of this block. Note that there may
2810 indeed be edges in, if we are removing an unreachable loop. */
2811 {
2812 edge e, next, *q;
2813
2814 for (e = b->pred; e; e = next)
2815 {
2816 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2817 continue;
2818 *q = e->succ_next;
2819 next = e->pred_next;
2820 n_edges--;
2821 free (e);
2822 }
2823 for (e = b->succ; e; e = next)
2824 {
2825 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2826 continue;
2827 *q = e->pred_next;
2828 next = e->succ_next;
2829 n_edges--;
2830 free (e);
2831 }
2832
2833 b->pred = NULL;
2834 b->succ = NULL;
2835 }
2836
2837 /* Remove the basic block from the array, and compact behind it. */
2838 expunge_block (b);
2839
2840 return deleted_handler;
2841 }
2842
2843 /* Remove block B from the basic block array and compact behind it. */
2844
2845 static void
2846 expunge_block (b)
2847 basic_block b;
2848 {
2849 int i, n = n_basic_blocks;
2850
2851 for (i = b->index; i + 1 < n; ++i)
2852 {
2853 basic_block x = BASIC_BLOCK (i + 1);
2854 BASIC_BLOCK (i) = x;
2855 x->index = i;
2856 }
2857
2858 basic_block_info->num_elements--;
2859 n_basic_blocks--;
2860 }
2861
2862 /* Delete INSN by patching it out. Return the next insn. */
2863
2864 rtx
2865 flow_delete_insn (insn)
2866 rtx insn;
2867 {
2868 rtx prev = PREV_INSN (insn);
2869 rtx next = NEXT_INSN (insn);
2870 rtx note;
2871
2872 PREV_INSN (insn) = NULL_RTX;
2873 NEXT_INSN (insn) = NULL_RTX;
2874 INSN_DELETED_P (insn) = 1;
2875
2876 if (prev)
2877 NEXT_INSN (prev) = next;
2878 if (next)
2879 PREV_INSN (next) = prev;
2880 else
2881 set_last_insn (prev);
2882
2883 if (GET_CODE (insn) == CODE_LABEL)
2884 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2885
2886 /* If deleting a jump, decrement the use count of the label. Deleting
2887 the label itself should happen in the normal course of block merging. */
2888 if (GET_CODE (insn) == JUMP_INSN
2889 && JUMP_LABEL (insn)
2890 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2891 LABEL_NUSES (JUMP_LABEL (insn))--;
2892
2893 /* Also if deleting an insn that references a label. */
2894 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2895 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2896 LABEL_NUSES (XEXP (note, 0))--;
2897
2898 if (GET_CODE (insn) == JUMP_INSN
2899 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2900 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2901 {
2902 rtx pat = PATTERN (insn);
2903 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2904 int len = XVECLEN (pat, diff_vec_p);
2905 int i;
2906
2907 for (i = 0; i < len; i++)
2908 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2909 }
2910
2911 return next;
2912 }
2913
2914 /* True if a given label can be deleted. */
2915
2916 static int
2917 can_delete_label_p (label)
2918 rtx label;
2919 {
2920 rtx x;
2921
2922 if (LABEL_PRESERVE_P (label))
2923 return 0;
2924
2925 for (x = forced_labels; x; x = XEXP (x, 1))
2926 if (label == XEXP (x, 0))
2927 return 0;
2928 for (x = label_value_list; x; x = XEXP (x, 1))
2929 if (label == XEXP (x, 0))
2930 return 0;
2931 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2932 if (label == XEXP (x, 0))
2933 return 0;
2934
2935 /* User declared labels must be preserved. */
2936 if (LABEL_NAME (label) != 0)
2937 return 0;
2938
2939 return 1;
2940 }
2941
2942 static int
2943 tail_recursion_label_p (label)
2944 rtx label;
2945 {
2946 rtx x;
2947
2948 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2949 if (label == XEXP (x, 0))
2950 return 1;
2951
2952 return 0;
2953 }
2954
2955 /* Blocks A and B are to be merged into a single block A. The insns
2956 are already contiguous, hence `nomove'. */
2957
2958 void
2959 merge_blocks_nomove (a, b)
2960 basic_block a, b;
2961 {
2962 edge e;
2963 rtx b_head, b_end, a_end;
2964 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2965 int b_empty = 0;
2966
2967 /* If there was a CODE_LABEL beginning B, delete it. */
2968 b_head = b->head;
2969 b_end = b->end;
2970 if (GET_CODE (b_head) == CODE_LABEL)
2971 {
2972 /* Detect basic blocks with nothing but a label. This can happen
2973 in particular at the end of a function. */
2974 if (b_head == b_end)
2975 b_empty = 1;
2976 del_first = del_last = b_head;
2977 b_head = NEXT_INSN (b_head);
2978 }
2979
2980 /* Delete the basic block note. */
2981 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2982 {
2983 if (b_head == b_end)
2984 b_empty = 1;
2985 if (! del_last)
2986 del_first = b_head;
2987 del_last = b_head;
2988 b_head = NEXT_INSN (b_head);
2989 }
2990
2991 /* If there was a jump out of A, delete it. */
2992 a_end = a->end;
2993 if (GET_CODE (a_end) == JUMP_INSN)
2994 {
2995 rtx prev;
2996
2997 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2998 if (GET_CODE (prev) != NOTE
2999 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
3000 || prev == a->head)
3001 break;
3002
3003 del_first = a_end;
3004
3005 #ifdef HAVE_cc0
3006 /* If this was a conditional jump, we need to also delete
3007 the insn that set cc0. */
3008 if (prev && sets_cc0_p (prev))
3009 {
3010 rtx tmp = prev;
3011 prev = prev_nonnote_insn (prev);
3012 if (!prev)
3013 prev = a->head;
3014 del_first = tmp;
3015 }
3016 #endif
3017
3018 a_end = prev;
3019 }
3020 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
3021 del_first = NEXT_INSN (a_end);
3022
3023 /* Delete everything marked above as well as crap that might be
3024 hanging out between the two blocks. */
3025 flow_delete_insn_chain (del_first, del_last);
3026
3027 /* Normally there should only be one successor of A and that is B, but
3028 partway through the merge of blocks for conditional_execution we'll
3029 be merging a TEST block with THEN and ELSE successors. Free the
3030 whole lot of them and hope the caller knows what they're doing. */
3031 while (a->succ)
3032 remove_edge (a->succ);
3033
3034 /* Adjust the edges out of B for the new owner. */
3035 for (e = b->succ; e; e = e->succ_next)
3036 e->src = a;
3037 a->succ = b->succ;
3038
3039 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
3040 b->pred = b->succ = NULL;
3041
3042 /* Reassociate the insns of B with A. */
3043 if (!b_empty)
3044 {
3045 if (basic_block_for_insn)
3046 {
3047 BLOCK_FOR_INSN (b_head) = a;
3048 while (b_head != b_end)
3049 {
3050 b_head = NEXT_INSN (b_head);
3051 BLOCK_FOR_INSN (b_head) = a;
3052 }
3053 }
3054 a_end = b_end;
3055 }
3056 a->end = a_end;
3057
3058 expunge_block (b);
3059 }
3060
3061 /* Blocks A and B are to be merged into a single block. A has no incoming
3062 fallthru edge, so it can be moved before B without adding or modifying
3063 any jumps (aside from the jump from A to B). */
3064
3065 static int
3066 merge_blocks_move_predecessor_nojumps (a, b)
3067 basic_block a, b;
3068 {
3069 rtx start, end, barrier;
3070 int index;
3071
3072 start = a->head;
3073 end = a->end;
3074
3075 barrier = next_nonnote_insn (end);
3076 if (GET_CODE (barrier) != BARRIER)
3077 abort ();
3078 flow_delete_insn (barrier);
3079
3080 /* Move block and loop notes out of the chain so that we do not
3081 disturb their order.
3082
3083 ??? A better solution would be to squeeze out all the non-nested notes
3084 and adjust the block trees appropriately. Even better would be to have
3085 a tighter connection between block trees and rtl so that this is not
3086 necessary. */
3087 start = squeeze_notes (start, end);
3088
3089 /* Scramble the insn chain. */
3090 if (end != PREV_INSN (b->head))
3091 reorder_insns (start, end, PREV_INSN (b->head));
3092
3093 if (rtl_dump_file)
3094 {
3095 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
3096 a->index, b->index);
3097 }
3098
3099 /* Swap the records for the two blocks around. Although we are deleting B,
3100 A is now where B was and we want to compact the BB array from where
3101 A used to be. */
3102 BASIC_BLOCK (a->index) = b;
3103 BASIC_BLOCK (b->index) = a;
3104 index = a->index;
3105 a->index = b->index;
3106 b->index = index;
3107
3108 /* Now blocks A and B are contiguous. Merge them. */
3109 merge_blocks_nomove (a, b);
3110
3111 return 1;
3112 }
3113
3114 /* Blocks A and B are to be merged into a single block. B has no outgoing
3115 fallthru edge, so it can be moved after A without adding or modifying
3116 any jumps (aside from the jump from A to B). */
3117
3118 static int
3119 merge_blocks_move_successor_nojumps (a, b)
3120 basic_block a, b;
3121 {
3122 rtx start, end, barrier;
3123
3124 start = b->head;
3125 end = b->end;
3126 barrier = NEXT_INSN (end);
3127
3128 /* Recognize a jump table following block B. */
3129 if (barrier
3130 && GET_CODE (barrier) == CODE_LABEL
3131 && NEXT_INSN (barrier)
3132 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
3133 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
3134 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
3135 {
3136 end = NEXT_INSN (barrier);
3137 barrier = NEXT_INSN (end);
3138 }
3139
3140 /* There had better have been a barrier there. Delete it. */
3141 if (barrier && GET_CODE (barrier) == BARRIER)
3142 flow_delete_insn (barrier);
3143
3144 /* Move block and loop notes out of the chain so that we do not
3145 disturb their order.
3146
3147 ??? A better solution would be to squeeze out all the non-nested notes
3148 and adjust the block trees appropriately. Even better would be to have
3149 a tighter connection between block trees and rtl so that this is not
3150 necessary. */
3151 start = squeeze_notes (start, end);
3152
3153 /* Scramble the insn chain. */
3154 reorder_insns (start, end, a->end);
3155
3156 /* Now blocks A and B are contiguous. Merge them. */
3157 merge_blocks_nomove (a, b);
3158
3159 if (rtl_dump_file)
3160 {
3161 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
3162 b->index, a->index);
3163 }
3164
3165 return 1;
3166 }
3167
3168 /* Attempt to merge basic blocks that are potentially non-adjacent.
3169 Return true iff the attempt succeeded. */
3170
3171 static int
3172 merge_blocks (e, b, c, mode)
3173 edge e;
3174 basic_block b, c;
3175 int mode;
3176 {
3177 /* If C has a tail recursion label, do not merge. There is no
3178 edge recorded from the call_placeholder back to this label, as
3179 that would make optimize_sibling_and_tail_recursive_calls more
3180 complex for no gain. */
3181 if (GET_CODE (c->head) == CODE_LABEL
3182 && tail_recursion_label_p (c->head))
3183 return 0;
3184
3185 /* If B has a fallthru edge to C, no need to move anything. */
3186 if (e->flags & EDGE_FALLTHRU)
3187 {
3188 merge_blocks_nomove (b, c);
3189
3190 if (rtl_dump_file)
3191 {
3192 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
3193 b->index, c->index);
3194 }
3195
3196 return 1;
3197 }
3198 /* Otherwise we will need to move code around. Do that only if expensive
3199 transformations are allowed. */
3200 else if (mode & CLEANUP_EXPENSIVE)
3201 {
3202 edge tmp_edge, c_fallthru_edge;
3203 int c_has_outgoing_fallthru;
3204 int b_has_incoming_fallthru;
3205
3206 /* Avoid overactive code motion, as the forwarder blocks should be
3207 eliminated by edge redirection instead. One exception might have
3208 been if B is a forwarder block and C has no fallthru edge, but
3209 that should be cleaned up by bb-reorder instead. */
3210 if (forwarder_block_p (b) || forwarder_block_p (c))
3211 return 0;
3212
3213 /* We must make sure to not munge nesting of lexical blocks,
3214 and loop notes. This is done by squeezing out all the notes
3215 and leaving them there to lie. Not ideal, but functional. */
3216
3217 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
3218 if (tmp_edge->flags & EDGE_FALLTHRU)
3219 break;
3220 c_has_outgoing_fallthru = (tmp_edge != NULL);
3221 c_fallthru_edge = tmp_edge;
3222
3223 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
3224 if (tmp_edge->flags & EDGE_FALLTHRU)
3225 break;
3226 b_has_incoming_fallthru = (tmp_edge != NULL);
3227
3228 /* If B does not have an incoming fallthru, then it can be moved
3229 immediately before C without introducing or modifying jumps.
3230 C cannot be the first block, so we do not have to worry about
3231 accessing a non-existent block. */
3232 if (! b_has_incoming_fallthru)
3233 return merge_blocks_move_predecessor_nojumps (b, c);
3234
3235 /* Otherwise, we're going to try to move C after B. If C does
3236 not have an outgoing fallthru, then it can be moved
3237 immediately after B without introducing or modifying jumps. */
3238 if (! c_has_outgoing_fallthru)
3239 return merge_blocks_move_successor_nojumps (b, c);
3240
3241 /* Otherwise, we'll need to insert an extra jump, and possibly
3242 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
3243 as we don't have explicit return instructions before epilogues
3244 are generated, so give up on that case. */
3245
3246 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3247 && merge_blocks_move_successor_nojumps (b, c))
3248 {
3249 basic_block target = c_fallthru_edge->dest;
3250 rtx barrier;
3251 basic_block new;
3252
3253 /* This is a dirty hack to avoid code duplication.
3254
3255 Set the edge to point to the wrong basic block, so that
3256 redirect_edge_and_branch_force will do the trick
3257 and rewire the edge back to the original location. */
3258 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3259 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3260
3261 /* We've just created a barrier, but another barrier is
3262 already present in the stream. Avoid the duplicate. */
3263 barrier = next_nonnote_insn (new ? new->end : b->end);
3264 if (GET_CODE (barrier) != BARRIER)
3265 abort ();
3266 flow_delete_insn (barrier);
3267
3268 return 1;
3269 }
3270
3271 return 0;
3272 }
3273 return 0;
3274 }
3275
3276 /* Simplify a conditional jump around an unconditional jump.
3277 Return true if something changed. */
3278
3279 static bool
3280 try_simplify_condjump (cbranch_block)
3281 basic_block cbranch_block;
3282 {
3283 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3284 edge cbranch_jump_edge, cbranch_fallthru_edge;
3285 rtx cbranch_insn;
3286
3287 /* Verify that there are exactly two successors. */
3288 if (!cbranch_block->succ
3289 || !cbranch_block->succ->succ_next
3290 || cbranch_block->succ->succ_next->succ_next)
3291 return false;
3292
3293 /* Verify that we've got a normal conditional branch at the end
3294 of the block. */
3295 cbranch_insn = cbranch_block->end;
3296 if (!any_condjump_p (cbranch_insn))
3297 return false;
3298
3299 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3300 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3301
3302 /* The next block must not have multiple predecessors, must not
3303 be the last block in the function, and must contain just the
3304 unconditional jump. */
3305 jump_block = cbranch_fallthru_edge->dest;
3306 if (jump_block->pred->pred_next
3307 || jump_block->index == n_basic_blocks - 1
3308 || !forwarder_block_p (jump_block))
3309 return false;
3310 jump_dest_block = jump_block->succ->dest;
3311
3312 /* The conditional branch must target the block after the
3313 unconditional branch. */
3314 cbranch_dest_block = cbranch_jump_edge->dest;
3315
3316 if (!can_fallthru (jump_block, cbranch_dest_block))
3317 return false;
3318
3319 /* Invert the conditional branch. Prevent jump.c from deleting
3320 "unreachable" instructions. */
3321 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3322 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3323 {
3324 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3325 return false;
3326 }
3327
3328 if (rtl_dump_file)
3329 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3330 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3331
3332 /* Success. Update the CFG to match. Note that after this point
3333 the edge variable names appear backwards; the redirection is done
3334 this way to preserve edge profile data. */
3335 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3336 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3337 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3338 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3339
3340 /* Delete the block with the unconditional jump, and clean up the mess. */
3341 flow_delete_block (jump_block);
3342 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3343
3344 return true;
3345 }
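
/* Illustration (not part of the original source): the transformation
   above rewrites

	  if (cond) goto L2;
	  goto L1;
	L2: ...

   into

	  if (!cond) goto L1;
	L2: ...

   by inverting the conditional branch to target the unconditional jump's
   destination and deleting the block that held the unconditional jump.  */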
3346
3347 /* Attempt to forward edges leaving basic block B.
3348 Return true if successful. */
3349
3350 static bool
3351 try_forward_edges (mode, b)
3352 basic_block b;
3353 int mode;
3354 {
3355 bool changed = false;
3356 edge e, next;
3357
3358 for (e = b->succ; e ; e = next)
3359 {
3360 basic_block target, first;
3361 int counter;
3362
3363 next = e->succ_next;
3364
3365 /* Skip complex edges because we don't know how to update them.
3366
3367 Still handle fallthru edges, as we can succeed in forwarding the fallthru
3368 edge to the same place as the branch edge of a conditional branch
3369 and turn the conditional branch into an unconditional branch. */
3370 if (e->flags & EDGE_COMPLEX)
3371 continue;
3372
3373 target = first = e->dest;
3374 counter = 0;
3375
3376 /* Look for the real destination of the jump.
3377 Avoid looping forever on an infinite empty loop by counting
3378 up to n_basic_blocks. */
3379 while (forwarder_block_p (target)
3380 && target->succ->dest != EXIT_BLOCK_PTR
3381 && counter < n_basic_blocks)
3382 {
3383 /* Bypass trivial infinite loops. */
3384 if (target == target->succ->dest)
3385 counter = n_basic_blocks;
3386
3387 /* Avoid killing loop pre-headers, as they are the place the loop
3388 optimizer wants to hoist code to.
3389
3390 For fallthru forwarders, the LOOP_BEG note must appear between
3391 the header of the block and the CODE_LABEL of the loop; for non-fallthru
3392 forwarders it must appear before the JUMP_INSN. */
3393 if (mode & CLEANUP_PRE_LOOP)
3394 {
3395 rtx insn = (target->succ->flags & EDGE_FALLTHRU
3396 ? target->head : prev_nonnote_insn (target->end));
3397
3398 if (GET_CODE (insn) != NOTE)
3399 insn = NEXT_INSN (insn);
3400
3401 for (;insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
3402 insn = NEXT_INSN (insn))
3403 if (GET_CODE (insn) == NOTE
3404 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
3405 break;
3406
3407 if (GET_CODE (insn) == NOTE)
3408 break;
3409 }
3410 target = target->succ->dest, counter++;
3411 }
3412
3413 if (counter >= n_basic_blocks)
3414 {
3415 if (rtl_dump_file)
3416 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3417 target->index);
3418 }
3419 else if (target == first)
3420 ; /* We didn't do anything. */
3421 else
3422 {
3423 /* Save the values now, as the edge may get removed. */
3424 gcov_type edge_count = e->count;
3425 int edge_probability = e->probability;
3426
3427 if (redirect_edge_and_branch (e, target))
3428 {
3429 /* We successfully forwarded the edge. Now update profile
3430 data: for each edge we traversed in the chain, remove
3431 the original edge's execution count. */
3432 int edge_frequency = ((edge_probability * b->frequency
3433 + REG_BR_PROB_BASE / 2)
3434 / REG_BR_PROB_BASE);
3435
3436 do
3437 {
3438 first->count -= edge_count;
3439 first->succ->count -= edge_count;
3440 first->frequency -= edge_frequency;
3441 first = first->succ->dest;
3442 }
3443 while (first != target);
3444
3445 changed = true;
3446 }
3447 else
3448 {
3449 if (rtl_dump_file)
3450 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3451 b->index, e->dest->index, target->index);
3452 }
3453 }
3454 }
3455
3456 return changed;
3457 }
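
/* Illustration (not part of the original source): if edge B->F enters a
   chain of forwarder blocks F -> ... -> T, the loop above redirects B's
   edge straight to T and subtracts the edge's count and frequency from
   every forwarder it used to pass through, keeping the profile data
   consistent.  */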
3458
3459 /* Look through the insns at the end of BB1 and BB2 and find the longest
3460 sequence of equivalent insns. Store the first insns of that sequence
3461 in *F1 and *F2 and return the sequence length.
3462
3463 To simplify callers of this function, if the blocks match exactly,
3464 store the head of the blocks in *F1 and *F2. */
3465
3466 static int
3467 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3468 int mode ATTRIBUTE_UNUSED;
3469 basic_block bb1, bb2;
3470 rtx *f1, *f2;
3471 {
3472 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3473 int ninsns = 0;
3474
3475 /* Skip simple jumps at the end of the blocks. Complex jumps still
3476 need to be compared for equivalence, which we'll do below. */
3477
3478 i1 = bb1->end;
3479 if (onlyjump_p (i1))
3480 i1 = PREV_INSN (i1);
3481 i2 = bb2->end;
3482 if (onlyjump_p (i2))
3483 i2 = PREV_INSN (i2);
3484
3485 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3486 while (true)
3487 {
3488 /* Ignore notes. */
3489 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3490 i1 = PREV_INSN (i1);
3491 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3492 i2 = PREV_INSN (i2);
3493
3494 if (i1 == bb1->head || i2 == bb2->head)
3495 break;
3496
3497 /* Verify that I1 and I2 are equivalent. */
3498
3499 if (GET_CODE (i1) != GET_CODE (i2))
3500 break;
3501
3502 p1 = PATTERN (i1);
3503 p2 = PATTERN (i2);
3504
3505 /* If this is a CALL_INSN, compare register usage information.
3506 If we don't check this on stack register machines, the two
3507 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3508 numbers of stack registers in the same basic block.
3509 If we don't check this on machines with delay slots, a delay slot may
3510 be filled that clobbers a parameter expected by the subroutine.
3511
3512 ??? We take the simple route for now and assume that if they're
3513 equal, they were constructed identically. */
3514
3515 if (GET_CODE (i1) == CALL_INSN
3516 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3517 CALL_INSN_FUNCTION_USAGE (i2)))
3518 break;
3519
3520 #ifdef STACK_REGS
3521 /* If cross_jump_death_matters is not 0, the insn's mode
3522 indicates whether or not the insn contains any stack-like
3523 regs. */
3524
3525 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3526 {
3527 /* If register stack conversion has already been done, then
3528 death notes must also be compared before it is certain that
3529 the two instruction streams match. */
3530
3531 rtx note;
3532 HARD_REG_SET i1_regset, i2_regset;
3533
3534 CLEAR_HARD_REG_SET (i1_regset);
3535 CLEAR_HARD_REG_SET (i2_regset);
3536
3537 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3538 if (REG_NOTE_KIND (note) == REG_DEAD
3539 && STACK_REG_P (XEXP (note, 0)))
3540 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3541
3542 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3543 if (REG_NOTE_KIND (note) == REG_DEAD
3544 && STACK_REG_P (XEXP (note, 0)))
3545 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3546
3547 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3548
3549 break;
3550
3551 done:
3552 ;
3553 }
3554 #endif
3555
3556 if (GET_CODE (p1) != GET_CODE (p2))
3557 break;
3558
3559 if (! rtx_renumbered_equal_p (p1, p2))
3560 {
3561 /* The following code helps take care of G++ cleanups. */
3562 rtx equiv1 = find_reg_equal_equiv_note (i1);
3563 rtx equiv2 = find_reg_equal_equiv_note (i2);
3564
3565 if (equiv1 && equiv2
3566 /* If the equivalences are not to a constant, they may
3567 reference pseudos that no longer exist, so we can't
3568 use them. */
3569 && CONSTANT_P (XEXP (equiv1, 0))
3570 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3571 {
3572 rtx s1 = single_set (i1);
3573 rtx s2 = single_set (i2);
3574 if (s1 != 0 && s2 != 0
3575 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3576 {
3577 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3578 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3579 if (! rtx_renumbered_equal_p (p1, p2))
3580 cancel_changes (0);
3581 else if (apply_change_group ())
3582 goto win;
3583 }
3584 }
3585 break;
3586 }
3587
3588 win:
3589 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3590 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3591 {
3592 afterlast1 = last1, afterlast2 = last2;
3593 last1 = i1, last2 = i2;
3594 ninsns++;
3595 }
3596 i1 = PREV_INSN (i1);
3597 i2 = PREV_INSN (i2);
3598 }
3599
3600 #ifdef HAVE_cc0
3601 if (ninsns)
3602 {
3603 /* Don't allow the insn after a compare to be shared by
3604 cross-jumping unless the compare is also shared. */
3605 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3606 last1 = afterlast1, last2 = afterlast2, ninsns--;
3607 }
3608 #endif
3609
3610 /* Include preceding notes and labels in the cross-jump. One,
3611 this may bring us to the head of the blocks as requested above.
3612 Two, it keeps line number notes as matched as may be. */
3613 if (ninsns)
3614 {
3615 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3616 last1 = PREV_INSN (last1);
3617 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3618 last1 = PREV_INSN (last1);
3619 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3620 last2 = PREV_INSN (last2);
3621 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3622 last2 = PREV_INSN (last2);
3623
3624 *f1 = last1;
3625 *f2 = last2;
3626 }
3627
3628 return ninsns;
3629 }
3630
3631 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3632 the branch instruction. This means that if we commonize the control
3633 flow before the end of the basic block, the semantics remain unchanged.
3634
3635 We may assume that there exists one edge with a common destination. */
3636
3637 static bool
3638 outgoing_edges_match (bb1, bb2)
3639 basic_block bb1;
3640 basic_block bb2;
3641 {
3642 /* If BB1 has only one successor, we must be looking at an unconditional
3643 jump. Which, by the assumption above, means that we only need to check
3644 that BB2 has one successor. */
3645 if (bb1->succ && !bb1->succ->succ_next)
3646 return (bb2->succ && !bb2->succ->succ_next);
3647
3648 /* Match conditional jumps - this may get tricky when fallthru and branch
3649 edges are crossed. */
3650 if (bb1->succ
3651 && bb1->succ->succ_next
3652 && !bb1->succ->succ_next->succ_next
3653 && any_condjump_p (bb1->end))
3654 {
3655 edge b1, f1, b2, f2;
3656 bool reverse, match;
3657 rtx set1, set2, cond1, cond2;
3658 enum rtx_code code1, code2;
3659
3660 if (!bb2->succ
3661 || !bb2->succ->succ_next
3662 || bb1->succ->succ_next->succ_next
3663 || !any_condjump_p (bb2->end))
3664 return false;
3665
3666 b1 = BRANCH_EDGE (bb1);
3667 b2 = BRANCH_EDGE (bb2);
3668 f1 = FALLTHRU_EDGE (bb1);
3669 f2 = FALLTHRU_EDGE (bb2);
3670
3671 /* Get around possible forwarders on fallthru edges. Other cases
3672 should be optimized out already. */
3673 if (forwarder_block_p (f1->dest))
3674 f1 = f1->dest->succ;
3675 if (forwarder_block_p (f2->dest))
3676 f2 = f2->dest->succ;
3677
3678 /* To simplify use of this function, return false if there are
3679 unneeded forwarder blocks. These will get eliminated later
3680 during cleanup_cfg. */
3681 if (forwarder_block_p (f1->dest)
3682 || forwarder_block_p (f2->dest)
3683 || forwarder_block_p (b1->dest)
3684 || forwarder_block_p (b2->dest))
3685 return false;
3686
3687 if (f1->dest == f2->dest && b1->dest == b2->dest)
3688 reverse = false;
3689 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3690 reverse = true;
3691 else
3692 return false;
3693
3694 set1 = pc_set (bb1->end);
3695 set2 = pc_set (bb2->end);
3696 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3697 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3698 reverse = !reverse;
3699
3700 cond1 = XEXP (SET_SRC (set1), 0);
3701 cond2 = XEXP (SET_SRC (set2), 0);
3702 code1 = GET_CODE (cond1);
3703 if (reverse)
3704 code2 = reversed_comparison_code (cond2, bb2->end);
3705 else
3706 code2 = GET_CODE (cond2);
3707 if (code2 == UNKNOWN)
3708 return false;
3709
3710 /* Verify codes and operands match. */
3711 match = ((code1 == code2
3712 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3713 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3714 || (code1 == swap_condition (code2)
3715 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3716 XEXP (cond2, 0))
3717 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3718 XEXP (cond2, 1))));
3719
3720 /* If we return true, we will join the blocks. Which means that
3721 we will only have one branch prediction bit to work with. Thus
3722 we require the existing branches to have probabilities that are
3723 roughly similar. */
3724 /* ??? We should use bb->frequency to allow merging in infrequently
3725 executed blocks, but at the moment it is not available when
3726 cleanup_cfg is run. */
3727 if (match && !optimize_size)
3728 {
3729 rtx note1, note2;
3730 int prob1, prob2;
3731 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3732 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3733
3734 if (note1 && note2)
3735 {
3736 prob1 = INTVAL (XEXP (note1, 0));
3737 prob2 = INTVAL (XEXP (note2, 0));
3738 if (reverse)
3739 prob2 = REG_BR_PROB_BASE - prob2;
3740
3741 /* Fail if the difference in probabilities is
3742 greater than 5%. */
3743 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3744 return false;
3745 }
3746 else if (note1 || note2)
3747 return false;
3748 }
3749
3750 if (rtl_dump_file && match)
3751 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3752 bb1->index, bb2->index);
3753
3754 return match;
3755 }
3756
3757 /* ??? We can handle computed jumps too. This may be important for
3758 inlined functions containing switch statements. Also jumps w/o
3759 fallthru edges can be handled by simply matching the whole insn. */
3760 return false;
3761 }
3762
3763 /* E1 and E2 are edges with the same destination block. Search their
3764 predecessors for common code. If found, redirect control flow from
3765 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3766
3767 static bool
3768 try_crossjump_to_edge (mode, e1, e2)
3769 int mode;
3770 edge e1, e2;
3771 {
3772 int nmatch;
3773 basic_block src1 = e1->src, src2 = e2->src;
3774 basic_block redirect_to;
3775 rtx newpos1, newpos2;
3776 edge s;
3777 rtx last;
3778 rtx label;
3779 rtx note;
3780
3781 /* Search backward through forwarder blocks. We don't need to worry
3782 about multiple entry or chained forwarders, as they will be optimized
3783 away. We do this to look past the unconditional jump following a
3784 conditional jump that is required due to the current CFG shape. */
3785 if (src1->pred
3786 && !src1->pred->pred_next
3787 && forwarder_block_p (src1))
3788 {
3789 e1 = src1->pred;
3790 src1 = e1->src;
3791 }
3792 if (src2->pred
3793 && !src2->pred->pred_next
3794 && forwarder_block_p (src2))
3795 {
3796 e2 = src2->pred;
3797 src2 = e2->src;
3798 }
3799
3800 /* Nothing to do if we reach ENTRY, or a common source block. */
3801 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3802 return false;
3803 if (src1 == src2)
3804 return false;
3805
3806 /* Seeing more than one forwarder block would confuse us later... */
3807 if (forwarder_block_p (e1->dest)
3808 && forwarder_block_p (e1->dest->succ->dest))
3809 return false;
3810 if (forwarder_block_p (e2->dest)
3811 && forwarder_block_p (e2->dest->succ->dest))
3812 return false;
3813
3814 /* Likewise with dead code (possibly newly created by the other optimizations
3815 of cleanup_cfg). */
3816 if (!src1->pred || !src2->pred)
3817 return false;
3818
3819 /* Likewise with complex edges.
3820 ??? We should be able to handle most complex edges later with some
3821 care. */
3822 if (e1->flags & EDGE_COMPLEX)
3823 return false;
3824
3825 /* Look for the common insn sequence, part the first ... */
3826 if (!outgoing_edges_match (src1, src2))
3827 return false;
3828
3829 /* ... and part the second. */
3830 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3831 if (!nmatch)
3832 return false;
3833
3834 /* Avoid splitting if possible. */
3835 if (newpos2 == src2->head)
3836 redirect_to = src2;
3837 else
3838 {
3839 if (rtl_dump_file)
3840 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3841 src2->index, nmatch);
3842 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3843 }
3844
3845 if (rtl_dump_file)
3846 fprintf (rtl_dump_file,
3847 "Cross jumping from bb %i to bb %i; %i common insns\n",
3848 src1->index, src2->index, nmatch);
3849
3850 redirect_to->count += src1->count;
3851 redirect_to->frequency += src1->frequency;
3852
3853 /* Recompute the frequencies and counts of outgoing edges. */
3854 for (s = redirect_to->succ; s; s = s->succ_next)
3855 {
3856 edge s2;
3857 basic_block d = s->dest;
3858
3859 if (forwarder_block_p (d))
3860 d = d->succ->dest;
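/* Find the corresponding edge out of SRC1: the one whose (forwarder-resolved)
destination matches S's. The outgoing edges of SRC1 and SRC2 were already
verified to match, so the loop terminates by finding it. */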
3861 for (s2 = src1->succ; ; s2 = s2->succ_next)
3862 {
3863 basic_block d2 = s2->dest;
3864 if (forwarder_block_p (d2))
3865 d2 = d2->succ->dest;
3866 if (d == d2)
3867 break;
3868 }
3869 s->count += s2->count;
3870
3871 /* Take care to update possible forwarder blocks. We verified
3872 that there is no more than one in the chain, so we can't run
3873 into an infinite loop. */
3874 if (forwarder_block_p (s->dest))
3875 {
3876 s->dest->succ->count += s2->count;
3877 s->dest->count += s2->count;
3878 s->dest->frequency += EDGE_FREQUENCY (s);
3879 }
3880 if (forwarder_block_p (s2->dest))
3881 {
3882 s2->dest->succ->count -= s2->count;
3883 s2->dest->count -= s2->count;
3884 s2->dest->frequency -= EDGE_FREQUENCY (s);
3885 }
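/* Combine the branch probabilities of the two merged blocks, weighting
each by its block's execution frequency; when both frequencies are zero,
fall back to a simple average. */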
3886 if (!redirect_to->frequency && !src1->frequency)
3887 s->probability = (s->probability + s2->probability) / 2;
3888 else
3889 s->probability =
3890 ((s->probability * redirect_to->frequency +
3891 s2->probability * src1->frequency)
3892 / (redirect_to->frequency + src1->frequency));
3893 }
3894
3895 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3896 if (note)
3897 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3898
3899 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3900
3901 /* Skip possible basic block header. */
3902 if (GET_CODE (newpos1) == CODE_LABEL)
3903 newpos1 = NEXT_INSN (newpos1);
3904 if (GET_CODE (newpos1) == NOTE)
3905 newpos1 = NEXT_INSN (newpos1);
3906 last = src1->end;
3907
3908 /* Emit the jump insn. */
3909 label = block_label (redirect_to);
3910 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3911 JUMP_LABEL (src1->end) = label;
3912 LABEL_NUSES (label)++;
3913 if (basic_block_for_insn)
3914 set_block_for_new_insns (src1->end, src1);
3915
3916 /* Delete the now unreachable instructions. */
3917 flow_delete_insn_chain (newpos1, last);
3918
3919 /* Make sure there is a barrier after the new jump. */
3920 last = next_nonnote_insn (src1->end);
3921 if (!last || GET_CODE (last) != BARRIER)
3922 emit_barrier_after (src1->end);
3923
3924 /* Update CFG. */
3925 while (src1->succ)
3926 remove_edge (src1->succ);
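/* Give SRC1 a single unconditional successor: the block holding the
shared code. */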
3927 make_edge (NULL, src1, redirect_to, 0);
3928 src1->succ->probability = REG_BR_PROB_BASE;
3929 src1->succ->count = src1->count;
3930
3931 return true;
3932 }
3933
3934 /* Search the predecessors of BB for common insn sequences. When found,
3935 share code between them by redirecting control flow. Return true if
3936 any changes were made. */
3937
3938 static bool
3939 try_crossjump_bb (mode, bb)
3940 int mode;
3941 basic_block bb;
3942 {
3943 edge e, e2, nexte2, nexte, fallthru;
3944 bool changed;
3945
3946 /* Nothing to do if there are not at least two incoming edges. */
3947 if (!bb->pred || !bb->pred->pred_next)
3948 return false;
3949
3950 /* It is always cheapest to redirect a block that ends in a branch to
3951 a block that falls through into BB, as that adds no branches to the
3952 program. We'll try that combination first. */
3953 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3954 if (fallthru->flags & EDGE_FALLTHRU)
3955 break;
3956
3957 changed = false;
3958 for (e = bb->pred; e; e = nexte)
3959 {
3960 nexte = e->pred_next;
3961
3962 /* Elide complex edges now, as neither try_crossjump_to_edge
3963 nor outgoing_edges_match can handle them. */
3964 if (e->flags & EDGE_COMPLEX)
3965 continue;
3966
3967 /* As noted above, first try with the fallthru predecessor. */
3968 if (fallthru)
3969 {
3970 /* Don't combine the fallthru edge into anything else.
3971 If there is a match, we'll do it the other way around. */
3972 if (e == fallthru)
3973 continue;
3974
3975 if (try_crossjump_to_edge (mode, e, fallthru))
3976 {
3977 changed = true;
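/* The predecessor list of BB may have changed; restart the scan
from the beginning. */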
3978 nexte = bb->pred;
3979 continue;
3980 }
3981 }
3982
3983 /* Non-obvious work limiting check: Recognize that we're going
3984 to call try_crossjump_bb on every basic block. So if we have
3985 two blocks with lots of outgoing edges (a switch) and they
3986 share lots of common destinations, then we would do the
3987 cross-jump check once for each common destination.
3988
3989 Now, if the blocks actually are cross-jump candidates, then
3990 all of their destinations will be shared. Which means that
3991 we only need to check them for cross-jump candidacy once. We
3992 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3993 choosing to do the check from the block for which the edge
3994 in question is the first successor of A. */
3995 if (e->src->succ != e)
3996 continue;
3997
3998 for (e2 = bb->pred; e2; e2 = nexte2)
3999 {
4000 nexte2 = e2->pred_next;
4001
4002 if (e2 == e)
4003 continue;
4004
4005 /* We've already checked the fallthru edge above. */
4006 if (e2 == fallthru)
4007 continue;
4008
4009 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
4010 can handle complex edges. */
4011 if (e2->flags & EDGE_COMPLEX)
4012 continue;
4013
4014 /* The "first successor" check above only prevents multiple
4015 checks of crossjump(A,B). In order to prevent redundant
4016 checks of crossjump(B,A), require that A be the block
4017 with the lowest index. */
4018 if (e->src->index > e2->src->index)
4019 continue;
4020
4021 if (try_crossjump_to_edge (mode, e, e2))
4022 {
4023 changed = true;
4024 nexte = bb->pred;
4025 break;
4026 }
4027 }
4028 }
4029
4030 return changed;
4031 }
4032
4033 /* Do simple CFG optimizations - basic block merging, simplifying of jump
4034 instructions etc. Return nonzero if changes were made. */
4035
4036 static bool
4037 try_optimize_cfg (mode)
4038 int mode;
4039 {
4040 int i;
4041 bool changed_overall = false;
4042 bool changed;
4043 int iterations = 0;
4044
4045 /* Attempt to merge blocks as made possible by edge removal. If a block
4046 has only one successor, and the successor has only one predecessor,
4047 they may be combined. */
4048
4049 do
4050 {
4051 changed = false;
4052 iterations++;
4053
4054 if (rtl_dump_file)
4055 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
4056 iterations);
4057
4058 for (i = 0; i < n_basic_blocks;)
4059 {
4060 basic_block c, b = BASIC_BLOCK (i);
4061 edge s;
4062 bool changed_here = false;
4063
4064 /* Delete trivially dead basic blocks. */
4065 while (b->pred == NULL)
4066 {
4067 c = BASIC_BLOCK (b->index - 1);
4068 if (rtl_dump_file)
4069 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
4070 flow_delete_block (b);
4071 changed = true;
4072 b = c;
4073 }
4074
4075 /* Remove code labels no longer used. Don't do this before
4076 CALL_PLACEHOLDER is removed, as some branches may be hidden
4077 within. */
4078 if (b->pred->pred_next == NULL
4079 && (b->pred->flags & EDGE_FALLTHRU)
4080 && !(b->pred->flags & EDGE_COMPLEX)
4081 && GET_CODE (b->head) == CODE_LABEL
4082 && (!(mode & CLEANUP_PRE_SIBCALL)
4083 || !tail_recursion_label_p (b->head))
4084 /* If the previous block ends with a condjump jumping to the next BB,
4085 we can't delete the label. */
4086 && (b->pred->src == ENTRY_BLOCK_PTR
4087 || !reg_mentioned_p (b->head, b->pred->src->end)))
4088 {
4089 rtx label = b->head;
4090 b->head = NEXT_INSN (b->head);
4091 flow_delete_insn_chain (label, label);
4092 if (rtl_dump_file)
4093 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
4094 b->index);
4095 }
4096
4097 /* If we fall through an empty block, we can remove it. */
4098 if (b->pred->pred_next == NULL
4099 && (b->pred->flags & EDGE_FALLTHRU)
4100 && GET_CODE (b->head) != CODE_LABEL
4101 && forwarder_block_p (b)
4102 /* Note that forwarder_block_p true ensures that there
4103 is a successor for this block. */
4104 && (b->succ->flags & EDGE_FALLTHRU)
4105 && n_basic_blocks > 1)
4106 {
4107 if (rtl_dump_file)
4108 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
4109 b->index);
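/* Pick a surviving neighbor to continue the walk from once B is gone:
the previous block normally, or block 1 when B is block 0. */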
4110 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
4111 redirect_edge_succ_nodup (b->pred, b->succ->dest);
4112 flow_delete_block (b);
4113 changed = true;
4114 b = c;
4115 }
4116
4117 /* Merge blocks. Loop because chains of blocks might be
4118 combinable. */
4119 while ((s = b->succ) != NULL
4120 && s->succ_next == NULL
4121 && !(s->flags & EDGE_COMPLEX)
4122 && (c = s->dest) != EXIT_BLOCK_PTR
4123 && c->pred->pred_next == NULL
4124 /* If the jump insn has side effects,
4125 we can't kill the edge. */
4126 && (GET_CODE (b->end) != JUMP_INSN
4127 || onlyjump_p (b->end))
4128 && merge_blocks (s, b, c, mode))
4129 changed_here = true;
4130
4131 /* Simplify branch over branch. */
4132 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
4133 changed_here = true;
4134
4135 /* If B has a single outgoing edge, but uses a non-trivial jump
4136 instruction without side-effects, we can either delete the
4137 jump entirely, or replace it with a simple unconditional jump.
4138 Use redirect_edge_and_branch to do the dirty work. */
4139 if (b->succ
4140 && ! b->succ->succ_next
4141 && b->succ->dest != EXIT_BLOCK_PTR
4142 && onlyjump_p (b->end)
4143 && redirect_edge_and_branch (b->succ, b->succ->dest))
4144 changed_here = true;
4145
4146 /* Simplify branch to branch. */
4147 if (try_forward_edges (mode, b))
4148 changed_here = true;
4149
4150 /* Look for shared code between blocks. */
4151 if ((mode & CLEANUP_CROSSJUMP)
4152 && try_crossjump_bb (mode, b))
4153 changed_here = true;
4154
4155 /* Don't get confused by the index shift caused by deleting
4156 blocks. */
4157 if (!changed_here)
4158 i = b->index + 1;
4159 else
4160 changed = true;
4161 }
4162
4163 if ((mode & CLEANUP_CROSSJUMP)
4164 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
4165 changed = true;
4166
4167 #ifdef ENABLE_CHECKING
4168 if (changed)
4169 verify_flow_info ();
4170 #endif
4171
4172 changed_overall |= changed;
4173 }
4174 while (changed);
4175 return changed_overall;
4176 }
4177
4178 /* The given edge should potentially be a fallthru edge. If that is in
4179 fact true, delete the jump and barriers that are in the way. */
4180
4181 void
4182 tidy_fallthru_edge (e, b, c)
4183 edge e;
4184 basic_block b, c;
4185 {
4186 rtx q;
4187
4188 /* ??? In a late-running flow pass, other folks may have deleted basic
4189 blocks by nopping out blocks, leaving multiple BARRIERs between here
4190 and the target label. They ought to be chastised and fixed.
4191
4192 We can also wind up with a sequence of undeletable labels between
4193 one block and the next.
4194
4195 So search through a sequence of barriers, labels, and notes for
4196 the head of block C and assert that we really do fall through. */
4197
4198 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
4199 return;
4200
4201 /* Remove what will soon cease being the jump insn from the source block.
4202 If block B consisted only of this single jump, turn it into a deleted
4203 note. */
4204 q = b->end;
4205 if (GET_CODE (q) == JUMP_INSN
4206 && onlyjump_p (q)
4207 && (any_uncondjump_p (q)
4208 || (b->succ == e && e->succ_next == NULL)))
4209 {
4210 #ifdef HAVE_cc0
4211 /* If this was a conditional jump, we need to also delete
4212 the insn that set cc0. */
4213 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
4214 q = PREV_INSN (q);
4215 #endif
4216
4217 if (b->head == q)
4218 {
4219 PUT_CODE (q, NOTE);
4220 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
4221 NOTE_SOURCE_FILE (q) = 0;
4222 }
4223 else
4224 {
4225 q = PREV_INSN (q);
4226
4227 /* We don't want a block to end on a line-number note since that has
4228 the potential of changing the code between -g and not -g. */
4229 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
4230 q = PREV_INSN (q);
4231 }
4232
4233 b->end = q;
4234 }
4235
4236 /* Selectively unlink the sequence. */
4237 if (q != PREV_INSN (c->head))
4238 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
4239
4240 e->flags |= EDGE_FALLTHRU;
4241 }
4242
4243 /* Fix up edges that now fall through, or rather should now fall through
4244 but previously required a jump around now deleted blocks. Simplify
4245 the search by only examining blocks numerically adjacent, since this
4246 is how find_basic_blocks created them. */
4247
4248 static void
4249 tidy_fallthru_edges ()
4250 {
4251 int i;
4252
4253 for (i = 1; i < n_basic_blocks; ++i)
4254 {
4255 basic_block b = BASIC_BLOCK (i - 1);
4256 basic_block c = BASIC_BLOCK (i);
4257 edge s;
4258
4259 /* We care about simple conditional or unconditional jumps with
4260 a single successor.
4261
4262 If we had a conditional branch to the next instruction when
4263 find_basic_blocks was called, then there will only be one
4264 out edge for the block which ended with the conditional
4265 branch (since we do not create duplicate edges).
4266
4267 Furthermore, the edge will be marked as a fallthru because we
4268 merge the flags for the duplicate edges. So we do not want to
4269 check that the edge is not a FALLTHRU edge. */
4270 if ((s = b->succ) != NULL
4271 && ! (s->flags & EDGE_COMPLEX)
4272 && s->succ_next == NULL
4273 && s->dest == c
4274 /* If the jump insn has side effects, we can't tidy the edge. */
4275 && (GET_CODE (b->end) != JUMP_INSN
4276 || onlyjump_p (b->end)))
4277 tidy_fallthru_edge (s, b, c);
4278 }
4279 }
4280 \f
4281 /* Perform data flow analysis.
4282 F is the first insn of the function; FLAGS is a set of PROP_* flags
4283 to be used in accumulating flow info. */
4284
4285 void
4286 life_analysis (f, file, flags)
4287 rtx f;
4288 FILE *file;
4289 int flags;
4290 {
4291 #ifdef ELIMINABLE_REGS
4292 register int i;
4293 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4294 #endif
4295
4296 /* Record which registers will be eliminated. We use this in
4297 mark_used_regs. */
4298
4299 CLEAR_HARD_REG_SET (elim_reg_set);
4300
4301 #ifdef ELIMINABLE_REGS
4302 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4303 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4304 #else
4305 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4306 #endif
4307
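/* When not optimizing, skip the bookkeeping that only the optimizers use:
log links for combine, auto-inc detection, and CFG-changing simplifications. */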
4308 if (! optimize)
4309 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
4310
4311 /* The post-reload life analysis has (on a global basis) the same
4312 registers live as were computed by reload itself; otherwise
4313 elimination offsets and such may be incorrect.
4314
4315 Reload will mark some registers as live even though they do not
4316 appear in the rtl.
4317
4318 We don't want to create new auto-incs after reload, since they
4319 are unlikely to be useful and can cause problems with shared
4320 stack slots. */
4321 if (reload_completed)
4322 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4323
4324 /* We want alias analysis information for local dead store elimination. */
4325 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4326 init_alias_analysis ();
4327
4328 /* Always remove no-op moves. Do this before other processing so
4329 that we don't have to keep re-scanning them. */
4330 delete_noop_moves (f);
4331
4332 /* Some targets can emit simpler epilogues if they know that sp was
4333 not ever modified during the function. After reload, of course,
4334 we've already emitted the epilogue so there's no sense searching. */
4335 if (! reload_completed)
4336 notice_stack_pointer_modification (f);
4337
4338 /* Allocate and zero out data structures that will record the
4339 data from lifetime analysis. */
4340 allocate_reg_life_data ();
4341 allocate_bb_life_data ();
4342
4343 /* Find the set of registers live on function exit. */
4344 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4345
4346 /* "Update" life info from zero. It'd be nice to begin the
4347 relaxation with just the exit and noreturn blocks, but that set
4348 is not immediately handy. */
4349
4350 if (flags & PROP_REG_INFO)
4351 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4352 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4353
4354 /* Clean up. */
4355 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4356 end_alias_analysis ();
4357
4358 if (file)
4359 dump_flow_info (file);
4360
4361 free_basic_block_vars (1);
4362
4363 #ifdef ENABLE_CHECKING
4364 {
4365 rtx insn;
4366
4367 /* Search for any REG_LABEL notes which reference deleted labels. */
4368 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4369 {
4370 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4371
4372 if (inote && GET_CODE (XEXP (inote, 0)) == NOTE && NOTE_LINE_NUMBER (XEXP (inote, 0)) == NOTE_INSN_DELETED_LABEL)
4373 abort ();
4374 }
4375 }
4376 #endif
4377 /* Removing dead insns should've made jumptables really dead. */
4378 delete_dead_jumptables ();
4379 }
4380
4381 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4382 Search for REGNO. If found, abort if it is not wider than word_mode. */
4383
4384 static int
4385 verify_wide_reg_1 (px, pregno)
4386 rtx *px;
4387 void *pregno;
4388 {
4389 rtx x = *px;
4390 unsigned int regno = *(int *) pregno;
4391
4392 if (GET_CODE (x) == REG && REGNO (x) == regno)
4393 {
4394 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4395 abort ();
4396 return 1;
4397 }
4398 return 0;
4399 }
4400
4401 /* A subroutine of verify_local_live_at_start. Search through insns
4402 between HEAD and END looking for register REGNO. */
4403
4404 static void
4405 verify_wide_reg (regno, head, end)
4406 int regno;
4407 rtx head, end;
4408 {
4409 while (1)
4410 {
4411 if (INSN_P (head)
4412 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4413 return;
4414 if (head == end)
4415 break;
4416 head = NEXT_INSN (head);
4417 }
4418
4419 /* We didn't find the register at all. Something's way screwy. */
4420 if (rtl_dump_file)
4421 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4422 print_rtl_and_abort ();
4423 }
4424
4425 /* A subroutine of update_life_info. Verify that there are no untoward
4426 changes in live_at_start during a local update. */
4427
4428 static void
4429 verify_local_live_at_start (new_live_at_start, bb)
4430 regset new_live_at_start;
4431 basic_block bb;
4432 {
4433 if (reload_completed)
4434 {
4435 /* After reload, there are no pseudos, nor subregs of multi-word
4436 registers. The regsets should exactly match. */
4437 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4438 {
4439 if (rtl_dump_file)
4440 {
4441 fprintf (rtl_dump_file,
4442 "live_at_start mismatch in bb %d, aborting\n",
4443 bb->index);
4444 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4445 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4446 }
4447 print_rtl_and_abort ();
4448 }
4449 }
4450 else
4451 {
4452 int i;
4453
4454 /* Find the set of changed registers. */
4455 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4456
4457 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4458 {
4459 /* No registers should die. */
4460 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4461 {
4462 if (rtl_dump_file)
4463 fprintf (rtl_dump_file,
4464 "Register %d died unexpectedly in block %d\n", i,
4465 bb->index);
4466 print_rtl_and_abort ();
4467 }
4468
4469 /* Verify that the now-live register is wider than word_mode. */
4470 verify_wide_reg (i, bb->head, bb->end);
4471 });
4472 }
4473 }
4474
4475 /* Updates life information starting with the basic blocks set in BLOCKS.
4476 If BLOCKS is null, consider it to be the universal set.
4477
4478 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4479 we are only expecting local modifications to basic blocks. If we find
4480 extra registers live at the beginning of a block, then we either killed
4481 useful data, or we have a broken split that wants data not provided.
4482 If we find registers removed from live_at_start, that means we have
4483 a broken peephole that is killing a register it shouldn't.
4484
4485 ??? This is not true in one situation -- when a pre-reload splitter
4486 generates subregs of a multi-word pseudo, current life analysis will
4487 lose the kill. So we _can_ have a pseudo go live. How irritating.
4488
4489 Including PROP_REG_INFO does not properly refresh regs_ever_live
4490 unless the caller resets it to zero. */
4491
4492 void
4493 update_life_info (blocks, extent, prop_flags)
4494 sbitmap blocks;
4495 enum update_life_extent extent;
4496 int prop_flags;
4497 {
4498 regset tmp;
4499 regset_head tmp_head;
4500 int i;
4501
4502 tmp = INITIALIZE_REG_SET (tmp_head);
4503
4504 /* Changes to the CFG are only allowed when
4505 doing a global update for the entire CFG. */
4506 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
4507 && (extent == UPDATE_LIFE_LOCAL || blocks))
4508 abort ();
4509
4510 /* For a global update, we go through the relaxation process again. */
4511 if (extent != UPDATE_LIFE_LOCAL)
4512 {
4513 for ( ; ; )
4514 {
4515 int changed = 0;
4516
4517 calculate_global_regs_live (blocks, blocks,
4518 prop_flags & (PROP_SCAN_DEAD_CODE
4519 | PROP_ALLOW_CFG_CHANGES));
4520
4521 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4522 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
4523 break;
4524
4525 /* Removing dead code may allow the CFG to be simplified which
4526 in turn may allow for further dead code detection / removal. */
4527 for (i = n_basic_blocks - 1; i >= 0; --i)
4528 {
4529 basic_block bb = BASIC_BLOCK (i);
4530
4531 COPY_REG_SET (tmp, bb->global_live_at_end);
4532 changed |= propagate_block (bb, tmp, NULL, NULL,
4533 prop_flags & (PROP_SCAN_DEAD_CODE
4534 | PROP_KILL_DEAD_CODE));
4535 }
4536
4537 if (! changed || ! try_optimize_cfg (CLEANUP_EXPENSIVE))
4538 break;
4539
4540 delete_unreachable_blocks ();
4541 mark_critical_edges ();
4542 }
4543
4544 /* If asked, remove notes from the blocks we'll update. */
4545 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4546 count_or_remove_death_notes (blocks, 1);
4547 }
4548
4549 if (blocks)
4550 {
4551 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4552 {
4553 basic_block bb = BASIC_BLOCK (i);
4554
4555 COPY_REG_SET (tmp, bb->global_live_at_end);
4556 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4557
4558 if (extent == UPDATE_LIFE_LOCAL)
4559 verify_local_live_at_start (tmp, bb);
4560 });
4561 }
4562 else
4563 {
4564 for (i = n_basic_blocks - 1; i >= 0; --i)
4565 {
4566 basic_block bb = BASIC_BLOCK (i);
4567
4568 COPY_REG_SET (tmp, bb->global_live_at_end);
4569 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4570
4571 if (extent == UPDATE_LIFE_LOCAL)
4572 verify_local_live_at_start (tmp, bb);
4573 }
4574 }
4575
4576 FREE_REG_SET (tmp);
4577
4578 if (prop_flags & PROP_REG_INFO)
4579 {
4580 /* The only pseudos that are live at the beginning of the function
4581 are those that were not set anywhere in the function. local-alloc
4582 doesn't know how to handle these correctly, so mark them as not
4583 local to any one basic block. */
4584 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4585 FIRST_PSEUDO_REGISTER, i,
4586 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4587
4588 /* We have a problem with any pseudoreg that lives across the setjmp.
4589 ANSI says that if a user variable does not change in value between
4590 the setjmp and the longjmp, then the longjmp preserves it. This
4591 includes longjmp from a place where the pseudo appears dead.
4592 (In principle, the value still exists if it is in scope.)
4593 If the pseudo goes in a hard reg, some other value may occupy
4594 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4595 Conclusion: such a pseudo must not go in a hard reg. */
4596 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4597 FIRST_PSEUDO_REGISTER, i,
4598 {
4599 if (regno_reg_rtx[i] != 0)
4600 {
4601 REG_LIVE_LENGTH (i) = -1;
4602 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4603 }
4604 });
4605 }
4606 }
4607
4608 /* Free the variables allocated by find_basic_blocks.
4609
4610 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4611
4612 void
4613 free_basic_block_vars (keep_head_end_p)
4614 int keep_head_end_p;
4615 {
4616 if (basic_block_for_insn)
4617 {
4618 VARRAY_FREE (basic_block_for_insn);
4619 basic_block_for_insn = NULL;
4620 }
4621
4622 if (! keep_head_end_p)
4623 {
4624 if (basic_block_info)
4625 {
4626 clear_edges ();
4627 VARRAY_FREE (basic_block_info);
4628 }
4629 n_basic_blocks = 0;
4630
4631 ENTRY_BLOCK_PTR->aux = NULL;
4632 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4633 EXIT_BLOCK_PTR->aux = NULL;
4634 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4635 }
4636 }
4637
4638 /* Delete any insns that copy a register to itself. */
4639
4640 void
4641 delete_noop_moves (f)
4642 rtx f ATTRIBUTE_UNUSED;
4643 {
4644 int i;
4645 rtx insn, next;
4646 basic_block bb;
4647
4648 for (i = 0; i < n_basic_blocks; i++)
4649 {
4650 bb = BASIC_BLOCK (i);
4651 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4652 {
4653 next = NEXT_INSN (insn);
4654 if (INSN_P (insn) && noop_move_p (insn))
4655 {
4656 /* Do not call flow_delete_insn here, so as not to confuse the backward
4657 pointers of the LIBCALL block. */
4658 PUT_CODE (insn, NOTE);
4659 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4660 NOTE_SOURCE_FILE (insn) = 0;
4661 }
4662 }
4663 }
4664 }
4665
4666 /* Delete any jump tables never referenced. We can't delete them at the
4667 time of removing the tablejump insn, as they are referenced by the
4668 preceding insns computing the destination, so we delay deleting them and
4669 garbage-collect them once life information is computed. */
4670 static void
4671 delete_dead_jumptables ()
4672 {
4673 rtx insn, next;
4674 for (insn = get_insns (); insn; insn = next)
4675 {
4676 next = NEXT_INSN (insn);
4677 if (GET_CODE (insn) == CODE_LABEL
4678 && LABEL_NUSES (insn) == 0
4679 && GET_CODE (next) == JUMP_INSN
4680 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4681 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4682 {
4683 if (rtl_dump_file)
4684 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4685 flow_delete_insn (NEXT_INSN (insn));
4686 flow_delete_insn (insn);
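/* Resume scanning after the jump table insn we just deleted. */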
4687 next = NEXT_INSN (next);
4688 }
4689 }
4690 }
4691
4692 /* Determine if the stack pointer is constant over the life of the function.
4693 Only useful before prologues have been emitted. */
4694
4695 static void
4696 notice_stack_pointer_modification_1 (x, pat, data)
4697 rtx x;
4698 rtx pat ATTRIBUTE_UNUSED;
4699 void *data ATTRIBUTE_UNUSED;
4700 {
4701 if (x == stack_pointer_rtx
4702 /* The stack pointer is only modified indirectly as the result
4703 of a push until later in flow. See the comments in rtl.texi
4704 regarding Embedded Side-Effects on Addresses. */
4705 || (GET_CODE (x) == MEM
4706 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4708 current_function_sp_is_unchanging = 0;
4709 }
4710
4711 static void
4712 notice_stack_pointer_modification (f)
4713 rtx f;
4714 {
4715 rtx insn;
4716
4717 /* Assume that the stack pointer is unchanging if alloca hasn't
4718 been used. */
4719 current_function_sp_is_unchanging = !current_function_calls_alloca;
4720 if (! current_function_sp_is_unchanging)
4721 return;
4722
4723 for (insn = f; insn; insn = NEXT_INSN (insn))
4724 {
4725 if (INSN_P (insn))
4726 {
4727 /* Check if insn modifies the stack pointer. */
4728 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4729 NULL);
4730 if (! current_function_sp_is_unchanging)
4731 return;
4732 }
4733 }
4734 }
4735
4736 /* Mark a register in SET. Hard registers in large modes get all
4737 of their component registers set as well. */
4738
4739 static void
4740 mark_reg (reg, xset)
4741 rtx reg;
4742 void *xset;
4743 {
4744 regset set = (regset) xset;
4745 int regno = REGNO (reg);
4746
4747 if (GET_MODE (reg) == BLKmode)
4748 abort ();
4749
4750 SET_REGNO_REG_SET (set, regno);
4751 if (regno < FIRST_PSEUDO_REGISTER)
4752 {
4753 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4754 while (--n > 0)
4755 SET_REGNO_REG_SET (set, regno + n);
4756 }
4757 }
4758
4759 /* Mark those regs which are needed at the end of the function as live
4760 at the end of the last basic block. */
4761
4762 static void
4763 mark_regs_live_at_end (set)
4764 regset set;
4765 {
4766 unsigned int i;
4767
4768 /* If exiting needs the right stack value, consider the stack pointer
4769 live at the end of the function. */
4770 if ((HAVE_epilogue && reload_completed)
4771 || ! EXIT_IGNORE_STACK
4772 || (! FRAME_POINTER_REQUIRED
4773 && ! current_function_calls_alloca
4774 && flag_omit_frame_pointer)
4775 || current_function_sp_is_unchanging)
4776 {
4777 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4778 }
4779
4780 /* Mark the frame pointer if needed at the end of the function. If
4781 we end up eliminating it, it will be removed from the live list
4782 of each basic block by reload. */
4783
4784 if (! reload_completed || frame_pointer_needed)
4785 {
4786 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4787 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4788 /* If they are different, also mark the hard frame pointer as live. */
4789 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4790 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4791 #endif
4792 }
4793
4794 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4795 /* Many architectures have a GP register even without flag_pic.
4796 Assume the pic register is not in use, or will be handled by
4797 other means, if it is not fixed. */
4798 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4799 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4800 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4801 #endif
4802
4803 /* Mark all global registers, and all registers used by the epilogue
4804 as being live at the end of the function since they may be
4805 referenced by our caller. */
4806 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4807 if (global_regs[i] || EPILOGUE_USES (i))
4808 SET_REGNO_REG_SET (set, i);
4809
4810 if (HAVE_epilogue && reload_completed)
4811 {
4812 /* Mark all call-saved registers that we actually used. */
4813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4814 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4815 SET_REGNO_REG_SET (set, i);
4816 }
4817
4818 #ifdef EH_RETURN_DATA_REGNO
4819 /* Mark the registers that will contain data for the handler. */
4820 if (reload_completed && current_function_calls_eh_return)
4821 for (i = 0; ; ++i)
4822 {
4823 unsigned regno = EH_RETURN_DATA_REGNO(i);
4824 if (regno == INVALID_REGNUM)
4825 break;
4826 SET_REGNO_REG_SET (set, regno);
4827 }
4828 #endif
4829 #ifdef EH_RETURN_STACKADJ_RTX
4830 if ((! HAVE_epilogue || ! reload_completed)
4831 && current_function_calls_eh_return)
4832 {
4833 rtx tmp = EH_RETURN_STACKADJ_RTX;
4834 if (tmp && REG_P (tmp))
4835 mark_reg (tmp, set);
4836 }
4837 #endif
4838 #ifdef EH_RETURN_HANDLER_RTX
4839 if ((! HAVE_epilogue || ! reload_completed)
4840 && current_function_calls_eh_return)
4841 {
4842 rtx tmp = EH_RETURN_HANDLER_RTX;
4843 if (tmp && REG_P (tmp))
4844 mark_reg (tmp, set);
4845 }
4846 #endif
4847
4848 /* Mark function return value. */
4849 diddle_return_value (mark_reg, set);
4850 }
4851
4852 /* Callback function for for_each_successor_phi. DATA is a regset.
4853 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4854 INSN, in the regset. */
4855
4856 static int
4857 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4858 rtx insn ATTRIBUTE_UNUSED;
4859 int dest_regno ATTRIBUTE_UNUSED;
4860 int src_regno;
4861 void *data;
4862 {
4863 regset live = (regset) data;
4864 SET_REGNO_REG_SET (live, src_regno);
4865 return 0;
4866 }
4867
4868 /* Propagate global life info around the graph of basic blocks. Begin
4869 considering blocks with their corresponding bit set in BLOCKS_IN.
4870 If BLOCKS_IN is null, consider it the universal set.
4871
4872 BLOCKS_OUT is set for every block that was changed. */
4873
4874 static void
4875 calculate_global_regs_live (blocks_in, blocks_out, flags)
4876 sbitmap blocks_in, blocks_out;
4877 int flags;
4878 {
4879 basic_block *queue, *qhead, *qtail, *qend;
4880 regset tmp, new_live_at_end, call_used;
4881 regset_head tmp_head, call_used_head;
4882 regset_head new_live_at_end_head;
4883 int i;
4884
4885 tmp = INITIALIZE_REG_SET (tmp_head);
4886 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4887 call_used = INITIALIZE_REG_SET (call_used_head);
4888
4889 /* Inconveniently, this is only readily available in hard reg set form. */
4890 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4891 if (call_used_regs[i])
4892 SET_REGNO_REG_SET (call_used, i);
4893
4894 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4895 because the `head == tail' style test for an empty queue doesn't
4896 work with a full queue. */
4897 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4898 qtail = queue;
4899 qhead = qend = queue + n_basic_blocks + 2;
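/* The queue is circular: QHEAD and QTAIL wrap around to the start of
QUEUE when they reach QEND. */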
4900
4901 /* Queue the blocks set in the initial mask. Do this in reverse block
4902 number order so that the first round is more likely to do
4903 useful work. We use AUX non-null to flag that the block is queued. */
4904 if (blocks_in)
4905 {
4906 /* Clear out the garbage that might be hanging out in bb->aux. */
4907 for (i = n_basic_blocks - 1; i >= 0; --i)
4908 BASIC_BLOCK (i)->aux = NULL;
4909
4910 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4911 {
4912 basic_block bb = BASIC_BLOCK (i);
4913 *--qhead = bb;
4914 bb->aux = bb;
4915 });
4916 }
4917 else
4918 {
4919 for (i = 0; i < n_basic_blocks; ++i)
4920 {
4921 basic_block bb = BASIC_BLOCK (i);
4922 *--qhead = bb;
4923 bb->aux = bb;
4924 }
4925 }
4926
4927 if (blocks_out)
4928 sbitmap_zero (blocks_out);
4929
4930 /* We work through the queue until there are no more blocks. What
4931 is live at the end of this block is precisely the union of what
4932 is live at the beginning of all its successors. So, we set its
4933 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4934 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4935 this block by walking through the instructions in this block in
4936 reverse order and updating as we go. If that changed
4937 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4938 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4939
4940 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4941 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4942 must either be live at the end of the block, or used within the
4943 block. In the latter case, it will certainly never disappear
4944 from GLOBAL_LIVE_AT_START. In the former case, the register
4945 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4946 for one of the successor blocks. By induction, that cannot
4947 occur. */
4948 while (qhead != qtail)
4949 {
4950 int rescan, changed;
4951 basic_block bb;
4952 edge e;
4953
4954 bb = *qhead++;
4955 if (qhead == qend)
4956 qhead = queue;
4957 bb->aux = NULL;
4958
4959 /* Begin by propagating live_at_start from the successor blocks. */
4960 CLEAR_REG_SET (new_live_at_end);
4961 for (e = bb->succ; e; e = e->succ_next)
4962 {
4963 basic_block sb = e->dest;
4964
4965 /* Call-clobbered registers die across exception and call edges. */
4966 /* ??? Abnormal call edges ignored for the moment, as this gets
4967 confused by sibling call edges, which crashes reg-stack. */
4968 if (e->flags & EDGE_EH)
4969 {
4970 bitmap_operation (tmp, sb->global_live_at_start,
4971 call_used, BITMAP_AND_COMPL);
4972 IOR_REG_SET (new_live_at_end, tmp);
4973 }
4974 else
4975 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4976 }
4977
4978 /* The all-important stack pointer must always be live. */
4979 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4980
4981 /* Before reload, there are a few registers that must be forced
4982 live everywhere -- which might not already be the case for
4983 blocks within infinite loops. */
4984 if (! reload_completed)
4985 {
4986 /* Any reference to any pseudo before reload is a potential
4987 reference of the frame pointer. */
4988 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4989
4990 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4991 /* Pseudos with argument area equivalences may require
4992 reloading via the argument pointer. */
4993 if (fixed_regs[ARG_POINTER_REGNUM])
4994 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4995 #endif
4996
4997 /* Any constant, or pseudo with constant equivalences, may
4998 require reloading from memory using the pic register. */
4999 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
5000 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
5001 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
5002 }
5003
5004 /* Regs used in phi nodes are not included in
5005 global_live_at_start, since they are live only along a
5006 particular edge. Set those regs that are live because of a
5007 phi node alternative corresponding to this particular block. */
5008 if (in_ssa_form)
5009 for_each_successor_phi (bb, &set_phi_alternative_reg,
5010 new_live_at_end);
5011
5012 if (bb == ENTRY_BLOCK_PTR)
5013 {
5014 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5015 continue;
5016 }
5017
5018 /* On our first pass through this block, we'll go ahead and continue.
5019 Recognize the first pass by local_set being NULL. On subsequent passes, we
5020 get to skip out early if live_at_end wouldn't have changed. */
5021
5022 if (bb->local_set == NULL)
5023 {
5024 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5025 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5026 rescan = 1;
5027 }
5028 else
5029 {
5030 /* If any bits were removed from live_at_end, we'll have to
5031 rescan the block. This wouldn't be necessary if we had
5032 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
5033 local_live is really dependent on live_at_end. */
5034 CLEAR_REG_SET (tmp);
5035 rescan = bitmap_operation (tmp, bb->global_live_at_end,
5036 new_live_at_end, BITMAP_AND_COMPL);
5037
5038 if (! rescan)
5039 {
5040 /* If any of the registers in the new live_at_end set are
5041 conditionally set in this basic block, we must rescan.
5042 This is because conditional lifetimes at the end of the
5043 block do not just take the live_at_end set into account,
5044 but also the liveness at the start of each successor
5045 block. We can miss changes in those sets if we only
5046 compare the new live_at_end against the previous one. */
5047 CLEAR_REG_SET (tmp);
5048 rescan = bitmap_operation (tmp, new_live_at_end,
5049 bb->cond_local_set, BITMAP_AND);
5050 }
5051
5052 if (! rescan)
5053 {
5054 /* Find the set of changed bits. Take this opportunity
5055 to notice that this set is empty and early out. */
5056 CLEAR_REG_SET (tmp);
5057 changed = bitmap_operation (tmp, bb->global_live_at_end,
5058 new_live_at_end, BITMAP_XOR);
5059 if (! changed)
5060 continue;
5061
5062 /* If any of the changed bits overlap with local_set,
5063 we'll have to rescan the block. Detect overlap by
5064 the AND with ~local_set turning off bits. */
5065 rescan = bitmap_operation (tmp, tmp, bb->local_set,
5066 BITMAP_AND_COMPL);
5067 }
5068 }
5069
5070 /* Let our caller know that BB changed enough to require its
5071 death notes updated. */
5072 if (blocks_out)
5073 SET_BIT (blocks_out, bb->index);
5074
5075 if (! rescan)
5076 {
5077 /* Add to live_at_start the set of all registers in
5078 new_live_at_end that aren't in the old live_at_end. */
5079
5080 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
5081 BITMAP_AND_COMPL);
5082 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5083
5084 changed = bitmap_operation (bb->global_live_at_start,
5085 bb->global_live_at_start,
5086 tmp, BITMAP_IOR);
5087 if (! changed)
5088 continue;
5089 }
5090 else
5091 {
5092 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
5093
5094 /* Rescan the block insn by insn to turn (a copy of) live_at_end
5095 into live_at_start. */
5096 propagate_block (bb, new_live_at_end, bb->local_set,
5097 bb->cond_local_set, flags);
5098
5099 /* If live_at_start didn't change, no need to go farther. */
5100 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
5101 continue;
5102
5103 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
5104 }
5105
5106 /* Queue all predecessors of BB so that we may re-examine
5107 their live_at_end. */
5108 for (e = bb->pred; e; e = e->pred_next)
5109 {
5110 basic_block pb = e->src;
5111 if (pb->aux == NULL)
5112 {
5113 *qtail++ = pb;
5114 if (qtail == qend)
5115 qtail = queue;
5116 pb->aux = pb;
5117 }
5118 }
5119 }
5120
5121 FREE_REG_SET (tmp);
5122 FREE_REG_SET (new_live_at_end);
5123 FREE_REG_SET (call_used);
5124
5125 if (blocks_out)
5126 {
5127 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
5128 {
5129 basic_block bb = BASIC_BLOCK (i);
5130 FREE_REG_SET (bb->local_set);
5131 FREE_REG_SET (bb->cond_local_set);
5132 });
5133 }
5134 else
5135 {
5136 for (i = n_basic_blocks - 1; i >= 0; --i)
5137 {
5138 basic_block bb = BASIC_BLOCK (i);
5139 FREE_REG_SET (bb->local_set);
5140 FREE_REG_SET (bb->cond_local_set);
5141 }
5142 }
5143
5144 free (queue);
5145 }
5146 \f
5147 /* Subroutines of life analysis. */
5148
5149 /* Allocate the permanent data structures that represent the results
5150 of life analysis. Not static since used also for stupid life analysis. */
5151
5152 void
5153 allocate_bb_life_data ()
5154 {
5155 register int i;
5156
5157 for (i = 0; i < n_basic_blocks; i++)
5158 {
5159 basic_block bb = BASIC_BLOCK (i);
5160
5161 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5162 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5163 }
5164
5165 ENTRY_BLOCK_PTR->global_live_at_end
5166 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5167 EXIT_BLOCK_PTR->global_live_at_start
5168 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5169
5170 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
5171 }
5172
5173 void
5174 allocate_reg_life_data ()
5175 {
5176 int i;
5177
5178 max_regno = max_reg_num ();
5179
5180 /* Recalculate the register space, in case it has grown. Old style
5181 vector oriented regsets would set regset_{size,bytes} here also. */
5182 allocate_reg_info (max_regno, FALSE, FALSE);
5183
5184 /* Reset all the data we'll collect in propagate_block and its
5185 subroutines. */
5186 for (i = 0; i < max_regno; i++)
5187 {
5188 REG_N_SETS (i) = 0;
5189 REG_N_REFS (i) = 0;
5190 REG_N_DEATHS (i) = 0;
5191 REG_N_CALLS_CROSSED (i) = 0;
5192 REG_LIVE_LENGTH (i) = 0;
5193 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
5194 }
5195 }
5196
5197 /* Delete dead instructions for propagate_block. */
5198
5199 static void
5200 propagate_block_delete_insn (bb, insn)
5201 basic_block bb;
5202 rtx insn;
5203 {
5204 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
5205
5206 /* If the insn referred to a label, and that label was attached to
5207 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
5208 pretty much mandatory to delete it, because the ADDR_VEC may be
5209 referencing labels that no longer exist.
5210
5211 INSN may reference a deleted label, particularly when a jump
5212 table has been optimized into a direct jump. There's no
5213 real good way to fix up the reference to the deleted label
5214 when the label is deleted, so we just allow it here.
5215
5216 After dead code elimination is complete, we do search for
5217 any REG_LABEL notes which reference deleted labels as a
5218 sanity check. */
5219
5220 if (inote && GET_CODE (XEXP (inote, 0)) == CODE_LABEL)
5221 {
5222 rtx label = XEXP (inote, 0);
5223 rtx next;
5224
5225 /* The label may be forced if it has been put in the constant
5226 pool. If that is the only use, we must discard the table
5227 jump following it, but not the label itself. */
5228 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
5229 && (next = next_nonnote_insn (label)) != NULL
5230 && GET_CODE (next) == JUMP_INSN
5231 && (GET_CODE (PATTERN (next)) == ADDR_VEC
5232 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
5233 {
5234 rtx pat = PATTERN (next);
5235 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
5236 int len = XVECLEN (pat, diff_vec_p);
5237 int i;
5238
5239 for (i = 0; i < len; i++)
5240 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
5241
5242 flow_delete_insn (next);
5243 }
5244 }
5245
5246 if (bb->end == insn)
5247 bb->end = PREV_INSN (insn);
5248 flow_delete_insn (insn);
5249 }
5250
5251 /* Delete dead libcalls for propagate_block. Return the insn
5252 before the libcall. */
5253
5254 static rtx
5255 propagate_block_delete_libcall (bb, insn, note)
5256 basic_block bb;
5257 rtx insn, note;
5258 {
5259 rtx first = XEXP (note, 0);
5260 rtx before = PREV_INSN (first);
5261
5262 if (insn == bb->end)
5263 bb->end = before;
5264
5265 flow_delete_insn_chain (first, insn);
5266 return before;
5267 }
5268
5269 /* Update the life-status of regs for one insn. Return the previous insn. */
5270
5271 rtx
5272 propagate_one_insn (pbi, insn)
5273 struct propagate_block_info *pbi;
5274 rtx insn;
5275 {
5276 rtx prev = PREV_INSN (insn);
5277 int flags = pbi->flags;
5278 int insn_is_dead = 0;
5279 int libcall_is_dead = 0;
5280 rtx note;
5281 int i;
5282
5283 if (! INSN_P (insn))
5284 return prev;
5285
5286 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5287 if (flags & PROP_SCAN_DEAD_CODE)
5288 {
5289 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
5290 libcall_is_dead = (insn_is_dead && note != 0
5291 && libcall_dead_p (pbi, note, insn));
5292 }
5293
5294 /* If an instruction consists of just dead store(s) on the final pass,
5295 delete it. */
5296 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
5297 {
5298 /* If we're trying to delete a prologue or epilogue instruction
5299 that isn't flagged as possibly being dead, something is wrong.
5300 But if we are keeping the stack pointer depressed, we might well
5301 be deleting insns that are used to compute the amount to update
5302 it by, so they are fine. */
5303 if (reload_completed
5304 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5305 && (TYPE_RETURNS_STACK_DEPRESSED
5306 (TREE_TYPE (current_function_decl))))
5307 && (((HAVE_epilogue || HAVE_prologue)
5308 && prologue_epilogue_contains (insn))
5309 || (HAVE_sibcall_epilogue
5310 && sibcall_epilogue_contains (insn)))
5311 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
5312 abort ();
5313
5314 /* Record sets. Do this even for dead instructions, since they
5315 would have killed the values if they hadn't been deleted. */
5316 mark_set_regs (pbi, PATTERN (insn), insn);
5317
5318 /* CC0 is now known to be dead. Either this insn used it,
5319 in which case it doesn't anymore, or clobbered it,
5320 so the next insn can't use it. */
5321 pbi->cc0_live = 0;
5322
5323 if (libcall_is_dead)
5324 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
5325 else
5326 propagate_block_delete_insn (pbi->bb, insn);
5327
5328 return prev;
5329 }
5330
5331 /* See if this is an increment or decrement that can be merged into
5332 a following memory address. */
5333 #ifdef AUTO_INC_DEC
5334 {
5335 register rtx x = single_set (insn);
5336
5337 /* Does this instruction increment or decrement a register? */
5338 if ((flags & PROP_AUTOINC)
5339 && x != 0
5340 && GET_CODE (SET_DEST (x)) == REG
5341 && (GET_CODE (SET_SRC (x)) == PLUS
5342 || GET_CODE (SET_SRC (x)) == MINUS)
5343 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5344 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5345 /* Ok, look for a following memory ref we can combine with.
5346 If one is found, change the memory ref to a PRE_INC
5347 or PRE_DEC, cancel this insn, and return 1.
5348 Return 0 if nothing has been done. */
5349 && try_pre_increment_1 (pbi, insn))
5350 return prev;
5351 }
5352 #endif /* AUTO_INC_DEC */
5353
5354 CLEAR_REG_SET (pbi->new_set);
5355
5356 /* If this is not the final pass, and this insn is copying the value of
5357 a library call and it's dead, don't scan the insns that perform the
5358 library call, so that the call's arguments are not marked live. */
5359 if (libcall_is_dead)
5360 {
5361 /* Record the death of the dest reg. */
5362 mark_set_regs (pbi, PATTERN (insn), insn);
5363
5364 insn = XEXP (note, 0);
5365 return PREV_INSN (insn);
5366 }
5367 else if (GET_CODE (PATTERN (insn)) == SET
5368 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5369 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5370 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5371 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5372 /* We have an insn to pop a constant amount off the stack.
5373 (Such insns use PLUS regardless of the direction of the stack,
5374 and any insn to adjust the stack by a constant is always a pop.)
5375 These insns, if not dead stores, have no effect on life. */
5376 ;
5377 else
5378 {
5379 /* Any regs live at the time of a call instruction must not go
5380 in a register clobbered by calls. Find all regs now live and
5381 record this for them. */
5382
5383 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5384 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5385 { REG_N_CALLS_CROSSED (i)++; });
5386
5387 /* Record sets. Do this even for dead instructions, since they
5388 would have killed the values if they hadn't been deleted. */
5389 mark_set_regs (pbi, PATTERN (insn), insn);
5390
5391 if (GET_CODE (insn) == CALL_INSN)
5392 {
5393 register int i;
5394 rtx note, cond;
5395
5396 cond = NULL_RTX;
5397 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5398 cond = COND_EXEC_TEST (PATTERN (insn));
5399
5400 /* Non-constant calls clobber memory. */
5401 if (! CONST_OR_PURE_CALL_P (insn))
5402 {
5403 free_EXPR_LIST_list (&pbi->mem_set_list);
5404 pbi->mem_set_list_len = 0;
5405 }
5406
5407 /* There may be extra registers to be clobbered. */
5408 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5409 note;
5410 note = XEXP (note, 1))
5411 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5412 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5413 cond, insn, pbi->flags);
5414
5415 /* Calls change all call-used and global registers. */
5416 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5417 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5418 {
5419 /* We do not want REG_UNUSED notes for these registers. */
5420 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5421 cond, insn,
5422 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5423 }
5424 }
5425
5426 /* If an insn doesn't use CC0, it becomes dead since we assume
5427 that every insn clobbers it. So show it dead here;
5428 mark_used_regs will set it live if it is referenced. */
5429 pbi->cc0_live = 0;
5430
5431 /* Record uses. */
5432 if (! insn_is_dead)
5433 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5434
5435 /* Sometimes we may have inserted something before INSN (such as a move)
5436 when we make an auto-inc. So ensure we will scan those insns. */
5437 #ifdef AUTO_INC_DEC
5438 prev = PREV_INSN (insn);
5439 #endif
5440
5441 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5442 {
5443 register int i;
5444 rtx note, cond;
5445
5446 cond = NULL_RTX;
5447 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5448 cond = COND_EXEC_TEST (PATTERN (insn));
5449
5450 /* Calls use their arguments. */
5451 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5452 note;
5453 note = XEXP (note, 1))
5454 if (GET_CODE (XEXP (note, 0)) == USE)
5455 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5456 cond, insn);
5457
5458 /* The stack ptr is used (honorarily) by a CALL insn. */
5459 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5460
5461 /* Calls may also reference any of the global registers,
5462 so they are made live. */
5463 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5464 if (global_regs[i])
5465 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5466 cond, insn);
5467 }
5468 }
5469
5470 /* On final pass, update counts of how many insns in which each reg
5471 is live. */
5472 if (flags & PROP_REG_INFO)
5473 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5474 { REG_LIVE_LENGTH (i)++; });
5475
5476 return prev;
5477 }
5478
5479 /* Initialize a propagate_block_info struct for public consumption.
5480 Note that the structure itself is opaque to this file, but that
5481 the user can use the regsets provided here. */
5482
5483 struct propagate_block_info *
5484 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5485 basic_block bb;
5486 regset live, local_set, cond_local_set;
5487 int flags;
5488 {
5489 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5490
5491 pbi->bb = bb;
5492 pbi->reg_live = live;
5493 pbi->mem_set_list = NULL_RTX;
5494 pbi->mem_set_list_len = 0;
5495 pbi->local_set = local_set;
5496 pbi->cond_local_set = cond_local_set;
5497 pbi->cc0_live = 0;
5498 pbi->flags = flags;
5499
5500 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5501 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5502 else
5503 pbi->reg_next_use = NULL;
5504
5505 pbi->new_set = BITMAP_XMALLOC ();
5506
5507 #ifdef HAVE_conditional_execution
5508 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5509 free_reg_cond_life_info);
5510 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5511
5512 /* If this block ends in a conditional branch, for each register live
5513 from one side of the branch and not the other, record the register
5514 as conditionally dead. */
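  /* An illustrative example -- the register numbers here are hypothetical,
     not taken from any target.  If the block ends in
	(set (pc) (if_then_else (ne (reg 97) (const_int 0))
				(label_ref 23) (pc)))
     and (reg 105) is live at the start of the taken successor but not at
     the start of the fallthrough successor, then (reg 105) is recorded
     below as dead under the condition (eq (reg 97) (const_int 0)).  */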
5515 if (GET_CODE (bb->end) == JUMP_INSN
5516 && any_condjump_p (bb->end))
5517 {
5518 regset_head diff_head;
5519 regset diff = INITIALIZE_REG_SET (diff_head);
5520 basic_block bb_true, bb_false;
5521 rtx cond_true, cond_false, set_src;
5522 int i;
5523
5524 /* Identify the successor blocks. */
5525 bb_true = bb->succ->dest;
5526 if (bb->succ->succ_next != NULL)
5527 {
5528 bb_false = bb->succ->succ_next->dest;
5529
5530 if (bb->succ->flags & EDGE_FALLTHRU)
5531 {
5532 basic_block t = bb_false;
5533 bb_false = bb_true;
5534 bb_true = t;
5535 }
5536 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5537 abort ();
5538 }
5539 else
5540 {
5541 /* This can happen with a conditional jump to the next insn. */
5542 if (JUMP_LABEL (bb->end) != bb_true->head)
5543 abort ();
5544
5545 /* Simplest way to do nothing. */
5546 bb_false = bb_true;
5547 }
5548
5549 /* Extract the condition from the branch. */
5550 set_src = SET_SRC (pc_set (bb->end));
5551 cond_true = XEXP (set_src, 0);
5552 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5553 GET_MODE (cond_true), XEXP (cond_true, 0),
5554 XEXP (cond_true, 1));
5555 if (GET_CODE (XEXP (set_src, 1)) == PC)
5556 {
5557 rtx t = cond_false;
5558 cond_false = cond_true;
5559 cond_true = t;
5560 }
5561
5562       /* Compute which registers lead different lives in the successors.  */
5563 if (bitmap_operation (diff, bb_true->global_live_at_start,
5564 bb_false->global_live_at_start, BITMAP_XOR))
5565 {
5566 rtx reg = XEXP (cond_true, 0);
5567
5568 if (GET_CODE (reg) == SUBREG)
5569 reg = SUBREG_REG (reg);
5570
5571 if (GET_CODE (reg) != REG)
5572 abort ();
5573
5574 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5575
5576 /* For each such register, mark it conditionally dead. */
5577 EXECUTE_IF_SET_IN_REG_SET
5578 (diff, 0, i,
5579 {
5580 struct reg_cond_life_info *rcli;
5581 rtx cond;
5582
5583 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5584
5585 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5586 cond = cond_false;
5587 else
5588 cond = cond_true;
5589 rcli->condition = cond;
5590 rcli->stores = const0_rtx;
5591 rcli->orig_condition = cond;
5592
5593 splay_tree_insert (pbi->reg_cond_dead, i,
5594 (splay_tree_value) rcli);
5595 });
5596 }
5597
5598 FREE_REG_SET (diff);
5599 }
5600 #endif
5601
5602 /* If this block has no successors, any stores to the frame that aren't
5603 used later in the block are dead. So make a pass over the block
5604      recording any such stores that are made and show them dead at the end.  We do
5605 a very conservative and simple job here. */
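  /* A hypothetical illustration (the frame offset and register number are
     made up): a trailing store such as
	(set (mem:SI (plus (reg fp) (const_int -8))) (reg 60))
     whose slot is never read again before the end of the block can be
     recorded here and later treated as a dead store.  */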
5606 if (optimize
5607 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5608 && (TYPE_RETURNS_STACK_DEPRESSED
5609 (TREE_TYPE (current_function_decl))))
5610 && (flags & PROP_SCAN_DEAD_CODE)
5611 && (bb->succ == NULL
5612 || (bb->succ->succ_next == NULL
5613 && bb->succ->dest == EXIT_BLOCK_PTR
5614 && ! current_function_calls_eh_return)))
5615 {
5616 rtx insn, set;
5617 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5618 if (GET_CODE (insn) == INSN
5619 && (set = single_set (insn))
5620 && GET_CODE (SET_DEST (set)) == MEM)
5621 {
5622 rtx mem = SET_DEST (set);
5623 rtx canon_mem = canon_rtx (mem);
5624
5625 /* This optimization is performed by faking a store to the
5626 memory at the end of the block. This doesn't work for
5627	       unchanging memories because multiple stores to unchanging
5628	       memory are illegal and alias analysis doesn't consider them.  */
5629 if (RTX_UNCHANGING_P (canon_mem))
5630 continue;
5631
5632 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5633 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5634 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5635 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5636 add_to_mem_set_list (pbi, canon_mem);
5637 }
5638 }
5639
5640 return pbi;
5641 }
5642
5643 /* Release a propagate_block_info struct. */
5644
5645 void
5646 free_propagate_block_info (pbi)
5647 struct propagate_block_info *pbi;
5648 {
5649 free_EXPR_LIST_list (&pbi->mem_set_list);
5650
5651 BITMAP_XFREE (pbi->new_set);
5652
5653 #ifdef HAVE_conditional_execution
5654 splay_tree_delete (pbi->reg_cond_dead);
5655 BITMAP_XFREE (pbi->reg_cond_reg);
5656 #endif
5657
5658 if (pbi->reg_next_use)
5659 free (pbi->reg_next_use);
5660
5661 free (pbi);
5662 }
5663
5664 /* Compute the registers live at the beginning of a basic block BB from
5665 those live at the end.
5666
5667 When called, REG_LIVE contains those live at the end. On return, it
5668 contains those live at the beginning.
5669
5670 LOCAL_SET, if non-null, will be set with all registers killed
5671 unconditionally by this basic block.
5672 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5673 killed conditionally by this basic block. If there is any unconditional
5674 set of a register, then the corresponding bit will be set in LOCAL_SET
5675 and cleared in COND_LOCAL_SET.
5676 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5677 case, the resulting set will be equal to the union of the two sets that
5678 would otherwise be computed.
5679
5680 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
5681
5682 int
5683 propagate_block (bb, live, local_set, cond_local_set, flags)
5684 basic_block bb;
5685 regset live;
5686 regset local_set;
5687 regset cond_local_set;
5688 int flags;
5689 {
5690 struct propagate_block_info *pbi;
5691 rtx insn, prev;
5692 int changed;
5693
5694 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5695
5696 if (flags & PROP_REG_INFO)
5697 {
5698 register int i;
5699
5700 /* Process the regs live at the end of the block.
5701 Mark them as not local to any one basic block. */
5702 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5703 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5704 }
5705
5706 /* Scan the block an insn at a time from end to beginning. */
5707
5708 changed = 0;
5709 for (insn = bb->end;; insn = prev)
5710 {
5711 /* If this is a call to `setjmp' et al, warn if any
5712 non-volatile datum is live. */
5713 if ((flags & PROP_REG_INFO)
5714	  && GET_CODE (insn) == CALL_INSN
5715 && find_reg_note (insn, REG_SETJMP, NULL))
5716 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5717
5718 prev = propagate_one_insn (pbi, insn);
5719 changed |= NEXT_INSN (prev) != insn;
5720
5721 if (insn == bb->head)
5722 break;
5723 }
5724
5725 free_propagate_block_info (pbi);
5726
5727 return changed;
5728 }
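/* A minimal usage sketch, not part of this file's interfaces: how a caller
   might drive propagate_block for a single block, assuming it owns a
   scratch regset.  The flag choice is illustrative only.

	regset_head live_head;
	regset live = INITIALIZE_REG_SET (live_head);

	COPY_REG_SET (live, bb->global_live_at_end);
	propagate_block (bb, live, NULL, NULL, PROP_DEATH_NOTES);
	-- LIVE now holds the registers live at bb->head --
	FREE_REG_SET (live);  */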
5729 \f
5730 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5731 (SET expressions whose destinations are registers dead after the insn).
5732 NEEDED is the regset that says which regs are alive after the insn.
5733
5734 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5735
5736 If X is the entire body of an insn, NOTES contains the reg notes
5737 pertaining to the insn. */
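/* For instance -- pseudo register numbers are illustrative -- a body such
   as (set (reg 63) (reg 64)) is dead when reg 63 is not live after the
   insn, while a SET whose source is a CALL is only considered dead when
   CALL_OK is non-zero.  */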
5738
5739 static int
5740 insn_dead_p (pbi, x, call_ok, notes)
5741 struct propagate_block_info *pbi;
5742 rtx x;
5743 int call_ok;
5744 rtx notes ATTRIBUTE_UNUSED;
5745 {
5746 enum rtx_code code = GET_CODE (x);
5747
5748 #ifdef AUTO_INC_DEC
5749 /* If flow is invoked after reload, we must take existing AUTO_INC
5750      expressions into account.  */
5751 if (reload_completed)
5752 {
5753 for (; notes; notes = XEXP (notes, 1))
5754 {
5755 if (REG_NOTE_KIND (notes) == REG_INC)
5756 {
5757 int regno = REGNO (XEXP (notes, 0));
5758
5759 /* Don't delete insns to set global regs. */
5760 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5761 || REGNO_REG_SET_P (pbi->reg_live, regno))
5762 return 0;
5763 }
5764 }
5765 }
5766 #endif
5767
5768 /* If setting something that's a reg or part of one,
5769 see if that register's altered value will be live. */
5770
5771 if (code == SET)
5772 {
5773 rtx r = SET_DEST (x);
5774
5775 #ifdef HAVE_cc0
5776 if (GET_CODE (r) == CC0)
5777 return ! pbi->cc0_live;
5778 #endif
5779
5780 /* A SET that is a subroutine call cannot be dead. */
5781 if (GET_CODE (SET_SRC (x)) == CALL)
5782 {
5783 if (! call_ok)
5784 return 0;
5785 }
5786
5787 /* Don't eliminate loads from volatile memory or volatile asms. */
5788 else if (volatile_refs_p (SET_SRC (x)))
5789 return 0;
5790
5791 if (GET_CODE (r) == MEM)
5792 {
5793 rtx temp, canon_r;
5794
5795 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
5796 return 0;
5797
5798 canon_r = canon_rtx (r);
5799
5800 /* Walk the set of memory locations we are currently tracking
5801 and see if one is an identical match to this memory location.
5802 If so, this memory write is dead (remember, we're walking
5803 backwards from the end of the block to the start). Since
5804 rtx_equal_p does not check the alias set or flags, we also
5805 must have the potential for them to conflict (anti_dependence). */
5806 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5807 if (anti_dependence (r, XEXP (temp, 0)))
5808 {
5809 rtx mem = XEXP (temp, 0);
5810
5811 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
5812 && (GET_MODE_SIZE (GET_MODE (canon_r))
5813 <= GET_MODE_SIZE (GET_MODE (mem))))
5814 return 1;
5815
5816 #ifdef AUTO_INC_DEC
5817 /* Check if memory reference matches an auto increment. Only
5818 post increment/decrement or modify are valid. */
5819 if (GET_MODE (mem) == GET_MODE (r)
5820 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5821 || GET_CODE (XEXP (mem, 0)) == POST_INC
5822 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5823 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5824 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5825 return 1;
5826 #endif
5827 }
5828 }
5829 else
5830 {
5831 while (GET_CODE (r) == SUBREG
5832 || GET_CODE (r) == STRICT_LOW_PART
5833 || GET_CODE (r) == ZERO_EXTRACT)
5834 r = XEXP (r, 0);
5835
5836 if (GET_CODE (r) == REG)
5837 {
5838 int regno = REGNO (r);
5839
5840 /* Obvious. */
5841 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5842 return 0;
5843
5844 /* If this is a hard register, verify that subsequent
5845 words are not needed. */
5846 if (regno < FIRST_PSEUDO_REGISTER)
5847 {
5848 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5849
5850 while (--n > 0)
5851 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5852 return 0;
5853 }
5854
5855 /* Don't delete insns to set global regs. */
5856 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5857 return 0;
5858
5859 /* Make sure insns to set the stack pointer aren't deleted. */
5860 if (regno == STACK_POINTER_REGNUM)
5861 return 0;
5862
5863 /* ??? These bits might be redundant with the force live bits
5864 in calculate_global_regs_live. We would delete from
5865 sequential sets; whether this actually affects real code
5866 for anything but the stack pointer I don't know. */
5867 /* Make sure insns to set the frame pointer aren't deleted. */
5868 if (regno == FRAME_POINTER_REGNUM
5869 && (! reload_completed || frame_pointer_needed))
5870 return 0;
5871 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5872 if (regno == HARD_FRAME_POINTER_REGNUM
5873 && (! reload_completed || frame_pointer_needed))
5874 return 0;
5875 #endif
5876
5877 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5878 /* Make sure insns to set arg pointer are never deleted
5879 (if the arg pointer isn't fixed, there will be a USE
5880 for it, so we can treat it normally). */
5881 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5882 return 0;
5883 #endif
5884
5885 /* Otherwise, the set is dead. */
5886 return 1;
5887 }
5888 }
5889 }
5890
5891 /* If performing several activities, insn is dead if each activity
5892 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5893 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5894 worth keeping. */
5895 else if (code == PARALLEL)
5896 {
5897 int i = XVECLEN (x, 0);
5898
5899 for (i--; i >= 0; i--)
5900 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5901 && GET_CODE (XVECEXP (x, 0, i)) != USE
5902 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5903 return 0;
5904
5905 return 1;
5906 }
5907
5908 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5909 is not necessarily true for hard registers. */
5910 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5911 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5912 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5913 return 1;
5914
5915 /* We do not check other CLOBBER or USE here. An insn consisting of just
5916 a CLOBBER or just a USE should not be deleted. */
5917 return 0;
5918 }
5919
5920 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5921 return 1 if the entire library call is dead.
5922 This is true if INSN copies a register (hard or pseudo)
5923 and if the hard return reg of the call insn is dead.
5924 (The caller should have tested the destination of the SET inside
5925 INSN already for death.)
5926
5927 If this insn doesn't just copy a register, then we don't
5928 have an ordinary libcall. In that case, cse could not have
5929 managed to substitute the source for the dest later on,
5930 so we can assume the libcall is dead.
5931
5932 PBI is the block info giving pseudoregs live before this insn.
5933 NOTE is the REG_RETVAL note of the insn. */
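/* For example (register numbers are illustrative): if INSN is
   (set (reg 58) (reg:SI 0)), the final copy out of a hard return register,
   and reg 58 has already been found dead, then the whole sequence back to
   the CALL_INSN is dead provided the call's own set of (reg:SI 0) is dead
   as well.  */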
5934
5935 static int
5936 libcall_dead_p (pbi, note, insn)
5937 struct propagate_block_info *pbi;
5938 rtx note;
5939 rtx insn;
5940 {
5941 rtx x = single_set (insn);
5942
5943 if (x)
5944 {
5945 register rtx r = SET_SRC (x);
5946
5947 if (GET_CODE (r) == REG)
5948 {
5949 rtx call = XEXP (note, 0);
5950 rtx call_pat;
5951 register int i;
5952
5953 /* Find the call insn. */
5954 while (call != insn && GET_CODE (call) != CALL_INSN)
5955 call = NEXT_INSN (call);
5956
5957 /* If there is none, do nothing special,
5958 since ordinary death handling can understand these insns. */
5959 if (call == insn)
5960 return 0;
5961
5962 /* See if the hard reg holding the value is dead.
5963 If this is a PARALLEL, find the call within it. */
5964 call_pat = PATTERN (call);
5965 if (GET_CODE (call_pat) == PARALLEL)
5966 {
5967 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5968 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5969 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5970 break;
5971
5972 /* This may be a library call that is returning a value
5973 via invisible pointer. Do nothing special, since
5974 ordinary death handling can understand these insns. */
5975 if (i < 0)
5976 return 0;
5977
5978 call_pat = XVECEXP (call_pat, 0, i);
5979 }
5980
5981 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5982 }
5983 }
5984 return 1;
5985 }
5986
5987 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5988 live at function entry. Don't count global register variables, variables
5989 in registers that can be used for function arg passing, or variables in
5990 fixed hard registers. */
5991
5992 int
5993 regno_uninitialized (regno)
5994 int regno;
5995 {
5996 if (n_basic_blocks == 0
5997 || (regno < FIRST_PSEUDO_REGISTER
5998 && (global_regs[regno]
5999 || fixed_regs[regno]
6000 || FUNCTION_ARG_REGNO_P (regno))))
6001 return 0;
6002
6003 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
6004 }
6005
6006 /* 1 if register REGNO was alive at a place where `setjmp' was called
6007 and was set more than once or is an argument.
6008 Such regs may be clobbered by `longjmp'. */
6009
6010 int
6011 regno_clobbered_at_setjmp (regno)
6012 int regno;
6013 {
6014 if (n_basic_blocks == 0)
6015 return 0;
6016
6017 return ((REG_N_SETS (regno) > 1
6018 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
6019 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
6020 }
6021 \f
6022 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
6023 maximal list size; look for overlaps in mode and select the largest. */
6024 static void
6025 add_to_mem_set_list (pbi, mem)
6026 struct propagate_block_info *pbi;
6027 rtx mem;
6028 {
6029 rtx i;
6030
6031 /* We don't know how large a BLKmode store is, so we must not
6032 take them into consideration. */
6033 if (GET_MODE (mem) == BLKmode)
6034 return;
6035
6036 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
6037 {
6038 rtx e = XEXP (i, 0);
6039 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
6040 {
6041 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
6042 {
6043 #ifdef AUTO_INC_DEC
6044 /* If we must store a copy of the mem, we can just modify
6045 the mode of the stored copy. */
6046 if (pbi->flags & PROP_AUTOINC)
6047 PUT_MODE (e, GET_MODE (mem));
6048 else
6049 #endif
6050 XEXP (i, 0) = mem;
6051 }
6052 return;
6053 }
6054 }
6055
6056 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
6057 {
6058 #ifdef AUTO_INC_DEC
6059 /* Store a copy of mem, otherwise the address may be
6060 scrogged by find_auto_inc. */
6061 if (pbi->flags & PROP_AUTOINC)
6062 mem = shallow_copy_rtx (mem);
6063 #endif
6064 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
6065 pbi->mem_set_list_len++;
6066 }
6067 }
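/* Illustration (the address and modes are chosen arbitrarily): if the list
   already holds (mem:HI (reg 66)) and a store of (mem:SI (reg 66)) is
   recorded, the existing entry is widened to SImode instead of adding a
   second entry, since both share the same address and the wider store
   covers the narrower one.  */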
6068
6069 /* INSN references memory, possibly using autoincrement addressing modes.
6070 Find any entries on the mem_set_list that need to be invalidated due
6071 to an address change. */
6072
6073 static void
6074 invalidate_mems_from_autoinc (pbi, insn)
6075 struct propagate_block_info *pbi;
6076 rtx insn;
6077 {
6078   rtx note;
6079 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
6080 if (REG_NOTE_KIND (note) == REG_INC)
6081 invalidate_mems_from_set (pbi, XEXP (note, 0));
6082 }
6083
6084 /* EXP is a REG.  Remove any dependent entries from pbi->mem_set_list.  */
6085
6086 static void
6087 invalidate_mems_from_set (pbi, exp)
6088 struct propagate_block_info *pbi;
6089 rtx exp;
6090 {
6091 rtx temp = pbi->mem_set_list;
6092 rtx prev = NULL_RTX;
6093 rtx next;
6094
6095 while (temp)
6096 {
6097 next = XEXP (temp, 1);
6098 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
6099 {
6100 /* Splice this entry out of the list. */
6101 if (prev)
6102 XEXP (prev, 1) = next;
6103 else
6104 pbi->mem_set_list = next;
6105 free_EXPR_LIST_node (temp);
6106 pbi->mem_set_list_len--;
6107 }
6108 else
6109 prev = temp;
6110 temp = next;
6111 }
6112 }
6113
6114 /* Process the registers that are set within X. Their bits are set to
6115 1 in the regset DEAD, because they are dead prior to this insn.
6116
6117 If INSN is nonzero, it is the insn being processed.
6118
6119 FLAGS is the set of operations to perform. */
6120
6121 static void
6122 mark_set_regs (pbi, x, insn)
6123 struct propagate_block_info *pbi;
6124 rtx x, insn;
6125 {
6126 rtx cond = NULL_RTX;
6127 rtx link;
6128 enum rtx_code code;
6129
6130 if (insn)
6131 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6132 {
6133 if (REG_NOTE_KIND (link) == REG_INC)
6134 mark_set_1 (pbi, SET, XEXP (link, 0),
6135 (GET_CODE (x) == COND_EXEC
6136 ? COND_EXEC_TEST (x) : NULL_RTX),
6137 insn, pbi->flags);
6138 }
6139 retry:
6140 switch (code = GET_CODE (x))
6141 {
6142 case SET:
6143 case CLOBBER:
6144 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
6145 return;
6146
6147 case COND_EXEC:
6148 cond = COND_EXEC_TEST (x);
6149 x = COND_EXEC_CODE (x);
6150 goto retry;
6151
6152 case PARALLEL:
6153 {
6154 register int i;
6155 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6156 {
6157 rtx sub = XVECEXP (x, 0, i);
6158 switch (code = GET_CODE (sub))
6159 {
6160 case COND_EXEC:
6161 if (cond != NULL_RTX)
6162 abort ();
6163
6164 cond = COND_EXEC_TEST (sub);
6165 sub = COND_EXEC_CODE (sub);
6166 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
6167 break;
6168 /* Fall through. */
6169
6170 case SET:
6171 case CLOBBER:
6172 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
6173 break;
6174
6175 default:
6176 break;
6177 }
6178 }
6179 break;
6180 }
6181
6182 default:
6183 break;
6184 }
6185 }
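/* A small illustration of the dispatch above -- register numbers are
   hypothetical.  For a pattern like
	(parallel [(set (reg 63) (reg 64))
		   (clobber (reg:CC 17))])
   mark_set_1 is called once for (reg 63) with code SET and once for
   (reg:CC 17) with code CLOBBER.  */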
6186
6187 /* Process a single set, which appears in INSN. REG (which may not
6188 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
6189 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
6190    If the set is conditional (because it appears in a COND_EXEC), COND
6191 will be the condition. */
6192
6193 static void
6194 mark_set_1 (pbi, code, reg, cond, insn, flags)
6195 struct propagate_block_info *pbi;
6196 enum rtx_code code;
6197 rtx reg, cond, insn;
6198 int flags;
6199 {
6200 int regno_first = -1, regno_last = -1;
6201 unsigned long not_dead = 0;
6202 int i;
6203
6204 /* Modifying just one hardware register of a multi-reg value or just a
6205 byte field of a register does not mean the value from before this insn
6206      is now dead.  Of course, if it was dead after, it's unused now.  */
6207
6208 switch (GET_CODE (reg))
6209 {
6210 case PARALLEL:
6211 /* Some targets place small structures in registers for return values of
6212 functions. We have to detect this case specially here to get correct
6213 flow information. */
6214 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
6215 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
6216 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
6217 flags);
6218 return;
6219
6220 case ZERO_EXTRACT:
6221 case SIGN_EXTRACT:
6222 case STRICT_LOW_PART:
6223 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
6224 do
6225 reg = XEXP (reg, 0);
6226 while (GET_CODE (reg) == SUBREG
6227 || GET_CODE (reg) == ZERO_EXTRACT
6228 || GET_CODE (reg) == SIGN_EXTRACT
6229 || GET_CODE (reg) == STRICT_LOW_PART);
6230 if (GET_CODE (reg) == MEM)
6231 break;
6232 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
6233 /* Fall through. */
6234
6235 case REG:
6236 regno_last = regno_first = REGNO (reg);
6237 if (regno_first < FIRST_PSEUDO_REGISTER)
6238 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6239 break;
6240
6241 case SUBREG:
6242 if (GET_CODE (SUBREG_REG (reg)) == REG)
6243 {
6244 enum machine_mode outer_mode = GET_MODE (reg);
6245 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
6246
6247 /* Identify the range of registers affected. This is moderately
6248 tricky for hard registers. See alter_subreg. */
6249
6250 regno_last = regno_first = REGNO (SUBREG_REG (reg));
6251 if (regno_first < FIRST_PSEUDO_REGISTER)
6252 {
6253 regno_first += subreg_regno_offset (regno_first, inner_mode,
6254 SUBREG_BYTE (reg),
6255 outer_mode);
6256 regno_last = (regno_first
6257 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
6258
6259 /* Since we've just adjusted the register number ranges, make
6260 sure REG matches. Otherwise some_was_live will be clear
6261 when it shouldn't have been, and we'll create incorrect
6262 REG_UNUSED notes. */
6263 reg = gen_rtx_REG (outer_mode, regno_first);
6264 }
6265 else
6266 {
6267 /* If the number of words in the subreg is less than the number
6268 of words in the full register, we have a well-defined partial
6269 set. Otherwise the high bits are undefined.
6270
6271 This is only really applicable to pseudos, since we just took
6272 care of multi-word hard registers. */
6273 if (((GET_MODE_SIZE (outer_mode)
6274 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
6275 < ((GET_MODE_SIZE (inner_mode)
6276 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
6277 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
6278 regno_first);
6279
6280 reg = SUBREG_REG (reg);
6281 }
6282 }
6283 else
6284 reg = SUBREG_REG (reg);
6285 break;
6286
6287 default:
6288 break;
6289 }
6290
6291 /* If this set is a MEM, then it kills any aliased writes.
6292 If this set is a REG, then it kills any MEMs which use the reg. */
6293 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6294 {
6295 if (GET_CODE (reg) == REG)
6296 invalidate_mems_from_set (pbi, reg);
6297
6298      /* If the memory reference had embedded side effects (autoincrement
6299	 address modes), then we may need to kill some entries on the
6300	 memory set list.  */
6301 if (insn && GET_CODE (reg) == MEM)
6302 invalidate_mems_from_autoinc (pbi, insn);
6303
6304 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
6305 /* ??? With more effort we could track conditional memory life. */
6306 && ! cond
6307 /* There are no REG_INC notes for SP, so we can't assume we'll see
6308 everything that invalidates it. To be safe, don't eliminate any
6309	 stores through SP; none of them should be redundant anyway.  */
6310 && ! reg_mentioned_p (stack_pointer_rtx, reg))
6311 add_to_mem_set_list (pbi, canon_rtx (reg));
6312 }
6313
6314 if (GET_CODE (reg) == REG
6315 && ! (regno_first == FRAME_POINTER_REGNUM
6316 && (! reload_completed || frame_pointer_needed))
6317 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
6318 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
6319 && (! reload_completed || frame_pointer_needed))
6320 #endif
6321 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
6322 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
6323 #endif
6324 )
6325 {
6326 int some_was_live = 0, some_was_dead = 0;
6327
6328 for (i = regno_first; i <= regno_last; ++i)
6329 {
6330 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6331 if (pbi->local_set)
6332 {
6333 /* Order of the set operation matters here since both
6334 sets may be the same. */
6335 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6336 if (cond != NULL_RTX
6337 && ! REGNO_REG_SET_P (pbi->local_set, i))
6338 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6339 else
6340 SET_REGNO_REG_SET (pbi->local_set, i);
6341 }
6342 if (code != CLOBBER)
6343 SET_REGNO_REG_SET (pbi->new_set, i);
6344
6345 some_was_live |= needed_regno;
6346 some_was_dead |= ! needed_regno;
6347 }
6348
6349 #ifdef HAVE_conditional_execution
6350 /* Consider conditional death in deciding that the register needs
6351 a death note. */
6352 if (some_was_live && ! not_dead
6353 /* The stack pointer is never dead. Well, not strictly true,
6354 but it's very difficult to tell from here. Hopefully
6355 combine_stack_adjustments will fix up the most egregious
6356 errors. */
6357 && regno_first != STACK_POINTER_REGNUM)
6358 {
6359 for (i = regno_first; i <= regno_last; ++i)
6360 if (! mark_regno_cond_dead (pbi, i, cond))
6361 not_dead |= ((unsigned long) 1) << (i - regno_first);
6362 }
6363 #endif
6364
6365 /* Additional data to record if this is the final pass. */
6366 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6367 | PROP_DEATH_NOTES | PROP_AUTOINC))
6368 {
6369 register rtx y;
6370 register int blocknum = pbi->bb->index;
6371
6372 y = NULL_RTX;
6373 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6374 {
6375 y = pbi->reg_next_use[regno_first];
6376
6377 /* The next use is no longer next, since a store intervenes. */
6378 for (i = regno_first; i <= regno_last; ++i)
6379 pbi->reg_next_use[i] = 0;
6380 }
6381
6382 if (flags & PROP_REG_INFO)
6383 {
6384 for (i = regno_first; i <= regno_last; ++i)
6385 {
6386 /* Count (weighted) references, stores, etc. This counts a
6387 register twice if it is modified, but that is correct. */
6388 REG_N_SETS (i) += 1;
6389 REG_N_REFS (i) += 1;
6390 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
6391
6392 /* The insns where a reg is live are normally counted
6393 elsewhere, but we want the count to include the insn
6394 where the reg is set, and the normal counting mechanism
6395 would not count it. */
6396 REG_LIVE_LENGTH (i) += 1;
6397 }
6398
6399 /* If this is a hard reg, record this function uses the reg. */
6400 if (regno_first < FIRST_PSEUDO_REGISTER)
6401 {
6402 for (i = regno_first; i <= regno_last; i++)
6403 regs_ever_live[i] = 1;
6404 }
6405 else
6406 {
6407 /* Keep track of which basic blocks each reg appears in. */
6408 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6409 REG_BASIC_BLOCK (regno_first) = blocknum;
6410 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6411 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6412 }
6413 }
6414
6415 if (! some_was_dead)
6416 {
6417 if (flags & PROP_LOG_LINKS)
6418 {
6419 /* Make a logical link from the next following insn
6420 that uses this register, back to this insn.
6421 The following insns have already been processed.
6422
6423		 We don't build a LOG_LINK for hard registers contained
6424 in ASM_OPERANDs. If these registers get replaced,
6425 we might wind up changing the semantics of the insn,
6426 even if reload can make what appear to be valid
6427 assignments later. */
6428 if (y && (BLOCK_NUM (y) == blocknum)
6429 && (regno_first >= FIRST_PSEUDO_REGISTER
6430 || asm_noperands (PATTERN (y)) < 0))
6431 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6432 }
6433 }
6434 else if (not_dead)
6435 ;
6436 else if (! some_was_live)
6437 {
6438 if (flags & PROP_REG_INFO)
6439 REG_N_DEATHS (regno_first) += 1;
6440
6441 if (flags & PROP_DEATH_NOTES)
6442 {
6443 /* Note that dead stores have already been deleted
6444 when possible. If we get here, we have found a
6445 dead store that cannot be eliminated (because the
6446 same insn does something useful). Indicate this
6447 by marking the reg being set as dying here. */
6448 REG_NOTES (insn)
6449 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6450 }
6451 }
6452 else
6453 {
6454 if (flags & PROP_DEATH_NOTES)
6455 {
6456 /* This is a case where we have a multi-word hard register
6457 and some, but not all, of the words of the register are
6458 needed in subsequent insns. Write REG_UNUSED notes
6459 for those parts that were not needed. This case should
6460 be rare. */
6461
6462 for (i = regno_first; i <= regno_last; ++i)
6463 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6464 REG_NOTES (insn)
6465 = alloc_EXPR_LIST (REG_UNUSED,
6466 gen_rtx_REG (reg_raw_mode[i], i),
6467 REG_NOTES (insn));
6468 }
6469 }
6470 }
6471
6472 /* Mark the register as being dead. */
6473 if (some_was_live
6474 /* The stack pointer is never dead. Well, not strictly true,
6475 but it's very difficult to tell from here. Hopefully
6476 combine_stack_adjustments will fix up the most egregious
6477 errors. */
6478 && regno_first != STACK_POINTER_REGNUM)
6479 {
6480 for (i = regno_first; i <= regno_last; ++i)
6481 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6482 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6483 }
6484 }
6485 else if (GET_CODE (reg) == REG)
6486 {
6487 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6488 pbi->reg_next_use[regno_first] = 0;
6489 }
6490
6491 /* If this is the last pass and this is a SCRATCH, show it will be dying
6492 here and count it. */
6493 else if (GET_CODE (reg) == SCRATCH)
6494 {
6495 if (flags & PROP_DEATH_NOTES)
6496 REG_NOTES (insn)
6497 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6498 }
6499 }
6500 \f
6501 #ifdef HAVE_conditional_execution
6502 /* Mark REGNO conditionally dead.
6503 Return true if the register is now unconditionally dead. */
6504
6505 static int
6506 mark_regno_cond_dead (pbi, regno, cond)
6507 struct propagate_block_info *pbi;
6508 int regno;
6509 rtx cond;
6510 {
6511 /* If this is a store to a predicate register, the value of the
6512    predicate is changing; we don't know that the predicate as seen
6513 before is the same as that seen after. Flush all dependent
6514 conditions from reg_cond_dead. This will make all such
6515 conditionally live registers unconditionally live. */
6516 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6517 flush_reg_cond_reg (pbi, regno);
6518
6519 /* If this is an unconditional store, remove any conditional
6520 life that may have existed. */
6521 if (cond == NULL_RTX)
6522 splay_tree_remove (pbi->reg_cond_dead, regno);
6523 else
6524 {
6525 splay_tree_node node;
6526 struct reg_cond_life_info *rcli;
6527 rtx ncond;
6528
6529 /* Otherwise this is a conditional set. Record that fact.
6530 It may have been conditionally used, or there may be a
6531      subsequent set with a complementary condition.  */
6532
6533 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6534 if (node == NULL)
6535 {
6536 /* The register was unconditionally live previously.
6537 Record the current condition as the condition under
6538 which it is dead. */
6539 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6540 rcli->condition = cond;
6541 rcli->stores = cond;
6542 rcli->orig_condition = const0_rtx;
6543 splay_tree_insert (pbi->reg_cond_dead, regno,
6544 (splay_tree_value) rcli);
6545
6546 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6547
6548	  /* Not unconditionally dead.  */
6549 return 0;
6550 }
6551 else
6552 {
6553 /* The register was conditionally live previously.
6554 Add the new condition to the old. */
6555 rcli = (struct reg_cond_life_info *) node->value;
6556 ncond = rcli->condition;
6557 ncond = ior_reg_cond (ncond, cond, 1);
6558 if (rcli->stores == const0_rtx)
6559 rcli->stores = cond;
6560 else if (rcli->stores != const1_rtx)
6561 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6562
6563 /* If the register is now unconditionally dead, remove the entry
6564 in the splay_tree. A register is unconditionally dead if the
6565 dead condition ncond is true. A register is also unconditionally
6566 dead if the sum of all conditional stores is an unconditional
6567 store (stores is true), and the dead condition is identically the
6568 same as the original dead condition initialized at the end of
6569 the block. This is a pointer compare, not an rtx_equal_p
6570 compare. */
6571 if (ncond == const1_rtx
6572 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6573 splay_tree_remove (pbi->reg_cond_dead, regno);
6574 else
6575 {
6576 rcli->condition = ncond;
6577
6578 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6579
6580	      /* Not unconditionally dead.  */
6581 return 0;
6582 }
6583 }
6584 }
6585
6586 return 1;
6587 }
6588
6589 /* Called from splay_tree_delete for pbi->reg_cond_dead.  */
6590
6591 static void
6592 free_reg_cond_life_info (value)
6593 splay_tree_value value;
6594 {
6595 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6596 free (rcli);
6597 }
6598
6599 /* Helper function for flush_reg_cond_reg. */
6600
6601 static int
6602 flush_reg_cond_reg_1 (node, data)
6603 splay_tree_node node;
6604 void *data;
6605 {
6606 struct reg_cond_life_info *rcli;
6607 int *xdata = (int *) data;
6608 unsigned int regno = xdata[0];
6609
6610 /* Don't need to search if last flushed value was farther on in
6611 the in-order traversal. */
6612 if (xdata[1] >= (int) node->key)
6613 return 0;
6614
6615 /* Splice out portions of the expression that refer to regno. */
6616 rcli = (struct reg_cond_life_info *) node->value;
6617 rcli->condition = elim_reg_cond (rcli->condition, regno);
6618 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6619 rcli->stores = elim_reg_cond (rcli->stores, regno);
6620
6621 /* If the entire condition is now false, signal the node to be removed. */
6622 if (rcli->condition == const0_rtx)
6623 {
6624 xdata[1] = node->key;
6625 return -1;
6626 }
6627 else if (rcli->condition == const1_rtx)
6628 abort ();
6629
6630 return 0;
6631 }
6632
6633 /* Flush all (sub) expressions referring to REGNO from PBI->REG_COND_DEAD.  */
6634
6635 static void
6636 flush_reg_cond_reg (pbi, regno)
6637 struct propagate_block_info *pbi;
6638 int regno;
6639 {
6640 int pair[2];
6641
6642 pair[0] = regno;
6643 pair[1] = -1;
6644 while (splay_tree_foreach (pbi->reg_cond_dead,
6645 flush_reg_cond_reg_1, pair) == -1)
6646 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6647
6648 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6649 }
6650
6651 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6652 For ior/and, the ADD flag determines whether we want to add the new
6653 condition X to the old one unconditionally. If it is zero, we will
6654 only return a new expression if X allows us to simplify part of
6655 OLD, otherwise we return OLD unchanged to the caller.
6656 If ADD is nonzero, we will return a new condition in all cases. The
6657 toplevel caller of one of these functions should always pass 1 for
6658 ADD. */
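/* An illustrative case, assuming a target where EQ and NE on the same
   predicate register count as reverse predicates (the register number is
   hypothetical):
	ior_reg_cond ((ne (reg 97) (const_int 0)),
		      (eq (reg 97) (const_int 0)), 1)
   returns const1_rtx, since the two conditions together cover every
   execution.  */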
6659
6660 static rtx
6661 ior_reg_cond (old, x, add)
6662 rtx old, x;
6663 int add;
6664 {
6665 rtx op0, op1;
6666
6667 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6668 {
6669 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6670 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6671 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6672 return const1_rtx;
6673 if (GET_CODE (x) == GET_CODE (old)
6674 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6675 return old;
6676 if (! add)
6677 return old;
6678 return gen_rtx_IOR (0, old, x);
6679 }
6680
6681 switch (GET_CODE (old))
6682 {
6683 case IOR:
6684 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6685 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6686 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6687 {
6688 if (op0 == const0_rtx)
6689 return op1;
6690 if (op1 == const0_rtx)
6691 return op0;
6692 if (op0 == const1_rtx || op1 == const1_rtx)
6693 return const1_rtx;
6694 if (op0 == XEXP (old, 0))
6695 op0 = gen_rtx_IOR (0, op0, x);
6696 else
6697 op1 = gen_rtx_IOR (0, op1, x);
6698 return gen_rtx_IOR (0, op0, op1);
6699 }
6700 if (! add)
6701 return old;
6702 return gen_rtx_IOR (0, old, x);
6703
6704 case AND:
6705 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6706 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6707 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6708 {
6709 if (op0 == const1_rtx)
6710 return op1;
6711 if (op1 == const1_rtx)
6712 return op0;
6713 if (op0 == const0_rtx || op1 == const0_rtx)
6714 return const0_rtx;
6715 if (op0 == XEXP (old, 0))
6716 op0 = gen_rtx_IOR (0, op0, x);
6717 else
6718 op1 = gen_rtx_IOR (0, op1, x);
6719 return gen_rtx_AND (0, op0, op1);
6720 }
6721 if (! add)
6722 return old;
6723 return gen_rtx_IOR (0, old, x);
6724
6725 case NOT:
6726 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6727 if (op0 != XEXP (old, 0))
6728 return not_reg_cond (op0);
6729 if (! add)
6730 return old;
6731 return gen_rtx_IOR (0, old, x);
6732
6733 default:
6734 abort ();
6735 }
6736 }
6737
6738 static rtx
6739 not_reg_cond (x)
6740 rtx x;
6741 {
6742 enum rtx_code x_code;
6743
6744 if (x == const0_rtx)
6745 return const1_rtx;
6746 else if (x == const1_rtx)
6747 return const0_rtx;
6748 x_code = GET_CODE (x);
6749 if (x_code == NOT)
6750 return XEXP (x, 0);
6751 if (GET_RTX_CLASS (x_code) == '<'
6752 && GET_CODE (XEXP (x, 0)) == REG)
6753 {
6754 if (XEXP (x, 1) != const0_rtx)
6755 abort ();
6756
6757 return gen_rtx_fmt_ee (reverse_condition (x_code),
6758 VOIDmode, XEXP (x, 0), const0_rtx);
6759 }
6760 return gen_rtx_NOT (0, x);
6761 }
6762
6763 static rtx
6764 and_reg_cond (old, x, add)
6765 rtx old, x;
6766 int add;
6767 {
6768 rtx op0, op1;
6769
6770 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6771 {
6772 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6773 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6774 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6775 return const0_rtx;
6776 if (GET_CODE (x) == GET_CODE (old)
6777 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6778 return old;
6779 if (! add)
6780 return old;
6781 return gen_rtx_AND (0, old, x);
6782 }
6783
6784 switch (GET_CODE (old))
6785 {
6786 case IOR:
6787 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6788 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6789 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6790 {
6791 if (op0 == const0_rtx)
6792 return op1;
6793 if (op1 == const0_rtx)
6794 return op0;
6795 if (op0 == const1_rtx || op1 == const1_rtx)
6796 return const1_rtx;
6797 if (op0 == XEXP (old, 0))
6798 op0 = gen_rtx_AND (0, op0, x);
6799 else
6800 op1 = gen_rtx_AND (0, op1, x);
6801 return gen_rtx_IOR (0, op0, op1);
6802 }
6803 if (! add)
6804 return old;
6805 return gen_rtx_AND (0, old, x);
6806
6807 case AND:
6808 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6809 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6810 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6811 {
6812 if (op0 == const1_rtx)
6813 return op1;
6814 if (op1 == const1_rtx)
6815 return op0;
6816 if (op0 == const0_rtx || op1 == const0_rtx)
6817 return const0_rtx;
6818 if (op0 == XEXP (old, 0))
6819 op0 = gen_rtx_AND (0, op0, x);
6820 else
6821 op1 = gen_rtx_AND (0, op1, x);
6822 return gen_rtx_AND (0, op0, op1);
6823 }
6824 if (! add)
6825 return old;
6826
6827 /* If X is identical to one of the existing terms of the AND,
6828 then just return what we already have. */
6829 /* ??? There really should be some sort of recursive check here in
6830 case there are nested ANDs. */
6831 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6832 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6833 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6834 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6835 return old;
6836
6837 return gen_rtx_AND (0, old, x);
6838
6839 case NOT:
6840 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6841 if (op0 != XEXP (old, 0))
6842 return not_reg_cond (op0);
6843 if (! add)
6844 return old;
6845 return gen_rtx_AND (0, old, x);
6846
6847 default:
6848 abort ();
6849 }
6850 }
6851
6852 /* Given a condition X, remove references to reg REGNO and return the
6853 new condition. The removal will be done so that all conditions
6854 involving REGNO are considered to evaluate to false. This function
6855 is used when the value of REGNO changes. */
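/* For instance (hypothetical register numbers): eliminating reg 97 from
	(ior (ne (reg 97) (const_int 0)) (eq (reg 98) (const_int 0)))
   yields (eq (reg 98) (const_int 0)), because every test of reg 97 is
   taken to evaluate to false.  */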
6856
6857 static rtx
6858 elim_reg_cond (x, regno)
6859 rtx x;
6860 unsigned int regno;
6861 {
6862 rtx op0, op1;
6863
6864 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6865 {
6866 if (REGNO (XEXP (x, 0)) == regno)
6867 return const0_rtx;
6868 return x;
6869 }
6870
6871 switch (GET_CODE (x))
6872 {
6873 case AND:
6874 op0 = elim_reg_cond (XEXP (x, 0), regno);
6875 op1 = elim_reg_cond (XEXP (x, 1), regno);
6876 if (op0 == const0_rtx || op1 == const0_rtx)
6877 return const0_rtx;
6878 if (op0 == const1_rtx)
6879 return op1;
6880 if (op1 == const1_rtx)
6881 return op0;
6882 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6883 return x;
6884 return gen_rtx_AND (0, op0, op1);
6885
6886 case IOR:
6887 op0 = elim_reg_cond (XEXP (x, 0), regno);
6888 op1 = elim_reg_cond (XEXP (x, 1), regno);
6889 if (op0 == const1_rtx || op1 == const1_rtx)
6890 return const1_rtx;
6891 if (op0 == const0_rtx)
6892 return op1;
6893 if (op1 == const0_rtx)
6894 return op0;
6895 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6896 return x;
6897 return gen_rtx_IOR (0, op0, op1);
6898
6899 case NOT:
6900 op0 = elim_reg_cond (XEXP (x, 0), regno);
6901 if (op0 == const0_rtx)
6902 return const1_rtx;
6903 if (op0 == const1_rtx)
6904 return const0_rtx;
6905 if (op0 != XEXP (x, 0))
6906 return not_reg_cond (op0);
6907 return x;
6908
6909 default:
6910 abort ();
6911 }
6912 }
6913 #endif /* HAVE_conditional_execution */
6914 \f
6915 #ifdef AUTO_INC_DEC
6916
6917 /* Try to substitute the auto-inc expression INC as the address inside
6918 MEM which occurs in INSN. Currently, the address of MEM is an expression
6919 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6920 that has a single set whose source is a PLUS of INCR_REG and something
6921 else. */
6922
6923 static void
6924 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6925 struct propagate_block_info *pbi;
6926 rtx inc, insn, mem, incr, incr_reg;
6927 {
6928 int regno = REGNO (incr_reg);
6929 rtx set = single_set (incr);
6930 rtx q = SET_DEST (set);
6931 rtx y = SET_SRC (set);
6932 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6933
6934 /* Make sure this reg appears only once in this insn. */
6935 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6936 return;
6937
6938 if (dead_or_set_p (incr, incr_reg)
6939 /* Mustn't autoinc an eliminable register. */
6940 && (regno >= FIRST_PSEUDO_REGISTER
6941 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6942 {
6943 /* This is the simple case. Try to make the auto-inc. If
6944 we can't, we are done. Otherwise, we will do any
6945 needed updates below. */
6946 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6947 return;
6948 }
6949 else if (GET_CODE (q) == REG
6950 /* PREV_INSN used here to check the semi-open interval
6951 [insn,incr). */
6952 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6953 /* We must also check for sets of q as q may be
6954 a call clobbered hard register and there may
6955 be a call between PREV_INSN (insn) and incr. */
6956 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6957 {
6958 /* We have *p followed sometime later by q = p+size.
6959 Both p and q must be live afterward,
6960 and q is not used between INSN and its assignment.
6961 Change it to q = p, ...*q..., q = q+size.
6962 Then fall into the usual case. */
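      /* An RTL-level sketch of that transformation -- the pseudo register
	 numbers are illustrative:
	    ... (mem:SI (reg 70)) ...				in INSN
	    (set (reg 71) (plus (reg 70) (const_int 4)))	INCR
	 becomes
	    (set (reg 71) (reg 70))				emitted before INSN
	    ... (mem:SI (post_inc (reg 71))) ...
	 and INCR is later reduced to a no-op copy and deleted.  */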
6963 rtx insns, temp;
6964
6965 start_sequence ();
6966 emit_move_insn (q, incr_reg);
6967 insns = get_insns ();
6968 end_sequence ();
6969
6970 if (basic_block_for_insn)
6971 for (temp = insns; temp; temp = NEXT_INSN (temp))
6972 set_block_for_insn (temp, pbi->bb);
6973
6974 /* If we can't make the auto-inc, or can't make the
6975 replacement into Y, exit. There's no point in making
6976 the change below if we can't do the auto-inc and doing
6977 so is not correct in the pre-inc case. */
6978
6979 XEXP (inc, 0) = q;
6980 validate_change (insn, &XEXP (mem, 0), inc, 1);
6981 validate_change (incr, &XEXP (y, opnum), q, 1);
6982 if (! apply_change_group ())
6983 return;
6984
6985 /* We now know we'll be doing this change, so emit the
6986 new insn(s) and do the updates. */
6987 emit_insns_before (insns, insn);
6988
6989 if (pbi->bb->head == insn)
6990 pbi->bb->head = insns;
6991
6992 /* INCR will become a NOTE and INSN won't contain a
6993 use of INCR_REG. If a use of INCR_REG was just placed in
6994 the insn before INSN, make that the next use.
6995 Otherwise, invalidate it. */
6996 if (GET_CODE (PREV_INSN (insn)) == INSN
6997 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6998 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6999 pbi->reg_next_use[regno] = PREV_INSN (insn);
7000 else
7001 pbi->reg_next_use[regno] = 0;
7002
7003 incr_reg = q;
7004 regno = REGNO (q);
7005
7006 /* REGNO is now used in INCR which is below INSN, but
7007 it previously wasn't live here. If we don't mark
7008 it as live, we'll put a REG_DEAD note for it
7009 on this insn, which is incorrect. */
7010 SET_REGNO_REG_SET (pbi->reg_live, regno);
7011
7012 /* If there are any calls between INSN and INCR, show
7013 that REGNO now crosses them. */
7014 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
7015 if (GET_CODE (temp) == CALL_INSN)
7016 REG_N_CALLS_CROSSED (regno)++;
7017 }
7018 else
7019 return;
7020
7021 /* If we haven't returned, it means we were able to make the
7022 auto-inc, so update the status. First, record that this insn
7023 has an implicit side effect. */
7024
7025 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
7026
7027 /* Modify the old increment-insn to simply copy
7028 the already-incremented value of our register. */
7029 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
7030 abort ();
7031
7032 /* If that makes it a no-op (copying the register into itself) delete
7033 it so it won't appear to be a "use" and a "set" of this
7034 register. */
7035 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
7036 {
7037 /* If the original source was dead, it's dead now. */
7038 rtx note;
7039
7040 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
7041 {
7042 remove_note (incr, note);
7043 if (XEXP (note, 0) != incr_reg)
7044 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
7045 }
7046
7047 PUT_CODE (incr, NOTE);
7048 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
7049 NOTE_SOURCE_FILE (incr) = 0;
7050 }
7051
7052 if (regno >= FIRST_PSEUDO_REGISTER)
7053 {
7054 /* Count an extra reference to the reg. When a reg is
7055 incremented, spilling it is worse, so we want to make
7056 that less likely. */
7057 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7058
7059 /* Count the increment as a setting of the register,
7060 even though it isn't a SET in rtl. */
7061 REG_N_SETS (regno)++;
7062 }
7063 }
7064
7065 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
7066 reference. */
7067
7068 static void
7069 find_auto_inc (pbi, x, insn)
7070 struct propagate_block_info *pbi;
7071 rtx x;
7072 rtx insn;
7073 {
7074 rtx addr = XEXP (x, 0);
7075 HOST_WIDE_INT offset = 0;
7076 rtx set, y, incr, inc_val;
7077 int regno;
7078 int size = GET_MODE_SIZE (GET_MODE (x));
7079
7080 if (GET_CODE (insn) == JUMP_INSN)
7081 return;
7082
7083 /* Here we detect use of an index register which might be good for
7084 postincrement, postdecrement, preincrement, or predecrement. */
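  /* Illustration only -- the register number and mode are hypothetical.
     For a 4-byte access (mem:SI (reg 70)) whose next use of reg 70 is
	(set (reg 70) (plus (reg 70) (const_int 4)))
     the address may become (post_inc (reg 70)) when HAVE_POST_INCREMENT;
     an increment of -4 maps to post_dec, and matching pre-offsets map to
     pre_inc and pre_dec.  */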
7085
7086 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
7087 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
7088
7089 if (GET_CODE (addr) != REG)
7090 return;
7091
7092 regno = REGNO (addr);
7093
7094 /* Is the next use an increment that might make auto-increment? */
7095 incr = pbi->reg_next_use[regno];
7096 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
7097 return;
7098 set = single_set (incr);
7099 if (set == 0 || GET_CODE (set) != SET)
7100 return;
7101 y = SET_SRC (set);
7102
7103 if (GET_CODE (y) != PLUS)
7104 return;
7105
7106 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
7107 inc_val = XEXP (y, 1);
7108 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
7109 inc_val = XEXP (y, 0);
7110 else
7111 return;
7112
7113 if (GET_CODE (inc_val) == CONST_INT)
7114 {
7115 if (HAVE_POST_INCREMENT
7116 && (INTVAL (inc_val) == size && offset == 0))
7117 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
7118 incr, addr);
7119 else if (HAVE_POST_DECREMENT
7120 && (INTVAL (inc_val) == -size && offset == 0))
7121 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
7122 incr, addr);
7123 else if (HAVE_PRE_INCREMENT
7124 && (INTVAL (inc_val) == size && offset == size))
7125 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
7126 incr, addr);
7127 else if (HAVE_PRE_DECREMENT
7128 && (INTVAL (inc_val) == -size && offset == -size))
7129 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
7130 incr, addr);
7131 else if (HAVE_POST_MODIFY_DISP && offset == 0)
7132 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7133 gen_rtx_PLUS (Pmode,
7134 addr,
7135 inc_val)),
7136 insn, x, incr, addr);
7137 }
7138 else if (GET_CODE (inc_val) == REG
7139 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
7140 NEXT_INSN (incr)))
7141
7142 {
7143 if (HAVE_POST_MODIFY_REG && offset == 0)
7144 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
7145 gen_rtx_PLUS (Pmode,
7146 addr,
7147 inc_val)),
7148 insn, x, incr, addr);
7149 }
7150 }
7151
7152 #endif /* AUTO_INC_DEC */
7153 \f
7154 static void
7155 mark_used_reg (pbi, reg, cond, insn)
7156 struct propagate_block_info *pbi;
7157 rtx reg;
7158 rtx cond ATTRIBUTE_UNUSED;
7159 rtx insn;
7160 {
7161 unsigned int regno_first, regno_last, i;
7162 int some_was_live, some_was_dead, some_not_set;
7163
7164 regno_last = regno_first = REGNO (reg);
7165 if (regno_first < FIRST_PSEUDO_REGISTER)
7166 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
7167
7168 /* Find out if any of this register is live after this instruction. */
7169 some_was_live = some_was_dead = 0;
7170 for (i = regno_first; i <= regno_last; ++i)
7171 {
7172 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
7173 some_was_live |= needed_regno;
7174 some_was_dead |= ! needed_regno;
7175 }
7176
7177 /* Find out if any of the register was set this insn. */
7178 some_not_set = 0;
7179 for (i = regno_first; i <= regno_last; ++i)
7180 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
7181
7182 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
7183 {
7184 /* Record where each reg is used, so when the reg is set we know
7185 the next insn that uses it. */
7186 pbi->reg_next_use[regno_first] = insn;
7187 }
7188
7189 if (pbi->flags & PROP_REG_INFO)
7190 {
7191 if (regno_first < FIRST_PSEUDO_REGISTER)
7192 {
7193 /* If this is a register we are going to try to eliminate,
7194 don't mark it live here. If we are successful in
7195 eliminating it, it need not be live unless it is used for
7196 pseudos, in which case it will have been set live when it
7197 was allocated to the pseudos. If the register will not
7198 be eliminated, reload will set it live at that point.
7199
7200 Otherwise, record that this function uses this register. */
7201 /* ??? The PPC backend tries to "eliminate" on the pic
7202	     register to itself.  This should be fixed.  In the meantime,
7203	     hack around it.  */
7204
7205 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
7206 && (regno_first == FRAME_POINTER_REGNUM
7207 || regno_first == ARG_POINTER_REGNUM)))
7208 for (i = regno_first; i <= regno_last; ++i)
7209 regs_ever_live[i] = 1;
7210 }
7211 else
7212 {
7213 /* Keep track of which basic block each reg appears in. */
7214
7215 register int blocknum = pbi->bb->index;
7216 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
7217 REG_BASIC_BLOCK (regno_first) = blocknum;
7218 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
7219 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
7220
7221 /* Count (weighted) number of uses of each reg. */
7222 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
7223 REG_N_REFS (regno_first)++;
7224 }
7225 }
7226
7227 /* Record and count the insns in which a reg dies. If it is used in
7228 this insn and was dead below the insn then it dies in this insn.
7229 If it was set in this insn, we do not make a REG_DEAD note;
7230 likewise if we already made such a note. */
7231 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
7232 && some_was_dead
7233 && some_not_set)
7234 {
7235 /* Check for the case where the register dying partially
7236 overlaps the register set by this insn. */
7237 if (regno_first != regno_last)
7238 for (i = regno_first; i <= regno_last; ++i)
7239 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
7240
7241 /* If none of the words in X is needed, make a REG_DEAD note.
7242 Otherwise, we must make partial REG_DEAD notes. */
7243 if (! some_was_live)
7244 {
7245 if ((pbi->flags & PROP_DEATH_NOTES)
7246 && ! find_regno_note (insn, REG_DEAD, regno_first))
7247 REG_NOTES (insn)
7248 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
7249
7250 if (pbi->flags & PROP_REG_INFO)
7251 REG_N_DEATHS (regno_first)++;
7252 }
7253 else
7254 {
7255 /* Don't make a REG_DEAD note for a part of a register
7256 that is set in the insn. */
7257 for (i = regno_first; i <= regno_last; ++i)
7258 if (! REGNO_REG_SET_P (pbi->reg_live, i)
7259 && ! dead_or_set_regno_p (insn, i))
7260 REG_NOTES (insn)
7261 = alloc_EXPR_LIST (REG_DEAD,
7262 gen_rtx_REG (reg_raw_mode[i], i),
7263 REG_NOTES (insn));
7264 }
7265 }
7266
7267 /* Mark the register as being live. */
7268 for (i = regno_first; i <= regno_last; ++i)
7269 {
7270 SET_REGNO_REG_SET (pbi->reg_live, i);
7271
7272 #ifdef HAVE_conditional_execution
7273 /* If this is a conditional use, record that fact. If it is later
7274 conditionally set, we'll know to kill the register. */
7275 if (cond != NULL_RTX)
7276 {
7277 splay_tree_node node;
7278 struct reg_cond_life_info *rcli;
7279 rtx ncond;
7280
7281 if (some_was_live)
7282 {
7283 node = splay_tree_lookup (pbi->reg_cond_dead, i);
7284 if (node == NULL)
7285 {
7286 /* The register was unconditionally live previously.
7287 No need to do anything. */
7288 }
7289 else
7290 {
7291 /* The register was conditionally live previously.
7292 Subtract the new life cond from the old death cond. */
7293 rcli = (struct reg_cond_life_info *) node->value;
7294 ncond = rcli->condition;
7295 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
7296
7297 /* If the register is now unconditionally live,
7298 remove the entry in the splay_tree. */
7299 if (ncond == const0_rtx)
7300 splay_tree_remove (pbi->reg_cond_dead, i);
7301 else
7302 {
7303 rcli->condition = ncond;
7304 SET_REGNO_REG_SET (pbi->reg_cond_reg,
7305 REGNO (XEXP (cond, 0)));
7306 }
7307 }
7308 }
7309 else
7310 {
7311 /* The register was not previously live at all. Record
7312 the condition under which it is still dead. */
7313 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
7314 rcli->condition = not_reg_cond (cond);
7315 rcli->stores = const0_rtx;
7316 rcli->orig_condition = const0_rtx;
7317 splay_tree_insert (pbi->reg_cond_dead, i,
7318 (splay_tree_value) rcli);
7319
7320 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
7321 }
7322 }
7323 else if (some_was_live)
7324 {
7325 /* The register may have been conditionally live previously, but
7326 is now unconditionally live. Remove it from the conditionally
7327 dead list, so that a conditional set won't cause us to think
7328 it dead. */
7329 splay_tree_remove (pbi->reg_cond_dead, i);
7330 }
7331 #endif
7332 }
7333 }
7334
7335 /* Scan expression X and mark each register it uses as live by setting
7336 the corresponding bit in PBI->REG_LIVE; REG_DEAD notes and register
7337 counts are also updated when the relevant PROP_* flags are set.
7338
7339 INSN is the containing instruction. If INSN is dead, this function
7340 is not called. */
7341
7342 static void
7343 mark_used_regs (pbi, x, cond, insn)
7344 struct propagate_block_info *pbi;
7345 rtx x, cond, insn;
7346 {
7347 register RTX_CODE code;
7348 register int regno;
7349 int flags = pbi->flags;
7350
7351 retry:
7352 code = GET_CODE (x);
7353 switch (code)
7354 {
7355 case LABEL_REF:
7356 case SYMBOL_REF:
7357 case CONST_INT:
7358 case CONST:
7359 case CONST_DOUBLE:
7360 case PC:
7361 case ADDR_VEC:
7362 case ADDR_DIFF_VEC:
7363 return;
7364
7365 #ifdef HAVE_cc0
7366 case CC0:
7367 pbi->cc0_live = 1;
7368 return;
7369 #endif
7370
7371 case CLOBBER:
7372 /* If we are clobbering a MEM, mark any registers inside the address
7373 as being used. */
7374 if (GET_CODE (XEXP (x, 0)) == MEM)
7375 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7376 return;
7377
7378 case MEM:
7379 /* Don't bother watching stores to mems if this is not the
7380 final pass. We'll not be deleting dead stores this round. */
7381 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7382 {
7383 /* Invalidate the data for the last MEM stored, but only if MEM is
7384 something that can be stored into. */
7385 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7386 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7387 /* Needn't clear the memory set list. */
7388 ;
7389 else
7390 {
7391 rtx temp = pbi->mem_set_list;
7392 rtx prev = NULL_RTX;
7393 rtx next;
7394
7395 while (temp)
7396 {
7397 next = XEXP (temp, 1);
7398 if (anti_dependence (XEXP (temp, 0), x))
7399 {
7400 /* Splice temp out of the list. */
7401 if (prev)
7402 XEXP (prev, 1) = next;
7403 else
7404 pbi->mem_set_list = next;
7405 free_EXPR_LIST_node (temp);
7406 pbi->mem_set_list_len--;
7407 }
7408 else
7409 prev = temp;
7410 temp = next;
7411 }
7412 }
7413
7414 /* If the memory reference had embedded side effects (autoincrement
7415 address modes), then we may need to kill some entries on the
7416 memory set list. */
7417 if (insn)
7418 invalidate_mems_from_autoinc (pbi, insn);
7419 }
7420
7421 #ifdef AUTO_INC_DEC
7422 if (flags & PROP_AUTOINC)
7423 find_auto_inc (pbi, x, insn);
7424 #endif
7425 break;
7426
7427 case SUBREG:
7428 #ifdef CLASS_CANNOT_CHANGE_MODE
7429 if (GET_CODE (SUBREG_REG (x)) == REG
7430 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7431 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7432 GET_MODE (SUBREG_REG (x))))
7433 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7434 #endif
7435
7436 /* While we're here, optimize this case. */
7437 x = SUBREG_REG (x);
7438 if (GET_CODE (x) != REG)
7439 goto retry;
7440 /* Fall through. */
7441
7442 case REG:
7443 /* A register appearing here other than being set => mark it as needed. */
7444 mark_used_reg (pbi, x, cond, insn);
7445 return;
7446
7447 case SET:
7448 {
7449 register rtx testreg = SET_DEST (x);
7450 int mark_dest = 0;
7451
7452 /* If storing into MEM, don't show it as being used. But do
7453 show the address as being used. */
7454 if (GET_CODE (testreg) == MEM)
7455 {
7456 #ifdef AUTO_INC_DEC
7457 if (flags & PROP_AUTOINC)
7458 find_auto_inc (pbi, testreg, insn);
7459 #endif
7460 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7461 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7462 return;
7463 }
7464
7465 /* Storing in STRICT_LOW_PART is like storing in a reg
7466 in that this SET might be dead, so ignore it in TESTREG,
7467 but in some other ways it is like using the reg.
7468
7469 Storing in a SUBREG or a bit field is like storing the entire
7470 register in that if the register's value is not used
7471 then this SET is not needed. */
7472 while (GET_CODE (testreg) == STRICT_LOW_PART
7473 || GET_CODE (testreg) == ZERO_EXTRACT
7474 || GET_CODE (testreg) == SIGN_EXTRACT
7475 || GET_CODE (testreg) == SUBREG)
7476 {
7477 #ifdef CLASS_CANNOT_CHANGE_MODE
7478 if (GET_CODE (testreg) == SUBREG
7479 && GET_CODE (SUBREG_REG (testreg)) == REG
7480 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7481 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7482 GET_MODE (testreg)))
7483 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7484 #endif
7485
7486 /* Modifying a single register in an alternate mode
7487 does not use any of the old value. But these other
7488 ways of storing in a register do use the old value. */
7489 if (GET_CODE (testreg) == SUBREG
7490 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7491 ;
7492 else
7493 mark_dest = 1;
7494
7495 testreg = XEXP (testreg, 0);
7496 }
7497
7498 /* If this is a store into a register or group of registers,
7499 recursively scan the value being stored. */
7500
7501 if ((GET_CODE (testreg) == PARALLEL
7502 && GET_MODE (testreg) == BLKmode)
7503 || (GET_CODE (testreg) == REG
7504 && (regno = REGNO (testreg),
7505 ! (regno == FRAME_POINTER_REGNUM
7506 && (! reload_completed || frame_pointer_needed)))
7507 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7508 && ! (regno == HARD_FRAME_POINTER_REGNUM
7509 && (! reload_completed || frame_pointer_needed))
7510 #endif
7511 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7512 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7513 #endif
7514 ))
7515 {
7516 if (mark_dest)
7517 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7518 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7519 return;
7520 }
7521 }
7522 break;
7523
7524 case ASM_OPERANDS:
7525 case UNSPEC_VOLATILE:
7526 case TRAP_IF:
7527 case ASM_INPUT:
7528 {
7529 /* Traditional and volatile asm instructions must be considered to use
7530 and clobber all hard registers, all pseudo-registers and all of
7531 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7532
7533 Consider for instance a volatile asm that changes the fpu rounding
7534 mode. An insn should not be moved across this even if it only uses
7535 pseudo-regs because it might give an incorrectly rounded result.
7536
7537 ?!? Unfortunately, marking all hard registers as live causes massive
7538 problems for the register allocator and marking all pseudos as live
7539 creates mountains of uninitialized variable warnings.
7540
7541 So for now, just clear the memory set list and mark any regs
7542 we can find in ASM_OPERANDS as used. */
7543 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7544 {
7545 free_EXPR_LIST_list (&pbi->mem_set_list);
7546 pbi->mem_set_list_len = 0;
7547 }
7548
7549 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7550 We cannot just fall through here since then we would be confused
7551 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
7552 usage, does not indicate a traditional asm here. */
7553 if (code == ASM_OPERANDS)
7554 {
7555 int j;
7556
7557 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7558 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7559 }
7560 break;
7561 }
7562
7563 case COND_EXEC:
7564 if (cond != NULL_RTX)
7565 abort ();
7566
7567 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7568
7569 cond = COND_EXEC_TEST (x);
7570 x = COND_EXEC_CODE (x);
7571 goto retry;
7572
7573 case PHI:
7574 /* We _do_not_ want to scan operands of phi nodes. Operands of
7575 a phi function are evaluated only when control reaches this
7576 block along a particular edge. Therefore, regs that appear
7577 as arguments to phi should not be added to the global live at
7578 start. */
7579 return;
7580
7581 default:
7582 break;
7583 }
7584
7585 /* Recursively scan the operands of this expression. */
7586
7587 {
7588 register const char * const fmt = GET_RTX_FORMAT (code);
7589 register int i;
7590
7591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7592 {
7593 if (fmt[i] == 'e')
7594 {
7595 /* Tail recursive case: save a function call level. */
7596 if (i == 0)
7597 {
7598 x = XEXP (x, 0);
7599 goto retry;
7600 }
7601 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7602 }
7603 else if (fmt[i] == 'E')
7604 {
7605 register int j;
7606 for (j = 0; j < XVECLEN (x, i); j++)
7607 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7608 }
7609 }
7610 }
7611 }
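
/* Illustrative sketch, not part of flow.c: a caller inside this file
   (propagate_one_insn, for instance) typically scans an entire insn
   pattern with a null condition; COND only becomes non-null when
   mark_used_regs descends into the body of a COND_EXEC.  The function
   name below is hypothetical.  */

static void
example_scan_insn_uses (pbi, insn)
     struct propagate_block_info *pbi;
     rtx insn;
{
  /* Mark every register used anywhere in INSN as live in PBI->REG_LIVE,
     under no condition.  */
  mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
}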
7612 \f
7613 #ifdef AUTO_INC_DEC
7614
7615 static int
7616 try_pre_increment_1 (pbi, insn)
7617 struct propagate_block_info *pbi;
7618 rtx insn;
7619 {
7620 /* Find the next use of this reg. If in same basic block,
7621 make it do pre-increment or pre-decrement if appropriate. */
7622 rtx x = single_set (insn);
7623 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7624 * INTVAL (XEXP (SET_SRC (x), 1)));
7625 int regno = REGNO (SET_DEST (x));
7626 rtx y = pbi->reg_next_use[regno];
7627 if (y != 0
7628 && SET_DEST (x) != stack_pointer_rtx
7629 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7630 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7631 mode would be better. */
7632 && ! dead_or_set_p (y, SET_DEST (x))
7633 && try_pre_increment (y, SET_DEST (x), amount))
7634 {
7635 /* We have found a suitable auto-increment and already changed
7636 insn Y to do it. So flush this increment instruction. */
7637 propagate_block_delete_insn (pbi->bb, insn);
7638
7639 /* Count a reference to this reg for the increment insn we are
7640 deleting. When a reg is incremented, spilling it is worse,
7641 so we want to make that less likely. */
7642 if (regno >= FIRST_PSEUDO_REGISTER)
7643 {
7644 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
7645 REG_N_SETS (regno)++;
7646 }
7647
7648 /* Flush any remembered memories depending on the value of
7649 the incremented register. */
7650 invalidate_mems_from_set (pbi, SET_DEST (x));
7651
7652 return 1;
7653 }
7654 return 0;
7655 }
7656
7657 /* Try to change INSN so that it does pre-increment or pre-decrement
7658 addressing on register REG in order to add AMOUNT to REG.
7659 AMOUNT is negative for pre-decrement.
7660 Returns 1 if the change could be made.
7661 This checks all about the validity of the result of modifying INSN. */
7662
7663 static int
7664 try_pre_increment (insn, reg, amount)
7665 rtx insn, reg;
7666 HOST_WIDE_INT amount;
7667 {
7668 register rtx use;
7669
7670 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7671 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7672 int pre_ok = 0;
7673 /* Nonzero if we can try to make a post-increment or post-decrement.
7674 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7675 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7676 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7677 int post_ok = 0;
7678
7679 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7680 int do_post = 0;
7681
7682 /* From the sign of increment, see which possibilities are conceivable
7683 on this target machine. */
7684 if (HAVE_PRE_INCREMENT && amount > 0)
7685 pre_ok = 1;
7686 if (HAVE_POST_INCREMENT && amount > 0)
7687 post_ok = 1;
7688
7689 if (HAVE_PRE_DECREMENT && amount < 0)
7690 pre_ok = 1;
7691 if (HAVE_POST_DECREMENT && amount < 0)
7692 post_ok = 1;
7693
7694 if (! (pre_ok || post_ok))
7695 return 0;
7696
7697 /* It is not safe to add a side effect to a jump insn
7698 because if the incremented register is spilled and must be reloaded
7699 there would be no way to store the incremented value back in memory. */
7700
7701 if (GET_CODE (insn) == JUMP_INSN)
7702 return 0;
7703
7704 use = 0;
7705 if (pre_ok)
7706 use = find_use_as_address (PATTERN (insn), reg, 0);
7707 if (post_ok && (use == 0 || use == (rtx) 1))
7708 {
7709 use = find_use_as_address (PATTERN (insn), reg, -amount);
7710 do_post = 1;
7711 }
7712
7713 if (use == 0 || use == (rtx) 1)
7714 return 0;
7715
7716 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7717 return 0;
7718
7719 /* See if this combination of instruction and addressing mode exists. */
7720 if (! validate_change (insn, &XEXP (use, 0),
7721 gen_rtx_fmt_e (amount > 0
7722 ? (do_post ? POST_INC : PRE_INC)
7723 : (do_post ? POST_DEC : PRE_DEC),
7724 Pmode, reg), 0))
7725 return 0;
7726
7727 /* Record that this insn now has an implicit side effect on X. */
7728 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7729 return 1;
7730 }
7731
7732 #endif /* AUTO_INC_DEC */
7733 \f
7734 /* Find the place in the rtx X where REG is used as a memory address.
7735 Return the MEM rtx that so uses it.
7736 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7737 (plus REG (const_int PLUSCONST)).
7738
7739 If such an address does not appear, return 0.
7740 If REG appears more than once, or is used other than in such an address,
7741 return (rtx)1. */
7742
7743 rtx
7744 find_use_as_address (x, reg, plusconst)
7745 register rtx x;
7746 rtx reg;
7747 HOST_WIDE_INT plusconst;
7748 {
7749 enum rtx_code code = GET_CODE (x);
7750 const char * const fmt = GET_RTX_FORMAT (code);
7751 register int i;
7752 register rtx value = 0;
7753 register rtx tem;
7754
7755 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7756 return x;
7757
7758 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7759 && XEXP (XEXP (x, 0), 0) == reg
7760 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7761 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7762 return x;
7763
7764 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7765 {
7766 /* If REG occurs inside a MEM used in a bit-field reference,
7767 that is unacceptable. */
7768 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7769 return (rtx) (HOST_WIDE_INT) 1;
7770 }
7771
7772 if (x == reg)
7773 return (rtx) (HOST_WIDE_INT) 1;
7774
7775 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7776 {
7777 if (fmt[i] == 'e')
7778 {
7779 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7780 if (value == 0)
7781 value = tem;
7782 else if (tem != 0)
7783 return (rtx) (HOST_WIDE_INT) 1;
7784 }
7785 else if (fmt[i] == 'E')
7786 {
7787 register int j;
7788 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7789 {
7790 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7791 if (value == 0)
7792 value = tem;
7793 else if (tem != 0)
7794 return (rtx) (HOST_WIDE_INT) 1;
7795 }
7796 }
7797 }
7798
7799 return value;
7800 }
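
/* Illustrative sketch, not part of flow.c: how the auto-increment code
   above interprets the return value of find_use_as_address.  A return of
   0 means REG is not used as an address in INSN at all; (rtx) 1 means it
   is used, but not in a single plain address.  The helper name is
   hypothetical.  */

static int
example_reg_used_once_as_address (insn, reg)
     rtx insn, reg;
{
  rtx use = find_use_as_address (PATTERN (insn), reg, 0);

  return use != 0 && use != (rtx) 1;
}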
7801 \f
7802 /* Write information about registers and basic blocks into FILE.
7803 This is part of making a debugging dump. */
7804
7805 void
7806 dump_regset (r, outf)
7807 regset r;
7808 FILE *outf;
7809 {
7810 int i;
7811 if (r == NULL)
7812 {
7813 fputs (" (nil)", outf);
7814 return;
7815 }
7816
7817 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7818 {
7819 fprintf (outf, " %d", i);
7820 if (i < FIRST_PSEUDO_REGISTER)
7821 fprintf (outf, " [%s]",
7822 reg_names[i]);
7823 });
7824 }
7825
7826 /* Print a human-readable representation of R on the standard error
7827 stream. This function is designed to be used from within the
7828 debugger. */
7829
7830 void
7831 debug_regset (r)
7832 regset r;
7833 {
7834 dump_regset (r, stderr);
7835 putc ('\n', stderr);
7836 }
7837
7838 void
7839 dump_flow_info (file)
7840 FILE *file;
7841 {
7842 register int i;
7843 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7844
7845 fprintf (file, "%d registers.\n", max_regno);
7846 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7847 if (REG_N_REFS (i))
7848 {
7849 enum reg_class class, altclass;
7850 fprintf (file, "\nRegister %d used %d times across %d insns",
7851 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7852 if (REG_BASIC_BLOCK (i) >= 0)
7853 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7854 if (REG_N_SETS (i))
7855 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7856 (REG_N_SETS (i) == 1) ? "" : "s");
7857 if (REG_USERVAR_P (regno_reg_rtx[i]))
7858 fprintf (file, "; user var");
7859 if (REG_N_DEATHS (i) != 1)
7860 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7861 if (REG_N_CALLS_CROSSED (i) == 1)
7862 fprintf (file, "; crosses 1 call");
7863 else if (REG_N_CALLS_CROSSED (i))
7864 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7865 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7866 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7867 class = reg_preferred_class (i);
7868 altclass = reg_alternate_class (i);
7869 if (class != GENERAL_REGS || altclass != ALL_REGS)
7870 {
7871 if (altclass == ALL_REGS || class == ALL_REGS)
7872 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7873 else if (altclass == NO_REGS)
7874 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7875 else
7876 fprintf (file, "; pref %s, else %s",
7877 reg_class_names[(int) class],
7878 reg_class_names[(int) altclass]);
7879 }
7880 if (REG_POINTER (regno_reg_rtx[i]))
7881 fprintf (file, "; pointer");
7882 fprintf (file, ".\n");
7883 }
7884
7885 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7886 for (i = 0; i < n_basic_blocks; i++)
7887 {
7888 register basic_block bb = BASIC_BLOCK (i);
7889 register edge e;
7890
7891 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7892 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7893 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7894 fprintf (file, ", freq %i.\n", bb->frequency);
7895
7896 fprintf (file, "Predecessors: ");
7897 for (e = bb->pred; e; e = e->pred_next)
7898 dump_edge_info (file, e, 0);
7899
7900 fprintf (file, "\nSuccessors: ");
7901 for (e = bb->succ; e; e = e->succ_next)
7902 dump_edge_info (file, e, 1);
7903
7904 fprintf (file, "\nRegisters live at start:");
7905 dump_regset (bb->global_live_at_start, file);
7906
7907 fprintf (file, "\nRegisters live at end:");
7908 dump_regset (bb->global_live_at_end, file);
7909
7910 putc ('\n', file);
7911 }
7912
7913 putc ('\n', file);
7914 }
7915
7916 void
7917 debug_flow_info ()
7918 {
7919 dump_flow_info (stderr);
7920 }
7921
7922 void
7923 dump_edge_info (file, e, do_succ)
7924 FILE *file;
7925 edge e;
7926 int do_succ;
7927 {
7928 basic_block side = (do_succ ? e->dest : e->src);
7929
7930 if (side == ENTRY_BLOCK_PTR)
7931 fputs (" ENTRY", file);
7932 else if (side == EXIT_BLOCK_PTR)
7933 fputs (" EXIT", file);
7934 else
7935 fprintf (file, " %d", side->index);
7936
7937 if (e->probability)
7938 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7939
7940 if (e->count)
7941 {
7942 fprintf (file, " count:");
7943 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7944 }
7945
7946 if (e->flags)
7947 {
7948 static const char * const bitnames[] = {
7949 "fallthru", "crit", "ab", "abcall", "eh", "fake", "dfs_back"
7950 };
7951 int comma = 0;
7952 int i, flags = e->flags;
7953
7954 fputc (' ', file);
7955 fputc ('(', file);
7956 for (i = 0; flags; i++)
7957 if (flags & (1 << i))
7958 {
7959 flags &= ~(1 << i);
7960
7961 if (comma)
7962 fputc (',', file);
7963 if (i < (int) ARRAY_SIZE (bitnames))
7964 fputs (bitnames[i], file);
7965 else
7966 fprintf (file, "%d", i);
7967 comma = 1;
7968 }
7969 fputc (')', file);
7970 }
7971 }
7972 \f
7973 /* Print out one basic block with live information at start and end. */
7974
7975 void
7976 dump_bb (bb, outf)
7977 basic_block bb;
7978 FILE *outf;
7979 {
7980 rtx insn;
7981 rtx last;
7982 edge e;
7983
7984 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7985 bb->index, bb->loop_depth);
7986 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7987 putc ('\n', outf);
7988
7989 fputs (";; Predecessors: ", outf);
7990 for (e = bb->pred; e; e = e->pred_next)
7991 dump_edge_info (outf, e, 0);
7992 putc ('\n', outf);
7993
7994 fputs (";; Registers live at start:", outf);
7995 dump_regset (bb->global_live_at_start, outf);
7996 putc ('\n', outf);
7997
7998 for (insn = bb->head, last = NEXT_INSN (bb->end);
7999 insn != last;
8000 insn = NEXT_INSN (insn))
8001 print_rtl_single (outf, insn);
8002
8003 fputs (";; Registers live at end:", outf);
8004 dump_regset (bb->global_live_at_end, outf);
8005 putc ('\n', outf);
8006
8007 fputs (";; Successors: ", outf);
8008 for (e = bb->succ; e; e = e->succ_next)
8009 dump_edge_info (outf, e, 1);
8010 putc ('\n', outf);
8011 }
8012
8013 void
8014 debug_bb (bb)
8015 basic_block bb;
8016 {
8017 dump_bb (bb, stderr);
8018 }
8019
8020 void
8021 debug_bb_n (n)
8022 int n;
8023 {
8024 dump_bb (BASIC_BLOCK (n), stderr);
8025 }
8026
8027 /* Like print_rtl, but also print out live information for the start of each
8028 basic block. */
8029
8030 void
8031 print_rtl_with_bb (outf, rtx_first)
8032 FILE *outf;
8033 rtx rtx_first;
8034 {
8035 register rtx tmp_rtx;
8036
8037 if (rtx_first == 0)
8038 fprintf (outf, "(nil)\n");
8039 else
8040 {
8041 int i;
8042 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
8043 int max_uid = get_max_uid ();
8044 basic_block *start = (basic_block *)
8045 xcalloc (max_uid, sizeof (basic_block));
8046 basic_block *end = (basic_block *)
8047 xcalloc (max_uid, sizeof (basic_block));
8048 enum bb_state *in_bb_p = (enum bb_state *)
8049 xcalloc (max_uid, sizeof (enum bb_state));
8050
8051 for (i = n_basic_blocks - 1; i >= 0; i--)
8052 {
8053 basic_block bb = BASIC_BLOCK (i);
8054 rtx x;
8055
8056 start[INSN_UID (bb->head)] = bb;
8057 end[INSN_UID (bb->end)] = bb;
8058 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
8059 {
8060 enum bb_state state = IN_MULTIPLE_BB;
8061 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
8062 state = IN_ONE_BB;
8063 in_bb_p[INSN_UID (x)] = state;
8064
8065 if (x == bb->end)
8066 break;
8067 }
8068 }
8069
8070 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
8071 {
8072 int did_output;
8073 basic_block bb;
8074
8075 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
8076 {
8077 fprintf (outf, ";; Start of basic block %d, registers live:",
8078 bb->index);
8079 dump_regset (bb->global_live_at_start, outf);
8080 putc ('\n', outf);
8081 }
8082
8083 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
8084 && GET_CODE (tmp_rtx) != NOTE
8085 && GET_CODE (tmp_rtx) != BARRIER)
8086 fprintf (outf, ";; Insn is not within a basic block\n");
8087 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
8088 fprintf (outf, ";; Insn is in multiple basic blocks\n");
8089
8090 did_output = print_rtl_single (outf, tmp_rtx);
8091
8092 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
8093 {
8094 fprintf (outf, ";; End of basic block %d, registers live:\n",
8095 bb->index);
8096 dump_regset (bb->global_live_at_end, outf);
8097 putc ('\n', outf);
8098 }
8099
8100 if (did_output)
8101 putc ('\n', outf);
8102 }
8103
8104 free (start);
8105 free (end);
8106 free (in_bb_p);
8107 }
8108
8109 if (current_function_epilogue_delay_list != 0)
8110 {
8111 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
8112 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
8113 tmp_rtx = XEXP (tmp_rtx, 1))
8114 print_rtl_single (outf, XEXP (tmp_rtx, 0));
8115 }
8116 }
8117
8118 /* Dump the rtl into the current debugging dump file, then abort. */
8119
8120 static void
8121 print_rtl_and_abort_fcn (file, line, function)
8122 const char *file;
8123 int line;
8124 const char *function;
8125 {
8126 if (rtl_dump_file)
8127 {
8128 print_rtl_with_bb (rtl_dump_file, get_insns ());
8129 fclose (rtl_dump_file);
8130 }
8131
8132 fancy_abort (file, line, function);
8133 }
8134
8135 /* Recompute register set/reference counts immediately prior to register
8136 allocation.
8137
8138 This avoids problems with set/reference counts changing to/from values
8139 which have special meanings to the register allocators.
8140
8141 Additionally, the reference counts are the primary component used by the
8142 register allocators to prioritize pseudos for allocation to hard regs.
8143 More accurate reference counts generally lead to better register allocation.
8144
8145 F is the first insn to be scanned.
8146
8147 LOOP_STEP denotes how much loop_depth should be incremented per
8148 loop nesting level in order to increase the ref count more for
8149 references in a loop.
8150
8151 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
8152 possibly other information which is used by the register allocators. */
8153
8154 void
8155 recompute_reg_usage (f, loop_step)
8156 rtx f ATTRIBUTE_UNUSED;
8157 int loop_step ATTRIBUTE_UNUSED;
8158 {
8159 allocate_reg_life_data ();
8160 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
8161 }
8162
8163 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
8164 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
8165 of the number of registers that died. */
8166
8167 int
8168 count_or_remove_death_notes (blocks, kill)
8169 sbitmap blocks;
8170 int kill;
8171 {
8172 int i, count = 0;
8173
8174 for (i = n_basic_blocks - 1; i >= 0; --i)
8175 {
8176 basic_block bb;
8177 rtx insn;
8178
8179 if (blocks && ! TEST_BIT (blocks, i))
8180 continue;
8181
8182 bb = BASIC_BLOCK (i);
8183
8184 for (insn = bb->head;; insn = NEXT_INSN (insn))
8185 {
8186 if (INSN_P (insn))
8187 {
8188 rtx *pprev = &REG_NOTES (insn);
8189 rtx link = *pprev;
8190
8191 while (link)
8192 {
8193 switch (REG_NOTE_KIND (link))
8194 {
8195 case REG_DEAD:
8196 if (GET_CODE (XEXP (link, 0)) == REG)
8197 {
8198 rtx reg = XEXP (link, 0);
8199 int n;
8200
8201 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
8202 n = 1;
8203 else
8204 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
8205 count += n;
8206 }
8207 /* Fall through. */
8208
8209 case REG_UNUSED:
8210 if (kill)
8211 {
8212 rtx next = XEXP (link, 1);
8213 free_EXPR_LIST_node (link);
8214 *pprev = link = next;
8215 break;
8216 }
8217 /* Fall through. */
8218
8219 default:
8220 pprev = &XEXP (link, 1);
8221 link = *pprev;
8222 break;
8223 }
8224 }
8225 }
8226
8227 if (insn == bb->end)
8228 break;
8229 }
8230 }
8231
8232 return count;
8233 }
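
/* Illustrative sketch, not part of flow.c: a pass that is about to
   recompute liveness for the whole function can strip the stale notes
   first, while a pass that only wants statistics would pass KILL == 0.
   The helper name is hypothetical.  */

static void
example_strip_death_notes ()
{
  /* BLOCKS == NULL means the universal set of basic blocks; KILL == 1
     removes each REG_DEAD/REG_UNUSED note as it is counted.  */
  int deaths = count_or_remove_death_notes (NULL, 1);

  if (rtl_dump_file)
    fprintf (rtl_dump_file, ";; %d deaths noted before recomputation\n",
             deaths);
}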
8234
8235
8236 /* Update the block membership recorded for each insn within BB. */
8237
8238 void
8239 update_bb_for_insn (bb)
8240 basic_block bb;
8241 {
8242 rtx insn;
8243
8244 if (! basic_block_for_insn)
8245 return;
8246
8247 for (insn = bb->head; ; insn = NEXT_INSN (insn))
8248 {
8249 set_block_for_insn (insn, bb);
8250
8251 if (insn == bb->end)
8252 break;
8253 }
8254 }
8255
8256
8257 /* Record INSN's block as BB. */
8258
8259 void
8260 set_block_for_insn (insn, bb)
8261 rtx insn;
8262 basic_block bb;
8263 {
8264 size_t uid = INSN_UID (insn);
8265 if (uid >= basic_block_for_insn->num_elements)
8266 {
8267 int new_size;
8268
8269 /* Add one-eighth the size so we don't keep calling xrealloc. */
8270 new_size = uid + (uid + 7) / 8;
8271
8272 VARRAY_GROW (basic_block_for_insn, new_size);
8273 }
8274 VARRAY_BB (basic_block_for_insn, uid) = bb;
8275 }
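
/* Illustrative sketch, not part of flow.c: the varray growth rule used
   by set_block_for_insn, isolated so the arithmetic is easy to see.
   For UID == 800 it yields 800 + (800 + 7) / 8 == 900 elements, i.e.
   roughly an eighth of slack beyond the index that triggered the growth.
   The helper name is hypothetical.  */

static int
example_grown_varray_size (uid)
     int uid;
{
  return uid + (uid + 7) / 8;
}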
8276
8277 /* When a new insn has been inserted into an existing block, it will
8278 sometimes emit more than a single insn. This routine will set the
8279 block number for the specified insn, and look backwards in the insn
8280 chain to see if there are any other uninitialized insns immediately
8281 previous to this one, and set the block number for them too. */
8282
8283 void
8284 set_block_for_new_insns (insn, bb)
8285 rtx insn;
8286 basic_block bb;
8287 {
8288 set_block_for_insn (insn, bb);
8289
8290 /* Scan the previous instructions setting the block number until we find
8291 an instruction that has the block number set, or we find a note
8292 of any kind. */
8293 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
8294 {
8295 if (GET_CODE (insn) == NOTE)
8296 break;
8297 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
8298 || BLOCK_FOR_INSN (insn) == 0)
8299 set_block_for_insn (insn, bb);
8300 else
8301 break;
8302 }
8303 }
8304 \f
8305 /* Verify CFG consistency. This function checks some CFG invariants and
8306 aborts when something is wrong. The hope is that it will help to
8307 convert more optimization passes to keep the CFG consistent.
8308
8309 Currently it does following checks:
8310
8311 - test head/end pointers
8312 - overlapping of basic blocks
8313 - edge list correctness
8314 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
8315 - tails of basic blocks (ensure that the boundary is necessary)
8316 - scans body of the basic block for JUMP_INSN, CODE_LABEL
8317 and NOTE_INSN_BASIC_BLOCK
8318 - check that all insns are in the basic blocks
8319 (except the switch handling code, barriers and notes)
8320 - check that all returns are followed by barriers
8321
8322 In the future it can be extended to check a lot of other things as well
8323 (reachability of basic blocks, life information, etc.). */
8324
8325 void
8326 verify_flow_info ()
8327 {
8328 const int max_uid = get_max_uid ();
8329 const rtx rtx_first = get_insns ();
8330 rtx last_head = get_last_insn ();
8331 basic_block *bb_info, *last_visited;
8332 rtx x;
8333 int i, last_bb_num_seen, num_bb_notes, err = 0;
8334
8335 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8336 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8337 sizeof (basic_block));
8338
8339 for (i = n_basic_blocks - 1; i >= 0; i--)
8340 {
8341 basic_block bb = BASIC_BLOCK (i);
8342 rtx head = bb->head;
8343 rtx end = bb->end;
8344
8345 /* Verify the end of the basic block is in the INSN chain. */
8346 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8347 if (x == end)
8348 break;
8349 if (!x)
8350 {
8351 error ("End insn %d for block %d not found in the insn stream.",
8352 INSN_UID (end), bb->index);
8353 err = 1;
8354 }
8355
8356 /* Work backwards from the end to the head of the basic block
8357 to verify the head is in the RTL chain. */
8358 for (; x != NULL_RTX; x = PREV_INSN (x))
8359 {
8360 /* While walking over the insn chain, verify insns appear
8361 in only one basic block and initialize the BB_INFO array
8362 used by other passes. */
8363 if (bb_info[INSN_UID (x)] != NULL)
8364 {
8365 error ("Insn %d is in multiple basic blocks (%d and %d)",
8366 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8367 err = 1;
8368 }
8369 bb_info[INSN_UID (x)] = bb;
8370
8371 if (x == head)
8372 break;
8373 }
8374 if (!x)
8375 {
8376 error ("Head insn %d for block %d not found in the insn stream.",
8377 INSN_UID (head), bb->index);
8378 err = 1;
8379 }
8380
8381 last_head = x;
8382 }
8383
8384 /* Now check the basic blocks (boundaries etc.) */
8385 for (i = n_basic_blocks - 1; i >= 0; i--)
8386 {
8387 basic_block bb = BASIC_BLOCK (i);
8388 /* Check correctness of edge lists. */
8389 edge e;
8390 int has_fallthru = 0;
8391
8392 e = bb->succ;
8393 while (e)
8394 {
8395 if (last_visited [e->dest->index + 2] == bb)
8396 {
8397 error ("verify_flow_info: Duplicate edge %i->%i",
8398 e->src->index, e->dest->index);
8399 err = 1;
8400 }
8401 last_visited [e->dest->index + 2] = bb;
8402
8403 if (e->flags & EDGE_FALLTHRU)
8404 has_fallthru = 1;
8405
8406 if ((e->flags & EDGE_FALLTHRU)
8407 && e->src != ENTRY_BLOCK_PTR
8408 && e->dest != EXIT_BLOCK_PTR)
8409 {
8410 rtx insn;
8411 if (e->src->index + 1 != e->dest->index)
8412 {
8413 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8414 e->src->index, e->dest->index);
8415 err = 1;
8416 }
8417 else
8418 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8419 insn = NEXT_INSN (insn))
8420 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8421 {
8422 error ("verify_flow_info: Incorrect fallthru %i->%i",
8423 e->src->index, e->dest->index);
8424 fatal_insn ("Wrong insn in the fallthru edge", insn);
8425 err = 1;
8426 }
8427 }
8428 if (e->src != bb)
8429 {
8430 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8431 bb->index);
8432 fprintf (stderr, "Predecessor: ");
8433 dump_edge_info (stderr, e, 0);
8434 fprintf (stderr, "\nSuccessor: ");
8435 dump_edge_info (stderr, e, 1);
8436 fprintf (stderr, "\n");
8437 err = 1;
8438 }
8439 if (e->dest != EXIT_BLOCK_PTR)
8440 {
8441 edge e2 = e->dest->pred;
8442 while (e2 && e2 != e)
8443 e2 = e2->pred_next;
8444 if (!e2)
8445 {
8446 error ("Basic block %i edge lists are corrupted", bb->index);
8447 err = 1;
8448 }
8449 }
8450 e = e->succ_next;
8451 }
8452 if (!has_fallthru)
8453 {
8454 rtx insn = bb->end;
8455
8456 /* Ensure existence of barrier in BB with no fallthru edges. */
8457 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8458 insn = NEXT_INSN (insn))
8459 if (!insn
8460 || (GET_CODE (insn) == NOTE
8461 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8462 {
8463 error ("Missing barrier after block %i", bb->index);
8464 err = 1;
8465 }
8466 }
8467
8468 e = bb->pred;
8469 while (e)
8470 {
8471 if (e->dest != bb)
8472 {
8473 error ("Basic block %d pred edge is corrupted", bb->index);
8474 fputs ("Predecessor: ", stderr);
8475 dump_edge_info (stderr, e, 0);
8476 fputs ("\nSuccessor: ", stderr);
8477 dump_edge_info (stderr, e, 1);
8478 fputc ('\n', stderr);
8479 err = 1;
8480 }
8481 if (e->src != ENTRY_BLOCK_PTR)
8482 {
8483 edge e2 = e->src->succ;
8484 while (e2 && e2 != e)
8485 e2 = e2->succ_next;
8486 if (!e2)
8487 {
8488 error ("Basic block %i edge lists are corrupted", bb->index);
8489 err = 1;
8490 }
8491 }
8492 e = e->pred_next;
8493 }
8494
8495 /* OK, pointers are correct. Now check the header of the basic
8496 block. It ought to contain an optional CODE_LABEL followed
8497 by NOTE_BASIC_BLOCK. */
8498 x = bb->head;
8499 if (GET_CODE (x) == CODE_LABEL)
8500 {
8501 if (bb->end == x)
8502 {
8503 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8504 bb->index);
8505 err = 1;
8506 }
8507 x = NEXT_INSN (x);
8508 }
8509 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8510 {
8511 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8512 bb->index);
8513 err = 1;
8514 }
8515
8516 if (bb->end == x)
8517 {
8518 /* Do checks for empty blocks here */
8519 }
8520 else
8521 {
8522 x = NEXT_INSN (x);
8523 while (x)
8524 {
8525 if (NOTE_INSN_BASIC_BLOCK_P (x))
8526 {
8527 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8528 INSN_UID (x), bb->index);
8529 err = 1;
8530 }
8531
8532 if (x == bb->end)
8533 break;
8534
8535 if (GET_CODE (x) == JUMP_INSN
8536 || GET_CODE (x) == CODE_LABEL
8537 || GET_CODE (x) == BARRIER)
8538 {
8539 error ("In basic block %d:", bb->index);
8540 fatal_insn ("Flow control insn inside a basic block", x);
8541 }
8542
8543 x = NEXT_INSN (x);
8544 }
8545 }
8546 }
8547
8548 last_bb_num_seen = -1;
8549 num_bb_notes = 0;
8550 x = rtx_first;
8551 while (x)
8552 {
8553 if (NOTE_INSN_BASIC_BLOCK_P (x))
8554 {
8555 basic_block bb = NOTE_BASIC_BLOCK (x);
8556 num_bb_notes++;
8557 if (bb->index != last_bb_num_seen + 1)
8558 internal_error ("Basic blocks not numbered consecutively.");
8559
8560 last_bb_num_seen = bb->index;
8561 }
8562
8563 if (!bb_info[INSN_UID (x)])
8564 {
8565 switch (GET_CODE (x))
8566 {
8567 case BARRIER:
8568 case NOTE:
8569 break;
8570
8571 case CODE_LABEL:
8572 /* An addr_vec is placed outside any basic block. */
8573 if (NEXT_INSN (x)
8574 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8575 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8576 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8577 {
8578 x = NEXT_INSN (x);
8579 }
8580
8581 /* But in any case, non-deletable labels can appear anywhere. */
8582 break;
8583
8584 default:
8585 fatal_insn ("Insn outside basic block", x);
8586 }
8587 }
8588
8589 if (INSN_P (x)
8590 && GET_CODE (x) == JUMP_INSN
8591 && returnjump_p (x) && ! condjump_p (x)
8592 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8593 fatal_insn ("Return not followed by barrier", x);
8594
8595 x = NEXT_INSN (x);
8596 }
8597
8598 if (num_bb_notes != n_basic_blocks)
8599 internal_error
8600 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8601 num_bb_notes, n_basic_blocks);
8602
8603 if (err)
8604 internal_error ("verify_flow_info failed.");
8605
8606 /* Clean up. */
8607 free (bb_info);
8608 free (last_visited);
8609 }
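
/* Illustrative sketch, not part of flow.c: a pass that restructures the
   CFG can call verify_flow_info once it is done; the call is usually
   guarded so that release builds do not pay for the full consistency
   walk.  The function name is hypothetical.  */

static void
example_after_cfg_transform ()
{
  /* ... edges redirected, blocks split or merged above ... */

#ifdef ENABLE_CHECKING
  /* Abort with a diagnostic if block boundaries, edge lists or the
     basic block notes were left inconsistent.  */
  verify_flow_info ();
#endif
}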
8610 \f
8611 /* Functions to access an edge list with a vector representation.
8612 Enough data is kept such that given an index number, the
8613 pred and succ that edge represents can be determined, or
8614 given a pred and a succ, its index number can be returned.
8615 This allows algorithms which consume a lot of memory to
8616 represent the normally full matrix of edge (pred,succ) with a
8617 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8618 wasted space in the client code due to sparse flow graphs. */
8619
8620 /* This function initializes the edge list. Basically the entire
8621 flowgraph is processed, and all edges are assigned a number,
8622 and the data structure is filled in. */
8623
8624 struct edge_list *
8625 create_edge_list ()
8626 {
8627 struct edge_list *elist;
8628 edge e;
8629 int num_edges;
8630 int x;
8631 int block_count;
8632
8633 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8634
8635 num_edges = 0;
8636
8637 /* Determine the number of edges in the flow graph by counting successor
8638 edges on each basic block. */
8639 for (x = 0; x < n_basic_blocks; x++)
8640 {
8641 basic_block bb = BASIC_BLOCK (x);
8642
8643 for (e = bb->succ; e; e = e->succ_next)
8644 num_edges++;
8645 }
8646 /* Don't forget successors of the entry block. */
8647 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8648 num_edges++;
8649
8650 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8651 elist->num_blocks = block_count;
8652 elist->num_edges = num_edges;
8653 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8654
8655 num_edges = 0;
8656
8657 /* Follow successors of the entry block, and register these edges. */
8658 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8659 {
8660 elist->index_to_edge[num_edges] = e;
8661 num_edges++;
8662 }
8663
8664 for (x = 0; x < n_basic_blocks; x++)
8665 {
8666 basic_block bb = BASIC_BLOCK (x);
8667
8668 /* Follow all successors of blocks, and register these edges. */
8669 for (e = bb->succ; e; e = e->succ_next)
8670 {
8671 elist->index_to_edge[num_edges] = e;
8672 num_edges++;
8673 }
8674 }
8675 return elist;
8676 }
8677
8678 /* This function frees the memory associated with an edge list. */
8679
8680 void
8681 free_edge_list (elist)
8682 struct edge_list *elist;
8683 {
8684 if (elist)
8685 {
8686 free (elist->index_to_edge);
8687 free (elist);
8688 }
8689 }
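
/* Illustrative sketch, not part of flow.c: a consumer of the compressed
   edge list walks edges by index and maps each index back to its blocks
   with INDEX_EDGE_PRED_BB / INDEX_EDGE_SUCC_BB; EDGE_INDEX performs the
   reverse lookup.  The function name is hypothetical.  */

static void
example_walk_edge_list ()
{
  struct edge_list *elist = create_edge_list ();
  int x;

  for (x = 0; x < NUM_EDGES (elist); x++)
    if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
      fprintf (stderr, "edge %d leaves the entry block\n", x);

  free_edge_list (elist);
}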
8690
8691 /* This function provides debug output showing an edge list. */
8692
8693 void
8694 print_edge_list (f, elist)
8695 FILE *f;
8696 struct edge_list *elist;
8697 {
8698 int x;
8699 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8700 elist->num_blocks - 2, elist->num_edges);
8701
8702 for (x = 0; x < elist->num_edges; x++)
8703 {
8704 fprintf (f, " %-4d - edge(", x);
8705 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8706 fprintf (f, "entry,");
8707 else
8708 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8709
8710 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8711 fprintf (f, "exit)\n");
8712 else
8713 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8714 }
8715 }
8716
8717 /* This function provides an internal consistency check of an edge list,
8718 verifying that all edges are present, and that there are no
8719 extra edges. */
8720
8721 void
8722 verify_edge_list (f, elist)
8723 FILE *f;
8724 struct edge_list *elist;
8725 {
8726 int x, pred, succ, index;
8727 edge e;
8728
8729 for (x = 0; x < n_basic_blocks; x++)
8730 {
8731 basic_block bb = BASIC_BLOCK (x);
8732
8733 for (e = bb->succ; e; e = e->succ_next)
8734 {
8735 pred = e->src->index;
8736 succ = e->dest->index;
8737 index = EDGE_INDEX (elist, e->src, e->dest);
8738 if (index == EDGE_INDEX_NO_EDGE)
8739 {
8740 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8741 continue;
8742 }
8743 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8744 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8745 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8746 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8747 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8748 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8749 }
8750 }
8751 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8752 {
8753 pred = e->src->index;
8754 succ = e->dest->index;
8755 index = EDGE_INDEX (elist, e->src, e->dest);
8756 if (index == EDGE_INDEX_NO_EDGE)
8757 {
8758 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8759 continue;
8760 }
8761 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8762 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8763 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8764 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8765 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8766 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8767 }
8768 /* We've verified that all the edges are in the list, now let's make sure
8769 there are no spurious edges in the list. */
8770
8771 for (pred = 0; pred < n_basic_blocks; pred++)
8772 for (succ = 0; succ < n_basic_blocks; succ++)
8773 {
8774 basic_block p = BASIC_BLOCK (pred);
8775 basic_block s = BASIC_BLOCK (succ);
8776
8777 int found_edge = 0;
8778
8779 for (e = p->succ; e; e = e->succ_next)
8780 if (e->dest == s)
8781 {
8782 found_edge = 1;
8783 break;
8784 }
8785 for (e = s->pred; e; e = e->pred_next)
8786 if (e->src == p)
8787 {
8788 found_edge = 1;
8789 break;
8790 }
8791 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8792 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8793 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8794 pred, succ);
8795 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8796 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8797 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8798 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8799 BASIC_BLOCK (succ)));
8800 }
8801 for (succ = 0; succ < n_basic_blocks; succ++)
8802 {
8803 basic_block p = ENTRY_BLOCK_PTR;
8804 basic_block s = BASIC_BLOCK (succ);
8805
8806 int found_edge = 0;
8807
8808 for (e = p->succ; e; e = e->succ_next)
8809 if (e->dest == s)
8810 {
8811 found_edge = 1;
8812 break;
8813 }
8814 for (e = s->pred; e; e = e->pred_next)
8815 if (e->src == p)
8816 {
8817 found_edge = 1;
8818 break;
8819 }
8820 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8821 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8822 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8823 succ);
8824 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8825 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8826 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8827 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8828 BASIC_BLOCK (succ)));
8829 }
8830 for (pred = 0; pred < n_basic_blocks; pred++)
8831 {
8832 basic_block p = BASIC_BLOCK (pred);
8833 basic_block s = EXIT_BLOCK_PTR;
8834
8835 int found_edge = 0;
8836
8837 for (e = p->succ; e; e = e->succ_next)
8838 if (e->dest == s)
8839 {
8840 found_edge = 1;
8841 break;
8842 }
8843 for (e = s->pred; e; e = e->pred_next)
8844 if (e->src == p)
8845 {
8846 found_edge = 1;
8847 break;
8848 }
8849 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8850 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8851 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8852 pred);
8853 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8854 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8855 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8856 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8857 EXIT_BLOCK_PTR));
8858 }
8859 }
8860
8861 /* This routine will determine what, if any, edge there is between
8862 a specified predecessor and successor. */
8863
8864 int
8865 find_edge_index (edge_list, pred, succ)
8866 struct edge_list *edge_list;
8867 basic_block pred, succ;
8868 {
8869 int x;
8870 for (x = 0; x < NUM_EDGES (edge_list); x++)
8871 {
8872 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8873 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8874 return x;
8875 }
8876 return (EDGE_INDEX_NO_EDGE);
8877 }
8878
8879 /* This function will remove an edge from the flow graph. */
8880
8881 void
8882 remove_edge (e)
8883 edge e;
8884 {
8885 edge last_pred = NULL;
8886 edge last_succ = NULL;
8887 edge tmp;
8888 basic_block src, dest;
8889 src = e->src;
8890 dest = e->dest;
8891 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8892 last_succ = tmp;
8893
8894 if (!tmp)
8895 abort ();
8896 if (last_succ)
8897 last_succ->succ_next = e->succ_next;
8898 else
8899 src->succ = e->succ_next;
8900
8901 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8902 last_pred = tmp;
8903
8904 if (!tmp)
8905 abort ();
8906 if (last_pred)
8907 last_pred->pred_next = e->pred_next;
8908 else
8909 dest->pred = e->pred_next;
8910
8911 n_edges--;
8912 free (e);
8913 }
8914
8915 /* This routine will remove any fake successor edges for a basic block.
8916 When the edge is removed, it is also removed from whatever predecessor
8917 list it is in. */
8918
8919 static void
8920 remove_fake_successors (bb)
8921 basic_block bb;
8922 {
8923 edge e;
8924 for (e = bb->succ; e;)
8925 {
8926 edge tmp = e;
8927 e = e->succ_next;
8928 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8929 remove_edge (tmp);
8930 }
8931 }
8932
8933 /* This routine will remove all fake edges from the flow graph. If
8934 we remove all fake successors, it will automatically remove all
8935 fake predecessors. */
8936
8937 void
8938 remove_fake_edges ()
8939 {
8940 int x;
8941
8942 for (x = 0; x < n_basic_blocks; x++)
8943 remove_fake_successors (BASIC_BLOCK (x));
8944
8945 /* We've handled all successors except the entry block's. */
8946 remove_fake_successors (ENTRY_BLOCK_PTR);
8947 }
8948
8949 /* This function will add a fake edge from any block which has no
8950 successors to the exit block. Some data flow equations require these
8951 edges to exist. */
8952
8953 void
8954 add_noreturn_fake_exit_edges ()
8955 {
8956 int x;
8957
8958 for (x = 0; x < n_basic_blocks; x++)
8959 if (BASIC_BLOCK (x)->succ == NULL)
8960 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8961 }
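
/* Illustrative sketch, not part of flow.c: the usual pattern for a pass
   whose dataflow equations need every block to reach the exit -- add the
   fake edges, solve, then strip them so later passes see the real CFG.
   The function name is hypothetical.  */

static void
example_solve_with_fake_edges ()
{
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  /* ... solve the backward dataflow problem here ... */

  remove_fake_edges ();
}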
8962
8963 /* This function adds a fake edge from any infinite loop to the
8964 exit block. Some optimizations require a path from each node to
8965 the exit node.
8966
8967 See also Morgan, Figure 3.10, pp. 82-83.
8968
8969 The current implementation is ugly, not attempting to minimize the
8970 number of inserted fake edges. To reduce the number of fake edges
8971 to insert, add fake edges from _innermost_ loops containing only
8972 nodes not reachable from the exit block. */
8973
8974 void
8975 connect_infinite_loops_to_exit ()
8976 {
8977 basic_block unvisited_block;
8978
8979 /* Perform depth-first search in the reverse graph to find nodes
8980 reachable from the exit block. */
8981 struct depth_first_search_dsS dfs_ds;
8982
8983 flow_dfs_compute_reverse_init (&dfs_ds);
8984 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8985
8986 /* Repeatedly add fake edges, updating the unreachable nodes. */
8987 while (1)
8988 {
8989 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8990 if (!unvisited_block)
8991 break;
8992 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8993 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8994 }
8995
8996 flow_dfs_compute_reverse_finish (&dfs_ds);
8997
8998 return;
8999 }
9000
9001 /* Redirect an edge's successor from one block to another. */
9002
9003 void
9004 redirect_edge_succ (e, new_succ)
9005 edge e;
9006 basic_block new_succ;
9007 {
9008 edge *pe;
9009
9010 /* Disconnect the edge from the old successor block. */
9011 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
9012 continue;
9013 *pe = (*pe)->pred_next;
9014
9015 /* Reconnect the edge to the new successor block. */
9016 e->pred_next = new_succ->pred;
9017 new_succ->pred = e;
9018 e->dest = new_succ;
9019 }
9020
9021 /* Like redirect_edge_succ, but avoid creating a possible duplicate edge. */
9022
9023 void
9024 redirect_edge_succ_nodup (e, new_succ)
9025 edge e;
9026 basic_block new_succ;
9027 {
9028 edge s;
9029 /* Check whether the edge is already present. */
9030 for (s = e->src->succ; s; s = s->succ_next)
9031 if (s->dest == new_succ && s != e)
9032 break;
9033 if (s)
9034 {
9035 s->flags |= e->flags;
9036 s->probability += e->probability;
9037 s->count += e->count;
9038 remove_edge (e);
9039 }
9040 else
9041 redirect_edge_succ (e, new_succ);
9042 }
9043
9044 /* Redirect an edge's predecessor from one block to another. */
9045
9046 void
9047 redirect_edge_pred (e, new_pred)
9048 edge e;
9049 basic_block new_pred;
9050 {
9051 edge *pe;
9052
9053 /* Disconnect the edge from the old predecessor block. */
9054 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
9055 continue;
9056 *pe = (*pe)->succ_next;
9057
9058 /* Reconnect the edge to the new predecessor block. */
9059 e->succ_next = new_pred->succ;
9060 new_pred->succ = e;
9061 e->src = new_pred;
9062 }
9063 \f
9064 /* Dump the list of basic blocks in the bitmap NODES. */
9065
9066 static void
9067 flow_nodes_print (str, nodes, file)
9068 const char *str;
9069 const sbitmap nodes;
9070 FILE *file;
9071 {
9072 int node;
9073
9074 if (! nodes)
9075 return;
9076
9077 fprintf (file, "%s { ", str);
9078 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
9079 fputs ("}\n", file);
9080 }
9081
9082
9083 /* Dump the list of edges in the array EDGE_LIST. */
9084
9085 static void
9086 flow_edge_list_print (str, edge_list, num_edges, file)
9087 const char *str;
9088 const edge *edge_list;
9089 int num_edges;
9090 FILE *file;
9091 {
9092 int i;
9093
9094 if (! edge_list)
9095 return;
9096
9097 fprintf (file, "%s { ", str);
9098 for (i = 0; i < num_edges; i++)
9099 fprintf (file, "%d->%d ", edge_list[i]->src->index,
9100 edge_list[i]->dest->index);
9101 fputs ("}\n", file);
9102 }
9103
9104
9105 /* Dump loop related CFG information. */
9106
9107 static void
9108 flow_loops_cfg_dump (loops, file)
9109 const struct loops *loops;
9110 FILE *file;
9111 {
9112 int i;
9113
9114 if (! loops->num || ! file || ! loops->cfg.dom)
9115 return;
9116
9117 for (i = 0; i < n_basic_blocks; i++)
9118 {
9119 edge succ;
9120
9121 fprintf (file, ";; %d succs { ", i);
9122 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
9123 fprintf (file, "%d ", succ->dest->index);
9124 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
9125 }
9126
9127 /* Dump the DFS node order. */
9128 if (loops->cfg.dfs_order)
9129 {
9130 fputs (";; DFS order: ", file);
9131 for (i = 0; i < n_basic_blocks; i++)
9132 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
9133 fputs ("\n", file);
9134 }
9135 /* Dump the reverse completion node order. */
9136 if (loops->cfg.rc_order)
9137 {
9138 fputs (";; RC order: ", file);
9139 for (i = 0; i < n_basic_blocks; i++)
9140 fprintf (file, "%d ", loops->cfg.rc_order[i]);
9141 fputs ("\n", file);
9142 }
9143 }
9144
9145 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
9146
9147 static int
9148 flow_loop_nested_p (outer, loop)
9149 struct loop *outer;
9150 struct loop *loop;
9151 {
9152 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
9153 }
9154
9155
9156 /* Dump the loop information specified by LOOP to the stream FILE
9157 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9158 void
9159 flow_loop_dump (loop, file, loop_dump_aux, verbose)
9160 const struct loop *loop;
9161 FILE *file;
9162 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9163 int verbose;
9164 {
9165 if (! loop || ! loop->header)
9166 return;
9167
9168 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
9169 loop->num, INSN_UID (loop->first->head),
9170 INSN_UID (loop->last->end),
9171 loop->shared ? " shared" : "",
9172 loop->invalid ? " invalid" : "");
9173 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
9174 loop->header->index, loop->latch->index,
9175 loop->pre_header ? loop->pre_header->index : -1,
9176 loop->first->index, loop->last->index);
9177 fprintf (file, ";; depth %d, level %d, outer %ld\n",
9178 loop->depth, loop->level,
9179 (long) (loop->outer ? loop->outer->num : -1));
9180
9181 if (loop->pre_header_edges)
9182 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
9183 loop->num_pre_header_edges, file);
9184 flow_edge_list_print (";; entry edges", loop->entry_edges,
9185 loop->num_entries, file);
9186 fprintf (file, ";; %d", loop->num_nodes);
9187 flow_nodes_print (" nodes", loop->nodes, file);
9188 flow_edge_list_print (";; exit edges", loop->exit_edges,
9189 loop->num_exits, file);
9190 if (loop->exits_doms)
9191 flow_nodes_print (";; exit doms", loop->exits_doms, file);
9192 if (loop_dump_aux)
9193 loop_dump_aux (loop, file, verbose);
9194 }
9195
9196
9197 /* Dump the loop information specified by LOOPS to the stream FILE,
9198 using the auxiliary dump callback function LOOP_DUMP_AUX if non-null. */
9199 void
9200 flow_loops_dump (loops, file, loop_dump_aux, verbose)
9201 const struct loops *loops;
9202 FILE *file;
9203 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
9204 int verbose;
9205 {
9206 int i;
9207 int num_loops;
9208
9209 num_loops = loops->num;
9210 if (! num_loops || ! file)
9211 return;
9212
9213 fprintf (file, ";; %d loops found, %d levels\n",
9214 num_loops, loops->levels);
9215
9216 for (i = 0; i < num_loops; i++)
9217 {
9218 struct loop *loop = &loops->array[i];
9219
9220 flow_loop_dump (loop, file, loop_dump_aux, verbose);
9221
9222 if (loop->shared)
9223 {
9224 int j;
9225
9226 for (j = 0; j < i; j++)
9227 {
9228 struct loop *oloop = &loops->array[j];
9229
9230 if (loop->header == oloop->header)
9231 {
9232 int disjoint;
9233 int smaller;
9234
9235 smaller = loop->num_nodes < oloop->num_nodes;
9236
9237 /* If the union of LOOP and OLOOP is different than
9238 the larger of LOOP and OLOOP then LOOP and OLOOP
9239 must be disjoint. */
9240 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
9241 smaller ? oloop : loop);
9242 fprintf (file,
9243 ";; loop header %d shared by loops %d, %d %s\n",
9244 loop->header->index, i, j,
9245 disjoint ? "disjoint" : "nested");
9246 }
9247 }
9248 }
9249 }
9250
9251 if (verbose)
9252 flow_loops_cfg_dump (loops, file);
9253 }
9254
9255
9256 /* Free all the memory allocated for LOOPS. */
9257
9258 void
9259 flow_loops_free (loops)
9260 struct loops *loops;
9261 {
9262 if (loops->array)
9263 {
9264 int i;
9265
9266 if (! loops->num)
9267 abort ();
9268
9269 /* Free the loop descriptors. */
9270 for (i = 0; i < loops->num; i++)
9271 {
9272 struct loop *loop = &loops->array[i];
9273
9274 if (loop->pre_header_edges)
9275 free (loop->pre_header_edges);
9276 if (loop->nodes)
9277 sbitmap_free (loop->nodes);
9278 if (loop->entry_edges)
9279 free (loop->entry_edges);
9280 if (loop->exit_edges)
9281 free (loop->exit_edges);
9282 if (loop->exits_doms)
9283 sbitmap_free (loop->exits_doms);
9284 }
9285 free (loops->array);
9286 loops->array = NULL;
9287
9288 if (loops->cfg.dom)
9289 sbitmap_vector_free (loops->cfg.dom);
9290 if (loops->cfg.dfs_order)
9291 free (loops->cfg.dfs_order);
9292
9293 if (loops->shared_headers)
9294 sbitmap_free (loops->shared_headers);
9295 }
9296 }
9297
9298
9299 /* Find the entry edges into the loop with header HEADER and nodes
9300 NODES and store in ENTRY_EDGES array. Return the number of entry
9301 edges from the loop. */
9302
9303 static int
9304 flow_loop_entry_edges_find (header, nodes, entry_edges)
9305 basic_block header;
9306 const sbitmap nodes;
9307 edge **entry_edges;
9308 {
9309 edge e;
9310 int num_entries;
9311
9312 *entry_edges = NULL;
9313
9314 num_entries = 0;
9315 for (e = header->pred; e; e = e->pred_next)
9316 {
9317 basic_block src = e->src;
9318
9319 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9320 num_entries++;
9321 }
9322
9323 if (! num_entries)
9324 abort ();
9325
9326 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9327
9328 num_entries = 0;
9329 for (e = header->pred; e; e = e->pred_next)
9330 {
9331 basic_block src = e->src;
9332
9333 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9334 (*entry_edges)[num_entries++] = e;
9335 }
9336
9337 return num_entries;
9338 }
9339
9340
9341 /* Find the exit edges from the loop using the bitmap of loop nodes
9342 NODES and store in EXIT_EDGES array. Return the number of
9343 exit edges from the loop. */
9344
9345 static int
9346 flow_loop_exit_edges_find (nodes, exit_edges)
9347 const sbitmap nodes;
9348 edge **exit_edges;
9349 {
9350 edge e;
9351 int node;
9352 int num_exits;
9353
9354 *exit_edges = NULL;
9355
9356 /* Check all nodes within the loop to see if there are any
9357 successors not in the loop.  Note that a node may have multiple
9358 exiting edges; a node ending in a conditional jump has a jump edge
9359 and a fallthru edge, only one of which can leave the loop.  */
9360 num_exits = 0;
9361 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9362 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9363 {
9364 basic_block dest = e->dest;
9365
9366 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9367 num_exits++;
9368 }
9369 });
9370
9371 if (! num_exits)
9372 return 0;
9373
9374 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge));
9375
9376 /* Store all exiting edges into an array. */
9377 num_exits = 0;
9378 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9379 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9380 {
9381 basic_block dest = e->dest;
9382
9383 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9384 (*exit_edges)[num_exits++] = e;
9385 }
9386 });
9387
9388 return num_exits;
9389 }
9390
9391
9392 /* Find the nodes contained within the loop with header HEADER and
9393 latch LATCH and store in NODES. Return the number of nodes within
9394 the loop. */
9395
9396 static int
9397 flow_loop_nodes_find (header, latch, nodes)
9398 basic_block header;
9399 basic_block latch;
9400 sbitmap nodes;
9401 {
9402 basic_block *stack;
9403 int sp;
9404 int num_nodes = 0;
9405
9406 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9407 sp = 0;
9408
9409 /* Start with only the loop header in the set of loop nodes. */
9410 sbitmap_zero (nodes);
9411 SET_BIT (nodes, header->index);
9412 num_nodes++;
9413 header->loop_depth++;
9414
9415 /* Push the loop latch on to the stack. */
9416 if (! TEST_BIT (nodes, latch->index))
9417 {
9418 SET_BIT (nodes, latch->index);
9419 latch->loop_depth++;
9420 num_nodes++;
9421 stack[sp++] = latch;
9422 }
9423
9424 while (sp)
9425 {
9426 basic_block node;
9427 edge e;
9428
9429 node = stack[--sp];
9430 for (e = node->pred; e; e = e->pred_next)
9431 {
9432 basic_block ancestor = e->src;
9433
9434 /* If this ancestor is not yet marked as part of the loop, add it
9435 to the set of loop nodes and push it on to the stack.  */
9436 if (ancestor != ENTRY_BLOCK_PTR
9437 && ! TEST_BIT (nodes, ancestor->index))
9438 {
9439 SET_BIT (nodes, ancestor->index);
9440 ancestor->loop_depth++;
9441 num_nodes++;
9442 stack[sp++] = ancestor;
9443 }
9444 }
9445 }
9446 free (stack);
9447 return num_nodes;
9448 }
9449
9450 /* Compute the depth first search order and store it in the array
9451 DFS_ORDER if that is non-zero.  If RC_ORDER is non-zero, store
9452 the reverse completion number of each node in it.  Returns the
9453 number of nodes visited.  A depth first search tries to get as
9454 far away from the starting point as quickly as
9455 possible.  */
9456
9457 int
9458 flow_depth_first_order_compute (dfs_order, rc_order)
9459 int *dfs_order;
9460 int *rc_order;
9461 {
9462 edge *stack;
9463 int sp;
9464 int dfsnum = 0;
9465 int rcnum = n_basic_blocks - 1;
9466 sbitmap visited;
9467
9468 /* Allocate stack for back-tracking up CFG. */
9469 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9470 sp = 0;
9471
9472 /* Allocate bitmap to track nodes that have been visited. */
9473 visited = sbitmap_alloc (n_basic_blocks);
9474
9475 /* None of the nodes in the CFG have been visited yet. */
9476 sbitmap_zero (visited);
9477
9478 /* Push the first edge on to the stack. */
9479 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9480
9481 while (sp)
9482 {
9483 edge e;
9484 basic_block src;
9485 basic_block dest;
9486
9487 /* Look at the edge on the top of the stack. */
9488 e = stack[sp - 1];
9489 src = e->src;
9490 dest = e->dest;
9491
9492 /* Check if the edge destination has been visited yet. */
9493 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9494 {
9495 /* Mark that we have visited the destination. */
9496 SET_BIT (visited, dest->index);
9497
9498 if (dfs_order)
9499 dfs_order[dfsnum++] = dest->index;
9500
9501 if (dest->succ)
9502 {
9503 /* Since the DEST node has been visited for the first
9504 time, check its successors. */
9505 stack[sp++] = dest->succ;
9506 }
9507 else
9508 {
9509 /* There are no successors for the DEST node so assign
9510 its reverse completion number. */
9511 if (rc_order)
9512 rc_order[rcnum--] = dest->index;
9513 }
9514 }
9515 else
9516 {
9517 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9518 {
9519 /* There are no more successors for the SRC node
9520 so assign its reverse completion number. */
9521 if (rc_order)
9522 rc_order[rcnum--] = src->index;
9523 }
9524
9525 if (e->succ_next)
9526 stack[sp - 1] = e->succ_next;
9527 else
9528 sp--;
9529 }
9530 }
9531
9532 free (stack);
9533 sbitmap_free (visited);
9534
9535 /* The number of nodes visited should not be greater than
9536 n_basic_blocks. */
9537 if (dfsnum > n_basic_blocks)
9538 abort ();
9539
9540 /* There are some nodes left in the CFG that are unreachable. */
9541 if (dfsnum < n_basic_blocks)
9542 abort ();
9543 return dfsnum;
9544 }
9545
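/* Illustrative sketch only (not part of the pass): how a caller might
   obtain the orderings computed by flow_depth_first_order_compute
   above.  It mirrors the allocation done in flow_loops_find further
   down; the wrapper name itself is hypothetical.  */
#if 0
static void
example_dfs_orders ()
{
  int *dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
  int *rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));

  /* dfs_order[i] receives the index of the i-th block visited in
     depth first order; rc_order lists block indices in reverse
     completion order, so outer loops appear before inner loops.  */
  flow_depth_first_order_compute (dfs_order, rc_order);

  /* ... use the orderings ...  */

  free (dfs_order);
  free (rc_order);
}
#endif
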
9546 /* Perform a depth first search on the _reverse_ graph, marking the
9547 blocks visited in a depth_first_search_ds context and reporting
9548 any blocks left unvisited.
9549
9550 The computation is split into four parts:
9551
9552 flow_dfs_compute_reverse_init () creates the necessary data
9553 structures.
9554
9555 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9556 structures. The block will start the search.
9557
9558 flow_dfs_compute_reverse_execute () continues (or starts) the
9559 search using the block on the top of the stack, stopping when the
9560 stack is empty.
9561
9562 flow_dfs_compute_reverse_finish () destroys the necessary data
9563 structures.
9564
9565 Thus, the user will probably call ..._init(), call ..._add_bb() to
9566 add a beginning basic block to the stack, call ..._execute(),
9567 possibly add another bb to the stack and again call ..._execute(),
9568 ..., and finally call _finish(). */
9569
9570 /* Initialize the data structures used for depth-first search on the
9571 reverse graph.  DATA is the current depth-first search context: it
9572 is given an empty basic block stack for back-tracking and a bitmap,
9573 initially clear, that records which blocks have been visited.  The
9574 search itself is started later with ..._add_bb and ..._execute.  */
9575
9576 static void
9577 flow_dfs_compute_reverse_init (data)
9578 depth_first_search_ds data;
9579 {
9580 /* Allocate stack for back-tracking up CFG. */
9581 data->stack =
9582 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9583 * sizeof (basic_block));
9584 data->sp = 0;
9585
9586 /* Allocate bitmap to track nodes that have been visited. */
9587 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9588
9589 /* None of the nodes in the CFG have been visited yet. */
9590 sbitmap_zero (data->visited_blocks);
9591
9592 return;
9593 }
9594
9595 /* Add the specified basic block to the top of the dfs data
9596 structures. When the search continues, it will start at the
9597 block. */
9598
9599 static void
9600 flow_dfs_compute_reverse_add_bb (data, bb)
9601 depth_first_search_ds data;
9602 basic_block bb;
9603 {
9604 data->stack[data->sp++] = bb;
9605 return;
9606 }
9607
9608 /* Continue the depth-first search through the reverse graph starting
9609 with the block at the stack's top and ending when the stack is
9610 empty. Visited nodes are marked. Returns an unvisited basic
9611 block, or NULL if there is none available. */
9612
9613 static basic_block
9614 flow_dfs_compute_reverse_execute (data)
9615 depth_first_search_ds data;
9616 {
9617 basic_block bb;
9618 edge e;
9619 int i;
9620
9621 while (data->sp > 0)
9622 {
9623 bb = data->stack[--data->sp];
9624
9625 /* Mark that we have visited this node. */
9626 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9627 {
9628 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9629
9630 /* Perform depth-first search on adjacent vertices. */
9631 for (e = bb->pred; e; e = e->pred_next)
9632 flow_dfs_compute_reverse_add_bb (data, e->src);
9633 }
9634 }
9635
9636 /* Determine if there are unvisited basic blocks. */
9637 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9638 if (!TEST_BIT (data->visited_blocks, i))
9639 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9640 return NULL;
9641 }
9642
9643 /* Destroy the data structures needed for depth-first search on the
9644 reverse graph. */
9645
9646 static void
9647 flow_dfs_compute_reverse_finish (data)
9648 depth_first_search_ds data;
9649 {
9650 free (data->stack);
9651 sbitmap_free (data->visited_blocks);
9652 return;
9653 }
9654
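/* Illustrative sketch only: a minimal driver for the four reverse
   depth-first search routines above, following the calling sequence
   described before flow_dfs_compute_reverse_init.  It assumes that
   the depth_first_search_ds typedef refers to a structure tag named
   depth_first_search_dsS defined earlier in this file; the driver
   name itself is hypothetical.  */
#if 0
static void
example_reverse_dfs ()
{
  struct depth_first_search_dsS dfs_ds;
  basic_block unvisited;

  flow_dfs_compute_reverse_init (&dfs_ds);

  /* Start the search from the exit block and walk predecessor edges.  */
  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);

  /* Each time the search runs dry, restart it from a block that was
     not reached backwards from the exit block, until none remain.  */
  while ((unvisited = flow_dfs_compute_reverse_execute (&dfs_ds)) != NULL)
    flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited);

  flow_dfs_compute_reverse_finish (&dfs_ds);
}
#endif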
9655
9656 /* Find the root node of the loop pre-header extended basic block and
9657 the edges along the trace from the root node to the loop header. */
9658
9659 static void
9660 flow_loop_pre_header_scan (loop)
9661 struct loop *loop;
9662 {
9663 int num = 0;
9664 basic_block ebb;
9665
9666 loop->num_pre_header_edges = 0;
9667
9668 if (loop->num_entries != 1)
9669 return;
9670
9671 ebb = loop->entry_edges[0]->src;
9672
9673 if (ebb != ENTRY_BLOCK_PTR)
9674 {
9675 edge e;
9676
9677 /* Count number of edges along trace from loop header to
9678 root of pre-header extended basic block. Usually this is
9679 only one or two edges. */
9680 num++;
9681 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9682 {
9683 ebb = ebb->pred->src;
9684 num++;
9685 }
9686
9687 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge));
9688 loop->num_pre_header_edges = num;
9689
9690 /* Store the edges in the order that they are followed.  The source
9691 of the first edge is the root node of the pre-header extended
9692 basic block and the destination of the last edge is
9693 the loop header.  */
9694 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9695 {
9696 loop->pre_header_edges[--num] = e;
9697 }
9698 }
9699 }
9700
9701
9702 /* Return the block for the pre-header of the loop with header
9703 HEADER where DOM specifies the dominator information. Return NULL if
9704 there is no pre-header. */
9705
9706 static basic_block
9707 flow_loop_pre_header_find (header, dom)
9708 basic_block header;
9709 const sbitmap *dom;
9710 {
9711 basic_block pre_header;
9712 edge e;
9713
9714 /* If a predecessor P of the header is the only predecessor that
9715 the header does not dominate, then P is the pre-header.  */
9716 pre_header = NULL;
9717 for (e = header->pred; e; e = e->pred_next)
9718 {
9719 basic_block node = e->src;
9720
9721 if (node != ENTRY_BLOCK_PTR
9722 && ! TEST_BIT (dom[node->index], header->index))
9723 {
9724 if (pre_header == NULL)
9725 pre_header = node;
9726 else
9727 {
9728 /* There are multiple edges into the header from outside
9729 the loop so there is no pre-header block. */
9730 pre_header = NULL;
9731 break;
9732 }
9733 }
9734 }
9735 return pre_header;
9736 }
9737
9738 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9739 previously added. The insertion algorithm assumes that the loops
9740 are added in the order found by a depth first search of the CFG. */
9741
9742 static void
9743 flow_loop_tree_node_add (prevloop, loop)
9744 struct loop *prevloop;
9745 struct loop *loop;
9746 {
9747
9748 if (flow_loop_nested_p (prevloop, loop))
9749 {
9750 prevloop->inner = loop;
9751 loop->outer = prevloop;
9752 return;
9753 }
9754
9755 while (prevloop->outer)
9756 {
9757 if (flow_loop_nested_p (prevloop->outer, loop))
9758 {
9759 prevloop->next = loop;
9760 loop->outer = prevloop->outer;
9761 return;
9762 }
9763 prevloop = prevloop->outer;
9764 }
9765
9766 prevloop->next = loop;
9767 loop->outer = NULL;
9768 }
9769
9770 /* Build the loop hierarchy tree for LOOPS. */
9771
9772 static void
9773 flow_loops_tree_build (loops)
9774 struct loops *loops;
9775 {
9776 int i;
9777 int num_loops;
9778
9779 num_loops = loops->num;
9780 if (! num_loops)
9781 return;
9782
9783 /* Root the loop hierarchy tree with the first loop found.
9784 Since we used a depth first search this should be the
9785 outermost loop. */
9786 loops->tree_root = &loops->array[0];
9787 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9788
9789 /* Add the remaining loops to the tree. */
9790 for (i = 1; i < num_loops; i++)
9791 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9792 }
9793
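/* Illustrative sketch only: walking the loop hierarchy tree built by
   flow_loops_tree_build above.  Sibling loops are chained through the
   next field and enclosed loops hang off the inner field, the same
   links traversed by flow_loop_level_compute below.  The function
   name is hypothetical.  */
#if 0
static void
example_walk_loop_tree (loop)
     struct loop *loop;
{
  for (; loop; loop = loop->next)
    {
      /* Visit LOOP here, then recurse into the loops it encloses.  */
      example_walk_loop_tree (loop->inner);
    }
}
#endif
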
9794 /* Helper function to compute loop nesting depth and enclosed loop level
9795 for the natural loop specified by LOOP at the loop depth DEPTH.
9796 Returns the loop level. */
9797
9798 static int
9799 flow_loop_level_compute (loop, depth)
9800 struct loop *loop;
9801 int depth;
9802 {
9803 struct loop *inner;
9804 int level = 1;
9805
9806 if (! loop)
9807 return 0;
9808
9809 /* Traverse loop tree assigning depth and computing level as the
9810 maximum level of all the inner loops of this loop. The loop
9811 level is equivalent to the height of the loop in the loop tree
9812 and corresponds to the number of enclosed loop levels (including
9813 itself). */
9814 for (inner = loop->inner; inner; inner = inner->next)
9815 {
9816 int ilevel;
9817
9818 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9819
9820 if (ilevel > level)
9821 level = ilevel;
9822 }
9823 loop->level = level;
9824 loop->depth = depth;
9825 return level;
9826 }
9827
9828 /* Compute the loop nesting depth and enclosed loop level for the loop
9829 hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
9830 level. */
9831
9832 static int
9833 flow_loops_level_compute (loops)
9834 struct loops *loops;
9835 {
9836 struct loop *loop;
9837 int level;
9838 int levels = 0;
9839
9840 /* Traverse all the outer level loops. */
9841 for (loop = loops->tree_root; loop; loop = loop->next)
9842 {
9843 level = flow_loop_level_compute (loop, 1);
9844 if (level > levels)
9845 levels = level;
9846 }
9847 return levels;
9848 }
9849
9850
9851 /* Scan a single natural loop specified by LOOP collecting information
9852 about it specified by FLAGS. */
9853
9854 int
9855 flow_loop_scan (loops, loop, flags)
9856 struct loops *loops;
9857 struct loop *loop;
9858 int flags;
9859 {
9860 /* Determine prerequisites. */
9861 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9862 flags |= LOOP_EXIT_EDGES;
9863
9864 if (flags & LOOP_ENTRY_EDGES)
9865 {
9866 /* Find edges which enter the loop header.
9867 Note that the entry edges should only
9868 enter the header of a natural loop. */
9869 loop->num_entries
9870 = flow_loop_entry_edges_find (loop->header,
9871 loop->nodes,
9872 &loop->entry_edges);
9873 }
9874
9875 if (flags & LOOP_EXIT_EDGES)
9876 {
9877 /* Find edges which exit the loop. */
9878 loop->num_exits
9879 = flow_loop_exit_edges_find (loop->nodes,
9880 &loop->exit_edges);
9881 }
9882
9883 if (flags & LOOP_EXITS_DOMS)
9884 {
9885 int j;
9886
9887 /* Determine which loop nodes dominate all the exits
9888 of the loop. */
9889 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9890 sbitmap_copy (loop->exits_doms, loop->nodes);
9891 for (j = 0; j < loop->num_exits; j++)
9892 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9893 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9894
9895 /* The header of a natural loop must dominate
9896 all exits. */
9897 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9898 abort ();
9899 }
9900
9901 if (flags & LOOP_PRE_HEADER)
9902 {
9903 /* Look to see if the loop has a pre-header node. */
9904 loop->pre_header
9905 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9906
9907 /* Find the blocks within the extended basic block of
9908 the loop pre-header. */
9909 flow_loop_pre_header_scan (loop);
9910 }
9911 return 1;
9912 }
9913
9914
9915 /* Find all the natural loops in the function and save in LOOPS structure
9916 and recalculate loop_depth information in basic block structures.
9917 FLAGS controls which loop information is collected.
9918 Return the number of natural loops found. */
9919
9920 int
9921 flow_loops_find (loops, flags)
9922 struct loops *loops;
9923 int flags;
9924 {
9925 int i;
9926 int b;
9927 int num_loops;
9928 edge e;
9929 sbitmap headers;
9930 sbitmap *dom;
9931 int *dfs_order;
9932 int *rc_order;
9933
9934 /* This function cannot be repeatedly called with different
9935 flags to build up the loop information. The loop tree
9936 must always be built if this function is called. */
9937 if (! (flags & LOOP_TREE))
9938 abort ();
9939
9940 memset (loops, 0, sizeof (*loops));
9941
9942 /* Taking care of this degenerate case makes the rest of
9943 this code simpler. */
9944 if (n_basic_blocks == 0)
9945 return 0;
9946
9947 dfs_order = NULL;
9948 rc_order = NULL;
9949
9950 /* Compute the dominators. */
9951 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9952 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9953
9954 /* Count the number of loop edges (back edges). This should be the
9955 same as the number of natural loops. */
9956
9957 num_loops = 0;
9958 for (b = 0; b < n_basic_blocks; b++)
9959 {
9960 basic_block header;
9961
9962 header = BASIC_BLOCK (b);
9963 header->loop_depth = 0;
9964
9965 for (e = header->pred; e; e = e->pred_next)
9966 {
9967 basic_block latch = e->src;
9968
9969 /* Look for back edges where a predecessor is dominated
9970 by this block. A natural loop has a single entry
9971 node (header) that dominates all the nodes in the
9972 loop.  It also has a single back edge to the header
9973 from a latch node. Note that multiple natural loops
9974 may share the same header. */
9975 if (b != header->index)
9976 abort ();
9977
9978 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9979 num_loops++;
9980 }
9981 }
9982
9983 if (num_loops)
9984 {
9985 /* Compute depth first search order of the CFG so that outer
9986 natural loops will be found before inner natural loops. */
9987 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9988 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9989 flow_depth_first_order_compute (dfs_order, rc_order);
9990
9991 /* Save CFG derived information to avoid recomputing it. */
9992 loops->cfg.dom = dom;
9993 loops->cfg.dfs_order = dfs_order;
9994 loops->cfg.rc_order = rc_order;
9995
9996 /* Allocate loop structures. */
9997 loops->array
9998 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9999
10000 headers = sbitmap_alloc (n_basic_blocks);
10001 sbitmap_zero (headers);
10002
10003 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
10004 sbitmap_zero (loops->shared_headers);
10005
10006 /* Find and record information about all the natural loops
10007 in the CFG. */
10008 num_loops = 0;
10009 for (b = 0; b < n_basic_blocks; b++)
10010 {
10011 basic_block header;
10012
10013 /* Search the nodes of the CFG in reverse completion order
10014 so that we can find outer loops first. */
10015 header = BASIC_BLOCK (rc_order[b]);
10016
10017 /* Look for all the possible latch blocks for this header. */
10018 for (e = header->pred; e; e = e->pred_next)
10019 {
10020 basic_block latch = e->src;
10021
10022 /* Look for back edges where a predecessor is dominated
10023 by this block. A natural loop has a single entry
10024 node (header) that dominates all the nodes in the
10025 loop.  It also has a single back edge to the header
10026 from a latch node. Note that multiple natural loops
10027 may share the same header. */
10028 if (latch != ENTRY_BLOCK_PTR
10029 && TEST_BIT (dom[latch->index], header->index))
10030 {
10031 struct loop *loop;
10032
10033 loop = loops->array + num_loops;
10034
10035 loop->header = header;
10036 loop->latch = latch;
10037 loop->num = num_loops;
10038
10039 num_loops++;
10040 }
10041 }
10042 }
10043
10044 for (i = 0; i < num_loops; i++)
10045 {
10046 struct loop *loop = &loops->array[i];
10047
10048 /* Keep track of blocks that are loop headers so
10049 that we can tell which loops should be merged. */
10050 if (TEST_BIT (headers, loop->header->index))
10051 SET_BIT (loops->shared_headers, loop->header->index);
10052 SET_BIT (headers, loop->header->index);
10053
10054 /* Find nodes contained within the loop. */
10055 loop->nodes = sbitmap_alloc (n_basic_blocks);
10056 loop->num_nodes
10057 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
10058
10059 /* Compute first and last blocks within the loop.
10060 These are often the same as the loop header and
10061 loop latch respectively, but this is not always
10062 the case. */
10063 loop->first
10064 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
10065 loop->last
10066 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
10067
10068 flow_loop_scan (loops, loop, flags);
10069 }
10070
10071 /* Natural loops with shared headers may either be disjoint or
10072 nested. Disjoint loops with shared headers cannot be inner
10073 loops and should be merged. For now just mark loops that share
10074 headers. */
10075 for (i = 0; i < num_loops; i++)
10076 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
10077 loops->array[i].shared = 1;
10078
10079 sbitmap_free (headers);
10080 }
10081 else
10082 {
10083 sbitmap_vector_free (dom);
10084 }
10085
10086 loops->num = num_loops;
10087
10088 /* Build the loop hierarchy tree. */
10089 flow_loops_tree_build (loops);
10090
10091 /* Assign the loop nesting depth and enclosed loop level for each
10092 loop. */
10093 loops->levels = flow_loops_level_compute (loops);
10094
10095 return num_loops;
10096 }
10097
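/* Illustrative sketch only: the typical life cycle of a loops
   structure built by flow_loops_find above, using the dump and free
   routines defined earlier in this file.  The flag combination and
   the use of rtl_dump_file are examples, not requirements, and the
   wrapper name is hypothetical.  */
#if 0
static void
example_loop_analysis ()
{
  struct loops loops;

  /* LOOP_TREE is mandatory; the other flags request the per-loop
     information gathered by flow_loop_scan.  */
  flow_loops_find (&loops,
                   LOOP_TREE | LOOP_ENTRY_EDGES | LOOP_EXIT_EDGES
                   | LOOP_PRE_HEADER);

  if (rtl_dump_file)
    flow_loops_dump (&loops, rtl_dump_file, NULL, 1);

  /* ... transform the loops ...  */

  flow_loops_free (&loops);
}
#endif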
10098
10099 /* Update the information regarding the loops in the CFG
10100 specified by LOOPS. */
10101 int
10102 flow_loops_update (loops, flags)
10103 struct loops *loops;
10104 int flags;
10105 {
10106 /* One day we may want to update the current loop data. For now
10107 throw away the old stuff and rebuild what we need. */
10108 if (loops->array)
10109 flow_loops_free (loops);
10110
10111 return flow_loops_find (loops, flags);
10112 }
10113
10114
10115 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
10116
10117 int
10118 flow_loop_outside_edge_p (loop, e)
10119 const struct loop *loop;
10120 edge e;
10121 {
10122 if (e->dest != loop->header)
10123 abort ();
10124 return (e->src == ENTRY_BLOCK_PTR)
10125 || ! TEST_BIT (loop->nodes, e->src->index);
10126 }
10127
10128 /* Clear LOG_LINKS fields of insns in a chain.
10129 Also clear the global_live_at_{start,end} fields of the basic block
10130 structures. */
10131
10132 void
10133 clear_log_links (insns)
10134 rtx insns;
10135 {
10136 rtx i;
10137 int b;
10138
10139 for (i = insns; i; i = NEXT_INSN (i))
10140 if (INSN_P (i))
10141 LOG_LINKS (i) = 0;
10142
10143 for (b = 0; b < n_basic_blocks; b++)
10144 {
10145 basic_block bb = BASIC_BLOCK (b);
10146
10147 bb->global_live_at_start = NULL;
10148 bb->global_live_at_end = NULL;
10149 }
10150
10151 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
10152 EXIT_BLOCK_PTR->global_live_at_start = NULL;
10153 }
10154
10155 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
10156 correspond to the hard registers, if any, set in that map. This
10157 could be done far more efficiently by having all sorts of special-cases
10158 with moving single words, but probably isn't worth the trouble. */
10159
10160 void
10161 reg_set_to_hard_reg_set (to, from)
10162 HARD_REG_SET *to;
10163 bitmap from;
10164 {
10165 int i;
10166
10167 EXECUTE_IF_SET_IN_BITMAP
10168 (from, 0, i,
10169 {
10170 if (i >= FIRST_PSEUDO_REGISTER)
10171 return;
10172 SET_HARD_REG_BIT (*to, i);
10173 });
10174 }
10175
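/* Illustrative sketch only: converting a register bitmap into a
   HARD_REG_SET with reg_set_to_hard_reg_set above.  The LIVE
   parameter is a placeholder for any register bitmap, for instance a
   block's global_live_at_start; the function name is hypothetical.  */
#if 0
static void
example_hard_reg_set (live)
     bitmap live;
{
  HARD_REG_SET hard_regs;

  CLEAR_HARD_REG_SET (hard_regs);
  reg_set_to_hard_reg_set (&hard_regs, live);

  /* hard_regs now has a bit set for every hard register in LIVE.  */
}
#endif
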
10176 /* Called once at initialization time.  */
10177
10178 void
10179 init_flow ()
10180 {
10181 static int initialized;
10182
10183 if (!initialized)
10184 {
10185 gcc_obstack_init (&flow_obstack);
10186 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10187 initialized = 1;
10188 }
10189 else
10190 {
10191 obstack_free (&flow_obstack, flow_firstobj);
10192 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
10193 }
10194 }
10195
10196 /* Assume that the preceding pass has possibly eliminated jump instructions
10197 or converted the unconditional jumps.  Eliminate the corresponding edges
10198 from the CFG.  Return true if any edges were eliminated.  */
10199
10200 bool
10201 purge_dead_edges (bb)
10202 basic_block bb;
10203 {
10204 edge e, next;
10205 rtx insn = bb->end;
10206 bool purged = false;
10207
10208 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
10209 return false;
10210 if (GET_CODE (insn) == JUMP_INSN)
10211 {
10212 rtx note;
10213 edge b,f;
10214 /* We care only about conditional jumps, return jumps and simplejumps.  */
10215 if (!any_condjump_p (insn)
10216 && !returnjump_p (insn)
10217 && !simplejump_p (insn))
10218 return false;
10219 for (e = bb->succ; e; e = next)
10220 {
10221 next = e->succ_next;
10222
10223 /* Keep the edges that are still justified by this jump insn.  */
10224 if ((e->flags & EDGE_FALLTHRU)
10225 && any_condjump_p (insn))
10226 continue;
10227 if (e->dest != EXIT_BLOCK_PTR
10228 && e->dest->head == JUMP_LABEL (insn))
10229 continue;
10230 if (e->dest == EXIT_BLOCK_PTR
10231 && returnjump_p (insn))
10232 continue;
10233 purged = true;
10234 remove_edge (e);
10235 }
10236 if (!bb->succ || !purged)
10237 return false;
10238 if (rtl_dump_file)
10239 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
10240 if (!optimize)
10241 return purged;
10242
10243 /* Redistribute probabilities. */
10244 if (!bb->succ->succ_next)
10245 {
10246 bb->succ->probability = REG_BR_PROB_BASE;
10247 bb->succ->count = bb->count;
10248 }
10249 else
10250 {
10251 note = find_reg_note (insn, REG_BR_PROB, NULL);
10252 if (!note)
10253 return purged;
10254 b = BRANCH_EDGE (bb);
10255 f = FALLTHRU_EDGE (bb);
10256 b->probability = INTVAL (XEXP (note, 0));
10257 f->probability = REG_BR_PROB_BASE - b->probability;
10258 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
10259 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
10260 }
10261 return purged;
10262 }
10263
10264 /* Clean up abnormal edges caused by throwing insns that have been
10265 eliminated.  */
10266 if (! can_throw_internal (bb->end))
10267 for (e = bb->succ; e; e = next)
10268 {
10269 next = e->succ_next;
10270 if (e->flags & EDGE_EH)
10271 {
10272 remove_edge (e);
10273 purged = true;
10274 }
10275 }
10276
10277 /* If we don't see a jump insn, we don't know exactly why the block would
10278 have been broken at this point. Look for a simple, non-fallthru edge,
10279 as these are only created by conditional branches. If we find such an
10280 edge we know that there used to be a jump here and can then safely
10281 remove all non-fallthru edges. */
10282 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
10283 e = e->succ_next);
10284 if (!e)
10285 return purged;
10286 for (e = bb->succ; e; e = next)
10287 {
10288 next = e->succ_next;
10289 if (!(e->flags & EDGE_FALLTHRU))
10290 remove_edge (e), purged = true;
10291 }
10292 if (!bb->succ || bb->succ->succ_next)
10293 abort ();
10294 bb->succ->probability = REG_BR_PROB_BASE;
10295 bb->succ->count = bb->count;
10296
10297 if (rtl_dump_file)
10298 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
10299 bb->index);
10300 return purged;
10301 }
10302
10303 /* Search all basic blocks for potentially dead edges and purge them.
10304
10305 Return true if any edge has been eliminated.  */
10307
10308 bool
10309 purge_all_dead_edges ()
10310 {
10311 int i, purged = false;
10312 for (i = 0; i < n_basic_blocks; i++)
10313 purged |= purge_dead_edges (BASIC_BLOCK (i));
10314 return purged;
10315 }