dbxout.c: Consistently use putc instead of fputc.
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "expr.h"
137 #include "ssa.h"
138 #include "timevar.h"
139
140 #include "obstack.h"
141 #include "splay-tree.h"
142
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 #ifndef LOCAL_REGNO
165 #define LOCAL_REGNO(REGNO) 0
166 #endif
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
169 #endif
170
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
174 #endif
175 #endif
176
177 /* The obstack on which the flow graph components are allocated. */
178
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
181
182 /* Number of basic blocks in the current function. */
183
184 int n_basic_blocks;
185
186 /* Number of edges in the current function. */
187
188 int n_edges;
189
190 /* The basic block array. */
191
192 varray_type basic_block_info;
193
194 /* The special entry and exit blocks. */
195
196 struct basic_block_def entry_exit_blocks[2]
197 = {{NULL, /* head */
198 NULL, /* end */
199 NULL, /* head_tree */
200 NULL, /* end_tree */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
207 NULL, /* aux */
208 ENTRY_BLOCK, /* index */
209 0, /* loop_depth */
210 0, /* count */
211 0 /* frequency */
212 },
213 {
214 NULL, /* head */
215 NULL, /* end */
216 NULL, /* head_tree */
217 NULL, /* end_tree */
218 NULL, /* pred */
219 NULL, /* succ */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
224 NULL, /* aux */
225 EXIT_BLOCK, /* index */
226 0, /* loop_depth */
227 0, /* count */
228 0 /* frequency */
229 }
230 };
231
232 /* Nonzero if the second flow pass has completed. */
233 int flow2_completed;
234
235 /* Maximum register number used in this function, plus one. */
236
237 int max_regno;
238
239 /* Indexed by n, giving various register information */
240
241 varray_type reg_n_info;
242
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
245
246 int regset_bytes;
247 int regset_size;
248
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
251
252 regset regs_live_at_setjmp;
253
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
258 rtx regs_may_share;
259
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
263
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
266
267 static HARD_REG_SET elim_reg_set;
268
269 /* The basic block structure for every insn, indexed by uid. */
270
271 varray_type basic_block_for_insn;
272
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
276
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
279
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
282 {
283 /* A boolean expression of conditions under which a register is dead. */
284 rtx condition;
285 /* Conditions under which a register is dead at the basic block end. */
286 rtx orig_condition;
287
288 /* A boolean expression of conditions under which a register has been
289 stored into. */
290 rtx stores;
291
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
294 };
295
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
298
299 struct propagate_block_info
300 {
301 /* The basic block we're considering. */
302 basic_block bb;
303
304 /* Bit N is set if register N is conditionally or unconditionally live. */
305 regset reg_live;
306
307 /* Bit N is set if register N is set this insn. */
308 regset new_set;
309
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
312 rtx *reg_next_use;
313
314 /* Contains a list of all the MEMs we are tracking for dead store
315 elimination. */
316 rtx mem_set_list;
317
318 /* If non-null, record the set of registers set unconditionally in the
319 basic block. */
320 regset local_set;
321
322 /* If non-null, record the set of registers set conditionally in the
323 basic block. */
324 regset cond_local_set;
325
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
330
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
332 regset reg_cond_reg;
333 #endif
334
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
337
338 /* Non-zero if the value of CC0 is live. */
339 int cc0_live;
340
341 /* Flags controling the set of information propagate_block collects. */
342 int flags;
343 };
344
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
348
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
352 basic_block *stack;
353
354 /* number of edges in the stack. That is, positions 0, ..., sp-1
355 have edges. */
356 unsigned int sp;
357
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
360 };
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
362
363 /* Have print_rtl_and_abort give the same information that fancy_abort
364 does. */
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
367
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
373 rtx *, rtx *));
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx, int, int, int));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
379 rtx, int));
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
381
382 static void commit_one_edge_insertion PARAMS ((edge));
383
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
390 basic_block));
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
392 basic_block));
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
394 int));
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool can_fallthru PARAMS ((basic_block, basic_block));
397 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
398 static bool try_simplify_condjump PARAMS ((basic_block));
399 static bool try_forward_edges PARAMS ((basic_block));
400 static void tidy_fallthru_edges PARAMS ((void));
401 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
402 static void verify_wide_reg PARAMS ((int, rtx, rtx));
403 static void verify_local_live_at_start PARAMS ((regset, basic_block));
404 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
405 static void notice_stack_pointer_modification PARAMS ((rtx));
406 static void mark_reg PARAMS ((rtx, void *));
407 static void mark_regs_live_at_end PARAMS ((regset));
408 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
409 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
410 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
411 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
412 static int insn_dead_p PARAMS ((struct propagate_block_info *,
413 rtx, int, rtx));
414 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
415 rtx, rtx));
416 static void mark_set_regs PARAMS ((struct propagate_block_info *,
417 rtx, rtx));
418 static void mark_set_1 PARAMS ((struct propagate_block_info *,
419 enum rtx_code, rtx, rtx,
420 rtx, int));
421 #ifdef HAVE_conditional_execution
422 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
423 int, rtx));
424 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
425 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
426 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
427 int));
428 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
429 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
430 static rtx not_reg_cond PARAMS ((rtx));
431 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
432 #endif
433 #ifdef AUTO_INC_DEC
434 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
435 rtx, rtx, rtx, rtx, rtx));
436 static void find_auto_inc PARAMS ((struct propagate_block_info *,
437 rtx, rtx));
438 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
439 rtx));
440 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
441 #endif
442 static void mark_used_reg PARAMS ((struct propagate_block_info *,
443 rtx, rtx, rtx));
444 static void mark_used_regs PARAMS ((struct propagate_block_info *,
445 rtx, rtx, rtx));
446 void dump_flow_info PARAMS ((FILE *));
447 void debug_flow_info PARAMS ((void));
448 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
449 const char *))
450 ATTRIBUTE_NORETURN;
451
452 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
453 rtx));
454 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
455 rtx));
456 static void remove_fake_successors PARAMS ((basic_block));
457 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
458 FILE *));
459 static void flow_edge_list_print PARAMS ((const char *, const edge *,
460 int, FILE *));
461 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
462 FILE *));
463 static int flow_loop_nested_p PARAMS ((struct loop *,
464 struct loop *));
465 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
466 edge **));
467 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
468 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
469 static void flow_dfs_compute_reverse_init
470 PARAMS ((depth_first_search_ds));
471 static void flow_dfs_compute_reverse_add_bb
472 PARAMS ((depth_first_search_ds, basic_block));
473 static basic_block flow_dfs_compute_reverse_execute
474 PARAMS ((depth_first_search_ds));
475 static void flow_dfs_compute_reverse_finish
476 PARAMS ((depth_first_search_ds));
477 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
478 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
479 const sbitmap *));
480 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
481 static void flow_loops_tree_build PARAMS ((struct loops *));
482 static int flow_loop_level_compute PARAMS ((struct loop *, int));
483 static int flow_loops_level_compute PARAMS ((struct loops *));
484 static void delete_dead_jumptables PARAMS ((void));
485 \f
486 /* Find basic blocks of the current function.
487 F is the first insn of the function and NREGS the number of register
488 numbers in use. */
489
490 void
491 find_basic_blocks (f, nregs, file)
492 rtx f;
493 int nregs ATTRIBUTE_UNUSED;
494 FILE *file ATTRIBUTE_UNUSED;
495 {
496 int max_uid;
497 timevar_push (TV_CFG);
498
499 /* Flush out existing data. */
500 if (basic_block_info != NULL)
501 {
502 int i;
503
504 clear_edges ();
505
506 /* Clear bb->aux on all extant basic blocks. We'll use this as a
507 tag for reuse during create_basic_block, just in case some pass
508 copies around basic block notes improperly. */
509 for (i = 0; i < n_basic_blocks; ++i)
510 BASIC_BLOCK (i)->aux = NULL;
511
512 VARRAY_FREE (basic_block_info);
513 }
514
515 n_basic_blocks = count_basic_blocks (f);
516
517 /* Size the basic block table. The actual structures will be allocated
518 by find_basic_blocks_1, since we want to keep the structure pointers
519 stable across calls to find_basic_blocks. */
520 /* ??? This whole issue would be much simpler if we called find_basic_blocks
521 exactly once, and thereafter we don't have a single long chain of
522 instructions at all until close to the end of compilation when we
523 actually lay them out. */
524
525 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
526
527 find_basic_blocks_1 (f);
528
529 /* Record the block to which an insn belongs. */
530 /* ??? This should be done another way, by which (perhaps) a label is
531 tagged directly with the basic block that it starts. It is used for
532 more than that currently, but IMO that is the only valid use. */
533
534 max_uid = get_max_uid ();
535 #ifdef AUTO_INC_DEC
536 /* Leave space for insns life_analysis makes in some cases for auto-inc.
537 These cases are rare, so we don't need too much space. */
538 max_uid += max_uid / 10;
539 #endif
540
541 compute_bb_for_insn (max_uid);
542
543 /* Discover the edges of our cfg. */
544 make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
545
546 /* Do very simple cleanup now, for the benefit of code that runs between
547 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
548 tidy_fallthru_edges ();
549
550 mark_critical_edges ();
551
552 #ifdef ENABLE_CHECKING
553 verify_flow_info ();
554 #endif
555 timevar_pop (TV_CFG);
556 }
557
558 void
559 check_function_return_warnings ()
560 {
561 if (warn_missing_noreturn
562 && !TREE_THIS_VOLATILE (cfun->decl)
563 && EXIT_BLOCK_PTR->pred == NULL
564 && (lang_missing_noreturn_ok_p
565 && !lang_missing_noreturn_ok_p (cfun->decl)))
566 warning ("function might be possible candidate for attribute `noreturn'");
567
568 /* If we have a path to EXIT, then we do return. */
569 if (TREE_THIS_VOLATILE (cfun->decl)
570 && EXIT_BLOCK_PTR->pred != NULL)
571 warning ("`noreturn' function does return");
572
573 /* If the clobber_return_insn appears in some basic block, then we
574 do reach the end without returning a value. */
575 else if (warn_return_type
576 && cfun->x_clobber_return_insn != NULL
577 && EXIT_BLOCK_PTR->pred != NULL)
578 {
579 int max_uid = get_max_uid ();
580
581 /* If clobber_return_insn was excised by jump1, then renumber_insns
582 can make max_uid smaller than the number still recorded in our rtx.
583 That's fine, since this is a quick way of verifying that the insn
584 is no longer in the chain. */
585 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
586 {
587 /* Recompute insn->block mapping, since the initial mapping is
588 set before we delete unreachable blocks. */
589 compute_bb_for_insn (max_uid);
590
591 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
592 warning ("control reaches end of non-void function");
593 }
594 }
595 }
596
597 /* Count the basic blocks of the function. */
598
599 static int
600 count_basic_blocks (f)
601 rtx f;
602 {
603 register rtx insn;
604 register RTX_CODE prev_code;
605 register int count = 0;
606 int saw_abnormal_edge = 0;
607
608 prev_code = JUMP_INSN;
609 for (insn = f; insn; insn = NEXT_INSN (insn))
610 {
611 enum rtx_code code = GET_CODE (insn);
612
613 if (code == CODE_LABEL
614 || (GET_RTX_CLASS (code) == 'i'
615 && (prev_code == JUMP_INSN
616 || prev_code == BARRIER
617 || saw_abnormal_edge)))
618 {
619 saw_abnormal_edge = 0;
620 count++;
621 }
622
623 /* Record whether this insn created an edge. */
624 if (code == CALL_INSN)
625 {
626 rtx note;
627
628 /* If there is a nonlocal goto label and the specified
629 region number isn't -1, we have an edge. */
630 if (nonlocal_goto_handler_labels
631 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
632 || INTVAL (XEXP (note, 0)) >= 0))
633 saw_abnormal_edge = 1;
634
635 else if (can_throw_internal (insn))
636 saw_abnormal_edge = 1;
637 }
638 else if (flag_non_call_exceptions
639 && code == INSN
640 && can_throw_internal (insn))
641 saw_abnormal_edge = 1;
642
643 if (code != NOTE)
644 prev_code = code;
645 }
646
647 /* The rest of the compiler works a bit smoother when we don't have to
648 check for the edge case of do-nothing functions with no basic blocks. */
649 if (count == 0)
650 {
651 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
652 count = 1;
653 }
654
655 return count;
656 }
657
658 /* Scan a list of insns for labels referred to other than by jumps.
659 This is used to scan the alternatives of a call placeholder. */
660 static rtx
661 find_label_refs (f, lvl)
662 rtx f;
663 rtx lvl;
664 {
665 rtx insn;
666
667 for (insn = f; insn; insn = NEXT_INSN (insn))
668 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
669 {
670 rtx note;
671
672 /* Make a list of all labels referred to other than by jumps
673 (which just don't have the REG_LABEL notes).
674
675 Make a special exception for labels followed by an ADDR*VEC,
676 as this would be a part of the tablejump setup code.
677
678 Make a special exception to registers loaded with label
679 values just before jump insns that use them. */
680
681 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
682 if (REG_NOTE_KIND (note) == REG_LABEL)
683 {
684 rtx lab = XEXP (note, 0), next;
685
686 if ((next = next_nonnote_insn (lab)) != NULL
687 && GET_CODE (next) == JUMP_INSN
688 && (GET_CODE (PATTERN (next)) == ADDR_VEC
689 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
690 ;
691 else if (GET_CODE (lab) == NOTE)
692 ;
693 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
694 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
695 ;
696 else
697 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
698 }
699 }
700
701 return lvl;
702 }
703
704 /* Assume that someone emitted code with control flow instructions to the
705 basic block. Update the data structure. */
706 void
707 find_sub_basic_blocks (bb)
708 basic_block bb;
709 {
710 rtx insn = bb->head;
711 rtx end = bb->end;
712 rtx jump_insn = NULL_RTX;
713 edge falltru = 0;
714 basic_block first_bb = bb;
715
716 if (insn == bb->end)
717 return;
718
719 if (GET_CODE (insn) == CODE_LABEL)
720 insn = NEXT_INSN (insn);
721
722 /* Scan insn chain and try to find new basic block boundaries. */
723 while (1)
724 {
725 enum rtx_code code = GET_CODE (insn);
726 switch (code)
727 {
728 case BARRIER:
729 if (!jump_insn)
730 abort ();
731 break;
732 /* On code label, split current basic block. */
733 case CODE_LABEL:
734 falltru = split_block (bb, PREV_INSN (insn));
735 if (jump_insn)
736 bb->end = jump_insn;
737 bb = falltru->dest;
738 remove_edge (falltru);
739 jump_insn = 0;
740 if (LABEL_ALTERNATE_NAME (insn))
741 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
742 break;
743 case INSN:
744 case JUMP_INSN:
745 /* In case we've previously split insn on the JUMP_INSN, move the
746 block header to proper place. */
747 if (jump_insn)
748 {
749 falltru = split_block (bb, PREV_INSN (insn));
750 bb->end = jump_insn;
751 bb = falltru->dest;
752 remove_edge (falltru);
753 jump_insn = 0;
754 }
755 /* We need some special care for those expressions. */
756 if (GET_CODE (insn) == JUMP_INSN)
757 {
758 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
759 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
760 abort();
761 jump_insn = insn;
762 }
763 break;
764 default:
765 break;
766 }
767 if (insn == end)
768 break;
769 insn = NEXT_INSN (insn);
770 }
771
772 /* In case expander replaced normal insn by sequence terminating by
773 return and barrier, or possibly other sequence not behaving like
774 ordinary jump, we need to take care and move basic block boundary. */
775 if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
776 bb->end = jump_insn;
777
778 /* We've possibly replaced the conditional jump by conditional jump
779 followed by cleanup at fallthru edge, so the outgoing edges may
780 be dead. */
781 purge_dead_edges (bb);
782
783 /* Now re-scan and wire in all edges. This expect simple (conditional)
784 jumps at the end of each new basic blocks. */
785 make_edges (NULL, first_bb->index, bb->index, 1);
786 }
787
788 /* Find all basic blocks of the function whose first insn is F.
789
790 Collect and return a list of labels whose addresses are taken. This
791 will be used in make_edges for use with computed gotos. */
792
793 static void
794 find_basic_blocks_1 (f)
795 rtx f;
796 {
797 register rtx insn, next;
798 int i = 0;
799 rtx bb_note = NULL_RTX;
800 rtx lvl = NULL_RTX;
801 rtx trll = NULL_RTX;
802 rtx head = NULL_RTX;
803 rtx end = NULL_RTX;
804
805 /* We process the instructions in a slightly different way than we did
806 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
807 closed out the previous block, so that it gets attached at the proper
808 place. Since this form should be equivalent to the previous,
809 count_basic_blocks continues to use the old form as a check. */
810
811 for (insn = f; insn; insn = next)
812 {
813 enum rtx_code code = GET_CODE (insn);
814
815 next = NEXT_INSN (insn);
816
817 switch (code)
818 {
819 case NOTE:
820 {
821 int kind = NOTE_LINE_NUMBER (insn);
822
823 /* Look for basic block notes with which to keep the
824 basic_block_info pointers stable. Unthread the note now;
825 we'll put it back at the right place in create_basic_block.
826 Or not at all if we've already found a note in this block. */
827 if (kind == NOTE_INSN_BASIC_BLOCK)
828 {
829 if (bb_note == NULL_RTX)
830 bb_note = insn;
831 else
832 next = flow_delete_insn (insn);
833 }
834 break;
835 }
836
837 case CODE_LABEL:
838 /* A basic block starts at a label. If we've closed one off due
839 to a barrier or some such, no need to do it again. */
840 if (head != NULL_RTX)
841 {
842 create_basic_block (i++, head, end, bb_note);
843 bb_note = NULL_RTX;
844 }
845
846 head = end = insn;
847 break;
848
849 case JUMP_INSN:
850 /* A basic block ends at a jump. */
851 if (head == NULL_RTX)
852 head = insn;
853 else
854 {
855 /* ??? Make a special check for table jumps. The way this
856 happens is truly and amazingly gross. We are about to
857 create a basic block that contains just a code label and
858 an addr*vec jump insn. Worse, an addr_diff_vec creates
859 its own natural loop.
860
861 Prevent this bit of brain damage, pasting things together
862 correctly in make_edges.
863
864 The correct solution involves emitting the table directly
865 on the tablejump instruction as a note, or JUMP_LABEL. */
866
867 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
868 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
869 {
870 head = end = NULL;
871 n_basic_blocks--;
872 break;
873 }
874 }
875 end = insn;
876 goto new_bb_inclusive;
877
878 case BARRIER:
879 /* A basic block ends at a barrier. It may be that an unconditional
880 jump already closed the basic block -- no need to do it again. */
881 if (head == NULL_RTX)
882 break;
883 goto new_bb_exclusive;
884
885 case CALL_INSN:
886 {
887 /* Record whether this call created an edge. */
888 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
889 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
890
891 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
892 {
893 /* Scan each of the alternatives for label refs. */
894 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
895 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
896 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
897 /* Record its tail recursion label, if any. */
898 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
899 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
900 }
901
902 /* A basic block ends at a call that can either throw or
903 do a non-local goto. */
904 if ((nonlocal_goto_handler_labels && region >= 0)
905 || can_throw_internal (insn))
906 {
907 new_bb_inclusive:
908 if (head == NULL_RTX)
909 head = insn;
910 end = insn;
911
912 new_bb_exclusive:
913 create_basic_block (i++, head, end, bb_note);
914 head = end = NULL_RTX;
915 bb_note = NULL_RTX;
916 break;
917 }
918 }
919 /* Fall through. */
920
921 case INSN:
922 /* Non-call exceptions generate new blocks just like calls. */
923 if (flag_non_call_exceptions && can_throw_internal (insn))
924 goto new_bb_inclusive;
925
926 if (head == NULL_RTX)
927 head = insn;
928 end = insn;
929 break;
930
931 default:
932 abort ();
933 }
934
935 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
936 {
937 rtx note;
938
939 /* Make a list of all labels referred to other than by jumps.
940
941 Make a special exception for labels followed by an ADDR*VEC,
942 as this would be a part of the tablejump setup code.
943
944 Make a special exception to registers loaded with label
945 values just before jump insns that use them. */
946
947 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
948 if (REG_NOTE_KIND (note) == REG_LABEL)
949 {
950 rtx lab = XEXP (note, 0), next;
951
952 if ((next = next_nonnote_insn (lab)) != NULL
953 && GET_CODE (next) == JUMP_INSN
954 && (GET_CODE (PATTERN (next)) == ADDR_VEC
955 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
956 ;
957 else if (GET_CODE (lab) == NOTE)
958 ;
959 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
960 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
961 ;
962 else
963 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
964 }
965 }
966 }
967
968 if (head != NULL_RTX)
969 create_basic_block (i++, head, end, bb_note);
970 else if (bb_note)
971 flow_delete_insn (bb_note);
972
973 if (i != n_basic_blocks)
974 abort ();
975
976 label_value_list = lvl;
977 tail_recursion_label_list = trll;
978 }
979
980 /* Tidy the CFG by deleting unreachable code and whatnot. */
981
982 void
983 cleanup_cfg (mode)
984 int mode;
985 {
986 timevar_push (TV_CLEANUP_CFG);
987 delete_unreachable_blocks ();
988 if (try_optimize_cfg (mode))
989 delete_unreachable_blocks ();
990 mark_critical_edges ();
991
992 /* Kill the data we won't maintain. */
993 free_EXPR_LIST_list (&label_value_list);
994 free_EXPR_LIST_list (&tail_recursion_label_list);
995 timevar_pop (TV_CLEANUP_CFG);
996 }
997
998 /* Create a new basic block consisting of the instructions between
999 HEAD and END inclusive. Reuses the note and basic block struct
1000 in BB_NOTE, if any. */
1001
1002 void
1003 create_basic_block (index, head, end, bb_note)
1004 int index;
1005 rtx head, end, bb_note;
1006 {
1007 basic_block bb;
1008
1009 if (bb_note
1010 && ! RTX_INTEGRATED_P (bb_note)
1011 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1012 && bb->aux == NULL)
1013 {
1014 /* If we found an existing note, thread it back onto the chain. */
1015
1016 rtx after;
1017
1018 if (GET_CODE (head) == CODE_LABEL)
1019 after = head;
1020 else
1021 {
1022 after = PREV_INSN (head);
1023 head = bb_note;
1024 }
1025
1026 if (after != bb_note && NEXT_INSN (after) != bb_note)
1027 reorder_insns (bb_note, bb_note, after);
1028 }
1029 else
1030 {
1031 /* Otherwise we must create a note and a basic block structure.
1032 Since we allow basic block structs in rtl, give the struct
1033 the same lifetime by allocating it off the function obstack
1034 rather than using malloc. */
1035
1036 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1037 memset (bb, 0, sizeof (*bb));
1038
1039 if (GET_CODE (head) == CODE_LABEL)
1040 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1041 else
1042 {
1043 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1044 head = bb_note;
1045 }
1046 NOTE_BASIC_BLOCK (bb_note) = bb;
1047 }
1048
1049 /* Always include the bb note in the block. */
1050 if (NEXT_INSN (end) == bb_note)
1051 end = bb_note;
1052
1053 bb->head = head;
1054 bb->end = end;
1055 bb->index = index;
1056 BASIC_BLOCK (index) = bb;
1057
1058 /* Tag the block so that we know it has been used when considering
1059 other basic block notes. */
1060 bb->aux = bb;
1061 }
1062 \f
1063 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1064 note associated with the BLOCK. */
1065
1066 rtx
1067 first_insn_after_basic_block_note (block)
1068 basic_block block;
1069 {
1070 rtx insn;
1071
1072 /* Get the first instruction in the block. */
1073 insn = block->head;
1074
1075 if (insn == NULL_RTX)
1076 return NULL_RTX;
1077 if (GET_CODE (insn) == CODE_LABEL)
1078 insn = NEXT_INSN (insn);
1079 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1080 abort ();
1081
1082 return NEXT_INSN (insn);
1083 }
1084
1085 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1086 indexed by INSN_UID. MAX is the size of the array. */
1087
1088 void
1089 compute_bb_for_insn (max)
1090 int max;
1091 {
1092 int i;
1093
1094 if (basic_block_for_insn)
1095 VARRAY_FREE (basic_block_for_insn);
1096 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1097
1098 for (i = 0; i < n_basic_blocks; ++i)
1099 {
1100 basic_block bb = BASIC_BLOCK (i);
1101 rtx insn, end;
1102
1103 end = bb->end;
1104 insn = bb->head;
1105 while (1)
1106 {
1107 int uid = INSN_UID (insn);
1108 if (uid < max)
1109 VARRAY_BB (basic_block_for_insn, uid) = bb;
1110 if (insn == end)
1111 break;
1112 insn = NEXT_INSN (insn);
1113 }
1114 }
1115 }
1116
1117 /* Free the memory associated with the edge structures. */
1118
1119 void
1120 clear_edges ()
1121 {
1122 int i;
1123 edge n, e;
1124
1125 for (i = 0; i < n_basic_blocks; ++i)
1126 {
1127 basic_block bb = BASIC_BLOCK (i);
1128
1129 for (e = bb->succ; e; e = n)
1130 {
1131 n = e->succ_next;
1132 free (e);
1133 }
1134
1135 bb->succ = 0;
1136 bb->pred = 0;
1137 }
1138
1139 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1140 {
1141 n = e->succ_next;
1142 free (e);
1143 }
1144
1145 ENTRY_BLOCK_PTR->succ = 0;
1146 EXIT_BLOCK_PTR->pred = 0;
1147
1148 n_edges = 0;
1149 }
1150
1151 /* Identify the edges between basic blocks MIN to MAX.
1152
1153 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1154 that are otherwise unreachable may be reachable with a non-local goto.
1155
1156 BB_EH_END is an array indexed by basic block number in which we record
1157 the list of exception regions active at the end of the basic block. */
1158
1159 static void
1160 make_edges (label_value_list, min, max, update_p)
1161 rtx label_value_list;
1162 int min, max, update_p;
1163 {
1164 int i;
1165 sbitmap *edge_cache = NULL;
1166
1167 /* Assume no computed jump; revise as we create edges. */
1168 current_function_has_computed_jump = 0;
1169
1170 /* Heavy use of computed goto in machine-generated code can lead to
1171 nearly fully-connected CFGs. In that case we spend a significant
1172 amount of time searching the edge lists for duplicates. */
1173 if (forced_labels || label_value_list)
1174 {
1175 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1176 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1177
1178 if (update_p)
1179 for (i = min; i <= max; ++i)
1180 {
1181 edge e;
1182 for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
1183 if (e->dest != EXIT_BLOCK_PTR)
1184 SET_BIT (edge_cache[i], e->dest->index);
1185 }
1186 }
1187
1188 /* By nature of the way these get numbered, block 0 is always the entry. */
1189 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1190
1191 for (i = min; i <= max; ++i)
1192 {
1193 basic_block bb = BASIC_BLOCK (i);
1194 rtx insn, x;
1195 enum rtx_code code;
1196 int force_fallthru = 0;
1197
1198 if (GET_CODE (bb->head) == CODE_LABEL
1199 && LABEL_ALTERNATE_NAME (bb->head))
1200 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1201
1202 /* Examine the last instruction of the block, and discover the
1203 ways we can leave the block. */
1204
1205 insn = bb->end;
1206 code = GET_CODE (insn);
1207
1208 /* A branch. */
1209 if (code == JUMP_INSN)
1210 {
1211 rtx tmp;
1212
1213 /* Recognize exception handling placeholders. */
1214 if (GET_CODE (PATTERN (insn)) == RESX)
1215 make_eh_edge (edge_cache, bb, insn);
1216
1217 /* Recognize a non-local goto as a branch outside the
1218 current function. */
1219 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1220 ;
1221
1222 /* ??? Recognize a tablejump and do the right thing. */
1223 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1224 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1225 && GET_CODE (tmp) == JUMP_INSN
1226 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1227 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1228 {
1229 rtvec vec;
1230 int j;
1231
1232 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1233 vec = XVEC (PATTERN (tmp), 0);
1234 else
1235 vec = XVEC (PATTERN (tmp), 1);
1236
1237 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1238 make_label_edge (edge_cache, bb,
1239 XEXP (RTVEC_ELT (vec, j), 0), 0);
1240
1241 /* Some targets (eg, ARM) emit a conditional jump that also
1242 contains the out-of-range target. Scan for these and
1243 add an edge if necessary. */
1244 if ((tmp = single_set (insn)) != NULL
1245 && SET_DEST (tmp) == pc_rtx
1246 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1247 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1248 make_label_edge (edge_cache, bb,
1249 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1250
1251 #ifdef CASE_DROPS_THROUGH
1252 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1253 us naturally detecting fallthru into the next block. */
1254 force_fallthru = 1;
1255 #endif
1256 }
1257
1258 /* If this is a computed jump, then mark it as reaching
1259 everything on the label_value_list and forced_labels list. */
1260 else if (computed_jump_p (insn))
1261 {
1262 current_function_has_computed_jump = 1;
1263
1264 for (x = label_value_list; x; x = XEXP (x, 1))
1265 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1266
1267 for (x = forced_labels; x; x = XEXP (x, 1))
1268 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1269 }
1270
1271 /* Returns create an exit out. */
1272 else if (returnjump_p (insn))
1273 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1274
1275 /* Otherwise, we have a plain conditional or unconditional jump. */
1276 else
1277 {
1278 if (! JUMP_LABEL (insn))
1279 abort ();
1280 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1281 }
1282 }
1283
1284 /* If this is a sibling call insn, then this is in effect a
1285 combined call and return, and so we need an edge to the
1286 exit block. No need to worry about EH edges, since we
1287 wouldn't have created the sibling call in the first place. */
1288
1289 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1290 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1291 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1292
1293 /* If this is a CALL_INSN, then mark it as reaching the active EH
1294 handler for this CALL_INSN. If we're handling non-call
1295 exceptions then any insn can reach any of the active handlers.
1296
1297 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1298
1299 else if (code == CALL_INSN || flag_non_call_exceptions)
1300 {
1301 /* Add any appropriate EH edges. */
1302 make_eh_edge (edge_cache, bb, insn);
1303
1304 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1305 {
1306 /* ??? This could be made smarter: in some cases it's possible
1307 to tell that certain calls will not do a nonlocal goto.
1308
1309 For example, if the nested functions that do the nonlocal
1310 gotos do not have their addresses taken, then only calls to
1311 those functions or to other nested functions that use them
1312 could possibly do nonlocal gotos. */
1313 /* We do know that a REG_EH_REGION note with a value less
1314 than 0 is guaranteed not to perform a non-local goto. */
1315 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1316 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1317 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1318 make_label_edge (edge_cache, bb, XEXP (x, 0),
1319 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1320 }
1321 }
1322
1323 /* Find out if we can drop through to the next block. */
1324 insn = next_nonnote_insn (insn);
1325 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1326 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1327 else if (i + 1 < n_basic_blocks)
1328 {
1329 rtx tmp = BLOCK_HEAD (i + 1);
1330 if (GET_CODE (tmp) == NOTE)
1331 tmp = next_nonnote_insn (tmp);
1332 if (force_fallthru || insn == tmp)
1333 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1334 }
1335 }
1336
1337 if (edge_cache)
1338 sbitmap_vector_free (edge_cache);
1339 }
1340
1341 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1342 about the edge that is accumulated between calls. */
1343
1344 void
1345 make_edge (edge_cache, src, dst, flags)
1346 sbitmap *edge_cache;
1347 basic_block src, dst;
1348 int flags;
1349 {
1350 int use_edge_cache;
1351 edge e;
1352
1353 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1354 many edges to them, and we didn't allocate memory for it. */
1355 use_edge_cache = (edge_cache
1356 && src != ENTRY_BLOCK_PTR
1357 && dst != EXIT_BLOCK_PTR);
1358
1359 /* Make sure we don't add duplicate edges. */
1360 switch (use_edge_cache)
1361 {
1362 default:
1363 /* Quick test for non-existance of the edge. */
1364 if (! TEST_BIT (edge_cache[src->index], dst->index))
1365 break;
1366
1367 /* The edge exists; early exit if no work to do. */
1368 if (flags == 0)
1369 return;
1370
1371 /* FALLTHRU */
1372 case 0:
1373 for (e = src->succ; e; e = e->succ_next)
1374 if (e->dest == dst)
1375 {
1376 e->flags |= flags;
1377 return;
1378 }
1379 break;
1380 }
1381
1382 e = (edge) xcalloc (1, sizeof (*e));
1383 n_edges++;
1384
1385 e->succ_next = src->succ;
1386 e->pred_next = dst->pred;
1387 e->src = src;
1388 e->dest = dst;
1389 e->flags = flags;
1390
1391 src->succ = e;
1392 dst->pred = e;
1393
1394 if (use_edge_cache)
1395 SET_BIT (edge_cache[src->index], dst->index);
1396 }
1397
1398 /* Create an edge from a basic block to a label. */
1399
1400 static void
1401 make_label_edge (edge_cache, src, label, flags)
1402 sbitmap *edge_cache;
1403 basic_block src;
1404 rtx label;
1405 int flags;
1406 {
1407 if (GET_CODE (label) != CODE_LABEL)
1408 abort ();
1409
1410 /* If the label was never emitted, this insn is junk, but avoid a
1411 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1412 as a result of a syntax error and a diagnostic has already been
1413 printed. */
1414
1415 if (INSN_UID (label) == 0)
1416 return;
1417
1418 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1419 }
1420
1421 /* Create the edges generated by INSN in REGION. */
1422
1423 static void
1424 make_eh_edge (edge_cache, src, insn)
1425 sbitmap *edge_cache;
1426 basic_block src;
1427 rtx insn;
1428 {
1429 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1430 rtx handlers, i;
1431
1432 handlers = reachable_handlers (insn);
1433
1434 for (i = handlers; i; i = XEXP (i, 1))
1435 make_label_edge (edge_cache, src, XEXP (i, 0),
1436 EDGE_ABNORMAL | EDGE_EH | is_call);
1437
1438 free_INSN_LIST_list (&handlers);
1439 }
1440
1441 /* Identify critical edges and set the bits appropriately. */
1442
1443 void
1444 mark_critical_edges ()
1445 {
1446 int i, n = n_basic_blocks;
1447 basic_block bb;
1448
1449 /* We begin with the entry block. This is not terribly important now,
1450 but could be if a front end (Fortran) implemented alternate entry
1451 points. */
1452 bb = ENTRY_BLOCK_PTR;
1453 i = -1;
1454
1455 while (1)
1456 {
1457 edge e;
1458
1459 /* (1) Critical edges must have a source with multiple successors. */
1460 if (bb->succ && bb->succ->succ_next)
1461 {
1462 for (e = bb->succ; e; e = e->succ_next)
1463 {
1464 /* (2) Critical edges must have a destination with multiple
1465 predecessors. Note that we know there is at least one
1466 predecessor -- the edge we followed to get here. */
1467 if (e->dest->pred->pred_next)
1468 e->flags |= EDGE_CRITICAL;
1469 else
1470 e->flags &= ~EDGE_CRITICAL;
1471 }
1472 }
1473 else
1474 {
1475 for (e = bb->succ; e; e = e->succ_next)
1476 e->flags &= ~EDGE_CRITICAL;
1477 }
1478
1479 if (++i >= n)
1480 break;
1481 bb = BASIC_BLOCK (i);
1482 }
1483 }
1484 \f
1485 /* Split a block BB after insn INSN creating a new fallthru edge.
1486 Return the new edge. Note that to keep other parts of the compiler happy,
1487 this function renumbers all the basic blocks so that the new
1488 one has a number one greater than the block split. */
1489
1490 edge
1491 split_block (bb, insn)
1492 basic_block bb;
1493 rtx insn;
1494 {
1495 basic_block new_bb;
1496 edge new_edge;
1497 edge e;
1498 rtx bb_note;
1499 int i, j;
1500
1501 /* There is no point splitting the block after its end. */
1502 if (bb->end == insn)
1503 return 0;
1504
1505 /* Create the new structures. */
1506 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1507 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1508 n_edges++;
1509
1510 memset (new_bb, 0, sizeof (*new_bb));
1511
1512 new_bb->head = NEXT_INSN (insn);
1513 new_bb->end = bb->end;
1514 bb->end = insn;
1515
1516 new_bb->succ = bb->succ;
1517 bb->succ = new_edge;
1518 new_bb->pred = new_edge;
1519 new_bb->count = bb->count;
1520 new_bb->frequency = bb->frequency;
1521 new_bb->loop_depth = bb->loop_depth;
1522
1523 new_edge->src = bb;
1524 new_edge->dest = new_bb;
1525 new_edge->flags = EDGE_FALLTHRU;
1526 new_edge->probability = REG_BR_PROB_BASE;
1527 new_edge->count = bb->count;
1528
1529 /* Redirect the src of the successor edges of bb to point to new_bb. */
1530 for (e = new_bb->succ; e; e = e->succ_next)
1531 e->src = new_bb;
1532
1533 /* Place the new block just after the block being split. */
1534 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1535
1536 /* Some parts of the compiler expect blocks to be number in
1537 sequential order so insert the new block immediately after the
1538 block being split.. */
1539 j = bb->index;
1540 for (i = n_basic_blocks - 1; i > j + 1; --i)
1541 {
1542 basic_block tmp = BASIC_BLOCK (i - 1);
1543 BASIC_BLOCK (i) = tmp;
1544 tmp->index = i;
1545 }
1546
1547 BASIC_BLOCK (i) = new_bb;
1548 new_bb->index = i;
1549
1550 if (GET_CODE (new_bb->head) == CODE_LABEL)
1551 {
1552 /* Create the basic block note. */
1553 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1554 new_bb->head);
1555 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1556
1557 /* If the only thing in this new block was the label, make sure
1558 the block note gets included. */
1559 if (new_bb->head == new_bb->end)
1560 new_bb->end = bb_note;
1561 }
1562 else
1563 {
1564 /* Create the basic block note. */
1565 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1566 new_bb->head);
1567 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1568 new_bb->head = bb_note;
1569 }
1570
1571 update_bb_for_insn (new_bb);
1572
1573 if (bb->global_live_at_start)
1574 {
1575 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1576 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1577 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1578
1579 /* We now have to calculate which registers are live at the end
1580 of the split basic block and at the start of the new basic
1581 block. Start with those registers that are known to be live
1582 at the end of the original basic block and get
1583 propagate_block to determine which registers are live. */
1584 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1585 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1586 COPY_REG_SET (bb->global_live_at_end,
1587 new_bb->global_live_at_start);
1588 }
1589
1590 return new_edge;
1591 }
1592
1593 /* Return label in the head of basic block. Create one if it doesn't exist. */
1594 rtx
1595 block_label (block)
1596 basic_block block;
1597 {
1598 if (block == EXIT_BLOCK_PTR)
1599 return NULL_RTX;
1600 if (GET_CODE (block->head) != CODE_LABEL)
1601 {
1602 block->head = emit_label_before (gen_label_rtx (), block->head);
1603 if (basic_block_for_insn)
1604 set_block_for_insn (block->head, block);
1605 }
1606 return block->head;
1607 }
1608
1609 /* Return true if the block has no effect and only forwards control flow to
1610 its single destination. */
1611 bool
1612 forwarder_block_p (bb)
1613 basic_block bb;
1614 {
1615 rtx insn = bb->head;
1616 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1617 || !bb->succ || bb->succ->succ_next)
1618 return false;
1619
1620 while (insn != bb->end)
1621 {
1622 if (active_insn_p (insn))
1623 return false;
1624 insn = NEXT_INSN (insn);
1625 }
1626 return (!active_insn_p (insn)
1627 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1628 }
1629
1630 /* Return nonzero if we can reach target from src by falling trought. */
1631 static bool
1632 can_fallthru (src, target)
1633 basic_block src, target;
1634 {
1635 rtx insn = src->end;
1636 rtx insn2 = target->head;
1637
1638 if (src->index + 1 == target->index && !active_insn_p (insn2))
1639 insn2 = next_active_insn (insn2);
1640 /* ??? Later we may add code to move jump tables offline. */
1641 return next_active_insn (insn) == insn2;
1642 }
1643
1644 /* Attempt to perform edge redirection by replacing possibly complex jump
1645 instruction by unconditional jump or removing jump completely.
1646 This can apply only if all edges now point to the same block.
1647
1648 The parameters and return values are equivalent to redirect_edge_and_branch.
1649 */
1650 static bool
1651 try_redirect_by_replacing_jump (e, target)
1652 edge e;
1653 basic_block target;
1654 {
1655 basic_block src = e->src;
1656 rtx insn = src->end, kill_from;
1657 edge tmp;
1658 rtx set;
1659 int fallthru = 0;
1660
1661 /* Verify that all targets will be TARGET. */
1662 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1663 if (tmp->dest != target && tmp != e)
1664 break;
1665 if (tmp || !onlyjump_p (insn))
1666 return false;
1667
1668 /* Avoid removing branch with side effects. */
1669 set = single_set (insn);
1670 if (!set || side_effects_p (set))
1671 return false;
1672
1673 /* In case we zap a conditional jump, we'll need to kill
1674 the cc0 setter too. */
1675 kill_from = insn;
1676 #ifdef HAVE_cc0
1677 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1678 kill_from = PREV_INSN (insn);
1679 #endif
1680
1681 /* See if we can create the fallthru edge. */
1682 if (can_fallthru (src, target))
1683 {
1684 src->end = PREV_INSN (kill_from);
1685 if (rtl_dump_file)
1686 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1687 fallthru = 1;
1688
1689 /* Selectivly unlink whole insn chain. */
1690 flow_delete_insn_chain (kill_from, PREV_INSN (target->head));
1691 }
1692 /* If this already is simplejump, redirect it. */
1693 else if (simplejump_p (insn))
1694 {
1695 if (e->dest == target)
1696 return false;
1697 if (rtl_dump_file)
1698 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1699 INSN_UID (insn), e->dest->index, target->index);
1700 redirect_jump (insn, block_label (target), 0);
1701 }
1702 /* Or replace possibly complicated jump insn by simple jump insn. */
1703 else
1704 {
1705 rtx target_label = block_label (target);
1706 rtx barrier;
1707
1708 src->end = emit_jump_insn_before (gen_jump (target_label), kill_from);
1709 JUMP_LABEL (src->end) = target_label;
1710 LABEL_NUSES (target_label)++;
1711 if (basic_block_for_insn)
1712 set_block_for_new_insns (src->end, src);
1713 if (rtl_dump_file)
1714 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1715 INSN_UID (insn), INSN_UID (src->end));
1716
1717 flow_delete_insn_chain (kill_from, insn);
1718
1719 barrier = next_nonnote_insn (src->end);
1720 if (!barrier || GET_CODE (barrier) != BARRIER)
1721 emit_barrier_after (src->end);
1722 }
1723
1724 /* Keep only one edge out and set proper flags. */
1725 while (src->succ->succ_next)
1726 remove_edge (src->succ);
1727 e = src->succ;
1728 if (fallthru)
1729 e->flags = EDGE_FALLTHRU;
1730 else
1731 e->flags = 0;
1732 e->probability = REG_BR_PROB_BASE;
1733 e->count = src->count;
1734
1735 /* We don't want a block to end on a line-number note since that has
1736 the potential of changing the code between -g and not -g. */
1737 while (GET_CODE (e->src->end) == NOTE
1738 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1739 {
1740 rtx prev = PREV_INSN (e->src->end);
1741 flow_delete_insn (e->src->end);
1742 e->src->end = prev;
1743 }
1744
1745 if (e->dest != target)
1746 redirect_edge_succ (e, target);
1747 return true;
1748 }
1749
1750 /* Attempt to change code to redirect edge E to TARGET.
1751 Don't do that on expense of adding new instructions or reordering
1752 basic blocks.
1753
1754 Function can be also called with edge destionation equivalent to the
1755 TARGET. Then it should try the simplifications and do nothing if
1756 none is possible.
1757
1758 Return true if transformation suceeded. We still return flase in case
1759 E already destinated TARGET and we didn't managed to simplify instruction
1760 stream. */
1761 bool
1762 redirect_edge_and_branch (e, target)
1763 edge e;
1764 basic_block target;
1765 {
1766 rtx tmp;
1767 rtx old_label = e->dest->head;
1768 basic_block src = e->src;
1769 rtx insn = src->end;
1770
1771 if (e->flags & EDGE_COMPLEX)
1772 return false;
1773
1774 if (try_redirect_by_replacing_jump (e, target))
1775 return true;
1776 /* Do this fast path late, as we want above code to simplify for cases
1777 where called on single edge leaving basic block containing nontrivial
1778 jump insn. */
1779 else if (e->dest == target)
1780 return false;
1781
1782 /* We can only redirect non-fallthru edges of jump insn. */
1783 if (e->flags & EDGE_FALLTHRU)
1784 return false;
1785 if (GET_CODE (insn) != JUMP_INSN)
1786 return false;
1787
1788 /* Recognize a tablejump and adjust all matching cases. */
1789 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1790 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1791 && GET_CODE (tmp) == JUMP_INSN
1792 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1793 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1794 {
1795 rtvec vec;
1796 int j;
1797 rtx new_label = block_label (target);
1798
1799 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1800 vec = XVEC (PATTERN (tmp), 0);
1801 else
1802 vec = XVEC (PATTERN (tmp), 1);
1803
1804 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1805 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1806 {
1807 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1808 --LABEL_NUSES (old_label);
1809 ++LABEL_NUSES (new_label);
1810 }
1811
1812 /* Handle casesi dispatch insns */
1813 if ((tmp = single_set (insn)) != NULL
1814 && SET_DEST (tmp) == pc_rtx
1815 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1816 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1817 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1818 {
1819 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1820 new_label);
1821 --LABEL_NUSES (old_label);
1822 ++LABEL_NUSES (new_label);
1823 }
1824 }
1825 else
1826 {
1827 /* ?? We may play the games with moving the named labels from
1828 one basic block to the other in case only one computed_jump is
1829 available. */
1830 if (computed_jump_p (insn))
1831 return false;
1832
1833 /* A return instruction can't be redirected. */
1834 if (returnjump_p (insn))
1835 return false;
1836
1837 /* If the insn doesn't go where we think, we're confused. */
1838 if (JUMP_LABEL (insn) != old_label)
1839 abort ();
1840 redirect_jump (insn, block_label (target), 0);
1841 }
1842
1843 if (rtl_dump_file)
1844 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1845 e->src->index, e->dest->index, target->index);
1846 if (e->dest != target)
1847 redirect_edge_succ_nodup (e, target);
1848 return true;
1849 }
1850
1851 /* Redirect edge even at the expense of creating new jump insn or
1852 basic block. Return new basic block if created, NULL otherwise.
1853 Abort if converison is impossible. */
1854 basic_block
1855 redirect_edge_and_branch_force (e, target)
1856 edge e;
1857 basic_block target;
1858 {
1859 basic_block new_bb;
1860 edge new_edge;
1861 rtx label;
1862 rtx bb_note;
1863 int i, j;
1864
1865 if (redirect_edge_and_branch (e, target))
1866 return NULL;
1867 if (e->dest == target)
1868 return NULL;
1869 if (e->flags & EDGE_ABNORMAL)
1870 abort ();
1871 if (!(e->flags & EDGE_FALLTHRU))
1872 abort ();
1873
1874 e->flags &= ~EDGE_FALLTHRU;
1875 label = block_label (target);
1876 /* Case of the fallthru block. */
1877 if (!e->src->succ->succ_next)
1878 {
1879 e->src->end = emit_jump_insn_after (gen_jump (label), e->src->end);
1880 JUMP_LABEL (e->src->end) = label;
1881 LABEL_NUSES (label)++;
1882 if (basic_block_for_insn)
1883 set_block_for_new_insns (e->src->end, e->src);
1884 emit_barrier_after (e->src->end);
1885 if (rtl_dump_file)
1886 fprintf (rtl_dump_file,
1887 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
1888 INSN_UID (e->src->end), e->src->index, e->dest->index,
1889 target->index);
1890 redirect_edge_succ (e, target);
1891 return NULL;
1892 }
1893 /* Redirecting fallthru edge of the conditional needs extra work. */
1894
1895 if (rtl_dump_file)
1896 fprintf (rtl_dump_file,
1897 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
1898 INSN_UID (e->src->end), e->src->index, e->dest->index,
1899 target->index);
1900
1901 /* Create the new structures. */
1902 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1903 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1904 n_edges++;
1905
1906 memset (new_bb, 0, sizeof (*new_bb));
1907
1908 new_bb->end = new_bb->head = e->src->end;
1909 new_bb->succ = NULL;
1910 new_bb->pred = new_edge;
1911 new_bb->count = e->count;
1912 new_bb->frequency = e->probability * e->src->frequency / REG_BR_PROB_BASE;
1913 new_bb->loop_depth = e->dest->loop_depth;
1914
1915 new_edge->flags = EDGE_FALLTHRU;
1916 new_edge->probability = e->probability;
1917 new_edge->count = e->count;
1918
1919 if (e->dest->global_live_at_start)
1920 {
1921 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1922 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1923 COPY_REG_SET (new_bb->global_live_at_start,
1924 target->global_live_at_start);
1925 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
1926 }
1927
1928 /* Wire edge in. */
1929 new_edge->src = e->src;
1930 new_edge->dest = new_bb;
1931 new_edge->succ_next = e->src->succ;
1932 e->src->succ = new_edge;
1933 new_edge->pred_next = NULL;
1934
1935 /* Redirect old edge. */
1936 redirect_edge_succ (e, target);
1937 redirect_edge_pred (e, new_bb);
1938 e->probability = REG_BR_PROB_BASE;
1939
1940 /* Place the new block just after the block being split. */
1941 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1942
1943 /* Some parts of the compiler expect blocks to be number in
1944 sequential order so insert the new block immediately after the
1945 block being split.. */
1946 j = new_edge->src->index;
1947 for (i = n_basic_blocks - 1; i > j + 1; --i)
1948 {
1949 basic_block tmp = BASIC_BLOCK (i - 1);
1950 BASIC_BLOCK (i) = tmp;
1951 tmp->index = i;
1952 }
1953
1954 BASIC_BLOCK (i) = new_bb;
1955 new_bb->index = i;
1956
1957 /* Create the basic block note. */
1958 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
1959 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1960 new_bb->head = bb_note;
1961
1962 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
1963 JUMP_LABEL (new_bb->end) = label;
1964 LABEL_NUSES (label)++;
1965 if (basic_block_for_insn)
1966 set_block_for_new_insns (new_bb->end, new_bb);
1967 emit_barrier_after (new_bb->end);
1968 return new_bb;
1969 }
1970
1971 /* Split a (typically critical) edge. Return the new block.
1972 Abort on abnormal edges.
1973
1974 ??? The code generally expects to be called on critical edges.
1975 The case of a block ending in an unconditional jump to a
1976 block with multiple predecessors is not handled optimally. */
1977
1978 basic_block
1979 split_edge (edge_in)
1980 edge edge_in;
1981 {
1982 basic_block old_pred, bb, old_succ;
1983 edge edge_out;
1984 rtx bb_note;
1985 int i, j;
1986
1987 /* Abnormal edges cannot be split. */
1988 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1989 abort ();
1990
1991 old_pred = edge_in->src;
1992 old_succ = edge_in->dest;
1993
1994 /* Create the new structures. */
1995 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1996 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1997 n_edges++;
1998
1999 memset (bb, 0, sizeof (*bb));
2000
2001 /* ??? This info is likely going to be out of date very soon. */
2002 if (old_succ->global_live_at_start)
2003 {
2004 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2005 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2006 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2007 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2008 }
2009
2010 /* Wire them up. */
2011 bb->succ = edge_out;
2012 bb->count = edge_in->count;
2013 bb->frequency = (edge_in->probability * edge_in->src->frequency
2014 / REG_BR_PROB_BASE);
2015
2016 edge_in->flags &= ~EDGE_CRITICAL;
2017
2018 edge_out->pred_next = old_succ->pred;
2019 edge_out->succ_next = NULL;
2020 edge_out->src = bb;
2021 edge_out->dest = old_succ;
2022 edge_out->flags = EDGE_FALLTHRU;
2023 edge_out->probability = REG_BR_PROB_BASE;
2024 edge_out->count = edge_in->count;
2025
2026 old_succ->pred = edge_out;
2027
2028 /* Tricky case -- if there existed a fallthru into the successor
2029 (and we're not it) we must add a new unconditional jump around
2030 the new block we're actually interested in.
2031
2032 Further, if that edge is critical, this means a second new basic
2033 block must be created to hold it. In order to simplify correct
2034 insn placement, do this before we touch the existing basic block
2035 ordering for the block we were really wanting. */
2036 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2037 {
2038 edge e;
2039 for (e = edge_out->pred_next; e; e = e->pred_next)
2040 if (e->flags & EDGE_FALLTHRU)
2041 break;
2042
2043 if (e)
2044 {
2045 basic_block jump_block;
2046 rtx pos;
2047
2048 if ((e->flags & EDGE_CRITICAL) == 0
2049 && e->src != ENTRY_BLOCK_PTR)
2050 {
2051 /* Non critical -- we can simply add a jump to the end
2052 of the existing predecessor. */
2053 jump_block = e->src;
2054 }
2055 else
2056 {
2057 /* We need a new block to hold the jump. The simplest
2058 way to do the bulk of the work here is to recursively
2059 call ourselves. */
2060 jump_block = split_edge (e);
2061 e = jump_block->succ;
2062 }
2063
2064 /* Now add the jump insn ... */
2065 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2066 jump_block->end);
2067 jump_block->end = pos;
2068 if (basic_block_for_insn)
2069 set_block_for_new_insns (pos, jump_block);
2070 emit_barrier_after (pos);
2071
2072 /* ... let jump know that label is in use, ... */
2073 JUMP_LABEL (pos) = old_succ->head;
2074 ++LABEL_NUSES (old_succ->head);
2075
2076 /* ... and clear fallthru on the outgoing edge. */
2077 e->flags &= ~EDGE_FALLTHRU;
2078
2079 /* Continue splitting the interesting edge. */
2080 }
2081 }
2082
2083 /* Place the new block just in front of the successor. */
2084 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2085 if (old_succ == EXIT_BLOCK_PTR)
2086 j = n_basic_blocks - 1;
2087 else
2088 j = old_succ->index;
2089 for (i = n_basic_blocks - 1; i > j; --i)
2090 {
2091 basic_block tmp = BASIC_BLOCK (i - 1);
2092 BASIC_BLOCK (i) = tmp;
2093 tmp->index = i;
2094 }
2095 BASIC_BLOCK (i) = bb;
2096 bb->index = i;
2097
2098 /* Create the basic block note.
2099
2100 Where we place the note can have a noticable impact on the generated
2101 code. Consider this cfg:
2102
2103 E
2104 |
2105 0
2106 / \
2107 +->1-->2--->E
2108 | |
2109 +--+
2110
2111 If we need to insert an insn on the edge from block 0 to block 1,
2112 we want to ensure the instructions we insert are outside of any
2113 loop notes that physically sit between block 0 and block 1. Otherwise
2114 we confuse the loop optimizer into thinking the loop is a phony. */
2115 if (old_succ != EXIT_BLOCK_PTR
2116 && PREV_INSN (old_succ->head)
2117 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2118 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
2119 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2120 PREV_INSN (old_succ->head));
2121 else if (old_succ != EXIT_BLOCK_PTR)
2122 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2123 else
2124 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2125 NOTE_BASIC_BLOCK (bb_note) = bb;
2126 bb->head = bb->end = bb_note;
2127
2128 /* For non-fallthry edges, we must adjust the predecessor's
2129 jump instruction to target our new block. */
2130 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2131 {
2132 if (!redirect_edge_and_branch (edge_in, bb))
2133 abort ();
2134 }
2135 else
2136 redirect_edge_succ (edge_in, bb);
2137
2138 return bb;
2139 }
2140
2141 /* Queue instructions for insertion on an edge between two basic blocks.
2142 The new instructions and basic blocks (if any) will not appear in the
2143 CFG until commit_edge_insertions is called. */
2144
2145 void
2146 insert_insn_on_edge (pattern, e)
2147 rtx pattern;
2148 edge e;
2149 {
2150 /* We cannot insert instructions on an abnormal critical edge.
2151 It will be easier to find the culprit if we die now. */
2152 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2153 == (EDGE_ABNORMAL|EDGE_CRITICAL))
2154 abort ();
2155
2156 if (e->insns == NULL_RTX)
2157 start_sequence ();
2158 else
2159 push_to_sequence (e->insns);
2160
2161 emit_insn (pattern);
2162
2163 e->insns = get_insns ();
2164 end_sequence ();
2165 }
2166
2167 /* Update the CFG for the instructions queued on edge E. */
2168
2169 static void
2170 commit_one_edge_insertion (e)
2171 edge e;
2172 {
2173 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2174 basic_block bb;
2175
2176 /* Pull the insns off the edge now since the edge might go away. */
2177 insns = e->insns;
2178 e->insns = NULL_RTX;
2179
2180 /* Figure out where to put these things. If the destination has
2181 one predecessor, insert there. Except for the exit block. */
2182 if (e->dest->pred->pred_next == NULL
2183 && e->dest != EXIT_BLOCK_PTR)
2184 {
2185 bb = e->dest;
2186
2187 /* Get the location correct wrt a code label, and "nice" wrt
2188 a basic block note, and before everything else. */
2189 tmp = bb->head;
2190 if (GET_CODE (tmp) == CODE_LABEL)
2191 tmp = NEXT_INSN (tmp);
2192 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2193 tmp = NEXT_INSN (tmp);
2194 if (tmp == bb->head)
2195 before = tmp;
2196 else
2197 after = PREV_INSN (tmp);
2198 }
2199
2200 /* If the source has one successor and the edge is not abnormal,
2201 insert there. Except for the entry block. */
2202 else if ((e->flags & EDGE_ABNORMAL) == 0
2203 && e->src->succ->succ_next == NULL
2204 && e->src != ENTRY_BLOCK_PTR)
2205 {
2206 bb = e->src;
2207 /* It is possible to have a non-simple jump here. Consider a target
2208 where some forms of unconditional jumps clobber a register. This
2209 happens on the fr30 for example.
2210
2211 We know this block has a single successor, so we can just emit
2212 the queued insns before the jump. */
2213 if (GET_CODE (bb->end) == JUMP_INSN)
2214 {
2215 before = bb->end;
2216 }
2217 else
2218 {
2219 /* We'd better be fallthru, or we've lost track of what's what. */
2220 if ((e->flags & EDGE_FALLTHRU) == 0)
2221 abort ();
2222
2223 after = bb->end;
2224 }
2225 }
2226
2227 /* Otherwise we must split the edge. */
2228 else
2229 {
2230 bb = split_edge (e);
2231 after = bb->end;
2232 }
2233
2234 /* Now that we've found the spot, do the insertion. */
2235
2236 /* Set the new block number for these insns, if structure is allocated. */
2237 if (basic_block_for_insn)
2238 {
2239 rtx i;
2240 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2241 set_block_for_insn (i, bb);
2242 }
2243
2244 if (before)
2245 {
2246 emit_insns_before (insns, before);
2247 if (before == bb->head)
2248 bb->head = insns;
2249
2250 last = prev_nonnote_insn (before);
2251 }
2252 else
2253 {
2254 last = emit_insns_after (insns, after);
2255 if (after == bb->end)
2256 bb->end = last;
2257 }
2258
2259 if (returnjump_p (last))
2260 {
2261 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2262 This is not currently a problem because this only happens
2263 for the (single) epilogue, which already has a fallthru edge
2264 to EXIT. */
2265
2266 e = bb->succ;
2267 if (e->dest != EXIT_BLOCK_PTR
2268 || e->succ_next != NULL
2269 || (e->flags & EDGE_FALLTHRU) == 0)
2270 abort ();
2271 e->flags &= ~EDGE_FALLTHRU;
2272
2273 emit_barrier_after (last);
2274 bb->end = last;
2275
2276 if (before)
2277 flow_delete_insn (before);
2278 }
2279 else if (GET_CODE (last) == JUMP_INSN)
2280 abort ();
2281 find_sub_basic_blocks (bb);
2282 }
2283
2284 /* Update the CFG for all queued instructions. */
2285
2286 void
2287 commit_edge_insertions ()
2288 {
2289 int i;
2290 basic_block bb;
2291 compute_bb_for_insn (get_max_uid ());
2292
2293 #ifdef ENABLE_CHECKING
2294 verify_flow_info ();
2295 #endif
2296
2297 i = -1;
2298 bb = ENTRY_BLOCK_PTR;
2299 while (1)
2300 {
2301 edge e, next;
2302
2303 for (e = bb->succ; e; e = next)
2304 {
2305 next = e->succ_next;
2306 if (e->insns)
2307 commit_one_edge_insertion (e);
2308 }
2309
2310 if (++i >= n_basic_blocks)
2311 break;
2312 bb = BASIC_BLOCK (i);
2313 }
2314 }
2315
2316 /* Add fake edges to the function exit for any non constant calls in
2317 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2318 BLOCKS is zero. Return the nuber of blocks that were split. */
2319
2320 int
2321 flow_call_edges_add (blocks)
2322 sbitmap blocks;
2323 {
2324 int i;
2325 int blocks_split = 0;
2326 int bb_num = 0;
2327 basic_block *bbs;
2328
2329 /* Map bb indicies into basic block pointers since split_block
2330 will renumber the basic blocks. */
2331
2332 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2333
2334 if (! blocks)
2335 {
2336 for (i = 0; i < n_basic_blocks; i++)
2337 bbs[bb_num++] = BASIC_BLOCK (i);
2338 }
2339 else
2340 {
2341 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2342 {
2343 bbs[bb_num++] = BASIC_BLOCK (i);
2344 });
2345 }
2346
2347
2348 /* Now add fake edges to the function exit for any non constant
2349 calls since there is no way that we can determine if they will
2350 return or not... */
2351
2352 for (i = 0; i < bb_num; i++)
2353 {
2354 basic_block bb = bbs[i];
2355 rtx insn;
2356 rtx prev_insn;
2357
2358 for (insn = bb->end; ; insn = prev_insn)
2359 {
2360 prev_insn = PREV_INSN (insn);
2361 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2362 {
2363 edge e;
2364
2365 /* Note that the following may create a new basic block
2366 and renumber the existing basic blocks. */
2367 e = split_block (bb, insn);
2368 if (e)
2369 blocks_split++;
2370
2371 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2372 }
2373 if (insn == bb->head)
2374 break;
2375 }
2376 }
2377
2378 if (blocks_split)
2379 verify_flow_info ();
2380
2381 free (bbs);
2382 return blocks_split;
2383 }
2384 \f
2385 /* Find unreachable blocks. An unreachable block will have NULL in
2386 block->aux, a non-NULL value indicates the block is reachable. */
2387
2388 void
2389 find_unreachable_blocks ()
2390 {
2391 edge e;
2392 int i, n;
2393 basic_block *tos, *worklist;
2394
2395 n = n_basic_blocks;
2396 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2397
2398 /* Use basic_block->aux as a marker. Clear them all. */
2399
2400 for (i = 0; i < n; ++i)
2401 BASIC_BLOCK (i)->aux = NULL;
2402
2403 /* Add our starting points to the worklist. Almost always there will
2404 be only one. It isn't inconcievable that we might one day directly
2405 support Fortran alternate entry points. */
2406
2407 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2408 {
2409 *tos++ = e->dest;
2410
2411 /* Mark the block with a handy non-null value. */
2412 e->dest->aux = e;
2413 }
2414
2415 /* Iterate: find everything reachable from what we've already seen. */
2416
2417 while (tos != worklist)
2418 {
2419 basic_block b = *--tos;
2420
2421 for (e = b->succ; e; e = e->succ_next)
2422 if (!e->dest->aux)
2423 {
2424 *tos++ = e->dest;
2425 e->dest->aux = e;
2426 }
2427 }
2428
2429 free (worklist);
2430 }
2431
2432 /* Delete all unreachable basic blocks. */
2433 static void
2434 delete_unreachable_blocks ()
2435 {
2436 int i;
2437
2438 find_unreachable_blocks ();
2439
2440 /* Delete all unreachable basic blocks. Count down so that we
2441 don't interfere with the block renumbering that happens in
2442 flow_delete_block. */
2443
2444 for (i = n_basic_blocks - 1; i >= 0; --i)
2445 {
2446 basic_block b = BASIC_BLOCK (i);
2447
2448 if (b->aux != NULL)
2449 /* This block was found. Tidy up the mark. */
2450 b->aux = NULL;
2451 else
2452 flow_delete_block (b);
2453 }
2454
2455 tidy_fallthru_edges ();
2456 }
2457
2458 /* Return true if NOTE is not one of the ones that must be kept paired,
2459 so that we may simply delete them. */
2460
2461 static int
2462 can_delete_note_p (note)
2463 rtx note;
2464 {
2465 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2466 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2467 }
2468
2469 /* Unlink a chain of insns between START and FINISH, leaving notes
2470 that must be paired. */
2471
2472 void
2473 flow_delete_insn_chain (start, finish)
2474 rtx start, finish;
2475 {
2476 /* Unchain the insns one by one. It would be quicker to delete all
2477 of these with a single unchaining, rather than one at a time, but
2478 we need to keep the NOTE's. */
2479
2480 rtx next;
2481
2482 while (1)
2483 {
2484 next = NEXT_INSN (start);
2485 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2486 ;
2487 else if (GET_CODE (start) == CODE_LABEL
2488 && ! can_delete_label_p (start))
2489 {
2490 const char *name = LABEL_NAME (start);
2491 PUT_CODE (start, NOTE);
2492 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2493 NOTE_SOURCE_FILE (start) = name;
2494 }
2495 else
2496 next = flow_delete_insn (start);
2497
2498 if (start == finish)
2499 break;
2500 start = next;
2501 }
2502 }
2503
2504 /* Delete the insns in a (non-live) block. We physically delete every
2505 non-deleted-note insn, and update the flow graph appropriately.
2506
2507 Return nonzero if we deleted an exception handler. */
2508
2509 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2510 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2511
2512 int
2513 flow_delete_block (b)
2514 basic_block b;
2515 {
2516 int deleted_handler = 0;
2517 rtx insn, end, tmp;
2518
2519 /* If the head of this block is a CODE_LABEL, then it might be the
2520 label for an exception handler which can't be reached.
2521
2522 We need to remove the label from the exception_handler_label list
2523 and remove the associated NOTE_INSN_EH_REGION_BEG and
2524 NOTE_INSN_EH_REGION_END notes. */
2525
2526 insn = b->head;
2527
2528 never_reached_warning (insn);
2529
2530 if (GET_CODE (insn) == CODE_LABEL)
2531 maybe_remove_eh_handler (insn);
2532
2533 /* Include any jump table following the basic block. */
2534 end = b->end;
2535 if (GET_CODE (end) == JUMP_INSN
2536 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2537 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2538 && GET_CODE (tmp) == JUMP_INSN
2539 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2540 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2541 end = tmp;
2542
2543 /* Include any barrier that may follow the basic block. */
2544 tmp = next_nonnote_insn (end);
2545 if (tmp && GET_CODE (tmp) == BARRIER)
2546 end = tmp;
2547
2548 /* Selectively delete the entire chain. */
2549 flow_delete_insn_chain (insn, end);
2550
2551 /* Remove the edges into and out of this block. Note that there may
2552 indeed be edges in, if we are removing an unreachable loop. */
2553 {
2554 edge e, next, *q;
2555
2556 for (e = b->pred; e; e = next)
2557 {
2558 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2559 continue;
2560 *q = e->succ_next;
2561 next = e->pred_next;
2562 n_edges--;
2563 free (e);
2564 }
2565 for (e = b->succ; e; e = next)
2566 {
2567 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2568 continue;
2569 *q = e->pred_next;
2570 next = e->succ_next;
2571 n_edges--;
2572 free (e);
2573 }
2574
2575 b->pred = NULL;
2576 b->succ = NULL;
2577 }
2578
2579 /* Remove the basic block from the array, and compact behind it. */
2580 expunge_block (b);
2581
2582 return deleted_handler;
2583 }
2584
2585 /* Remove block B from the basic block array and compact behind it. */
2586
2587 static void
2588 expunge_block (b)
2589 basic_block b;
2590 {
2591 int i, n = n_basic_blocks;
2592
2593 for (i = b->index; i + 1 < n; ++i)
2594 {
2595 basic_block x = BASIC_BLOCK (i + 1);
2596 BASIC_BLOCK (i) = x;
2597 x->index = i;
2598 }
2599
2600 basic_block_info->num_elements--;
2601 n_basic_blocks--;
2602 }
2603
2604 /* Delete INSN by patching it out. Return the next insn. */
2605
2606 rtx
2607 flow_delete_insn (insn)
2608 rtx insn;
2609 {
2610 rtx prev = PREV_INSN (insn);
2611 rtx next = NEXT_INSN (insn);
2612 rtx note;
2613
2614 PREV_INSN (insn) = NULL_RTX;
2615 NEXT_INSN (insn) = NULL_RTX;
2616 INSN_DELETED_P (insn) = 1;
2617
2618 if (prev)
2619 NEXT_INSN (prev) = next;
2620 if (next)
2621 PREV_INSN (next) = prev;
2622 else
2623 set_last_insn (prev);
2624
2625 if (GET_CODE (insn) == CODE_LABEL)
2626 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2627
2628 /* If deleting a jump, decrement the use count of the label. Deleting
2629 the label itself should happen in the normal course of block merging. */
2630 if (GET_CODE (insn) == JUMP_INSN
2631 && JUMP_LABEL (insn)
2632 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2633 LABEL_NUSES (JUMP_LABEL (insn))--;
2634
2635 /* Also if deleting an insn that references a label. */
2636 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2637 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2638 LABEL_NUSES (XEXP (note, 0))--;
2639
2640 if (GET_CODE (insn) == JUMP_INSN
2641 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2642 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2643 {
2644 rtx pat = PATTERN (insn);
2645 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2646 int len = XVECLEN (pat, diff_vec_p);
2647 int i;
2648
2649 for (i = 0; i < len; i++)
2650 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2651 }
2652
2653 return next;
2654 }
2655
2656 /* True if a given label can be deleted. */
2657
2658 static int
2659 can_delete_label_p (label)
2660 rtx label;
2661 {
2662 rtx x;
2663
2664 if (LABEL_PRESERVE_P (label))
2665 return 0;
2666
2667 for (x = forced_labels; x; x = XEXP (x, 1))
2668 if (label == XEXP (x, 0))
2669 return 0;
2670 for (x = label_value_list; x; x = XEXP (x, 1))
2671 if (label == XEXP (x, 0))
2672 return 0;
2673 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2674 if (label == XEXP (x, 0))
2675 return 0;
2676
2677 /* User declared labels must be preserved. */
2678 if (LABEL_NAME (label) != 0)
2679 return 0;
2680
2681 return 1;
2682 }
2683
2684 static int
2685 tail_recursion_label_p (label)
2686 rtx label;
2687 {
2688 rtx x;
2689
2690 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2691 if (label == XEXP (x, 0))
2692 return 1;
2693
2694 return 0;
2695 }
2696
2697 /* Blocks A and B are to be merged into a single block A. The insns
2698 are already contiguous, hence `nomove'. */
2699
2700 void
2701 merge_blocks_nomove (a, b)
2702 basic_block a, b;
2703 {
2704 edge e;
2705 rtx b_head, b_end, a_end;
2706 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2707 int b_empty = 0;
2708
2709 /* If there was a CODE_LABEL beginning B, delete it. */
2710 b_head = b->head;
2711 b_end = b->end;
2712 if (GET_CODE (b_head) == CODE_LABEL)
2713 {
2714 /* Detect basic blocks with nothing but a label. This can happen
2715 in particular at the end of a function. */
2716 if (b_head == b_end)
2717 b_empty = 1;
2718 del_first = del_last = b_head;
2719 b_head = NEXT_INSN (b_head);
2720 }
2721
2722 /* Delete the basic block note. */
2723 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2724 {
2725 if (b_head == b_end)
2726 b_empty = 1;
2727 if (! del_last)
2728 del_first = b_head;
2729 del_last = b_head;
2730 b_head = NEXT_INSN (b_head);
2731 }
2732
2733 /* If there was a jump out of A, delete it. */
2734 a_end = a->end;
2735 if (GET_CODE (a_end) == JUMP_INSN)
2736 {
2737 rtx prev;
2738
2739 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2740 if (GET_CODE (prev) != NOTE
2741 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2742 || prev == a->head)
2743 break;
2744
2745 del_first = a_end;
2746
2747 #ifdef HAVE_cc0
2748 /* If this was a conditional jump, we need to also delete
2749 the insn that set cc0. */
2750 if (prev && sets_cc0_p (prev))
2751 {
2752 rtx tmp = prev;
2753 prev = prev_nonnote_insn (prev);
2754 if (!prev)
2755 prev = a->head;
2756 del_first = tmp;
2757 }
2758 #endif
2759
2760 a_end = prev;
2761 }
2762 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2763 del_first = NEXT_INSN (a_end);
2764
2765 /* Delete everything marked above as well as crap that might be
2766 hanging out between the two blocks. */
2767 flow_delete_insn_chain (del_first, del_last);
2768
2769 /* Normally there should only be one successor of A and that is B, but
2770 partway though the merge of blocks for conditional_execution we'll
2771 be merging a TEST block with THEN and ELSE successors. Free the
2772 whole lot of them and hope the caller knows what they're doing. */
2773 while (a->succ)
2774 remove_edge (a->succ);
2775
2776 /* Adjust the edges out of B for the new owner. */
2777 for (e = b->succ; e; e = e->succ_next)
2778 e->src = a;
2779 a->succ = b->succ;
2780
2781 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2782 b->pred = b->succ = NULL;
2783
2784 /* Reassociate the insns of B with A. */
2785 if (!b_empty)
2786 {
2787 if (basic_block_for_insn)
2788 {
2789 BLOCK_FOR_INSN (b_head) = a;
2790 while (b_head != b_end)
2791 {
2792 b_head = NEXT_INSN (b_head);
2793 BLOCK_FOR_INSN (b_head) = a;
2794 }
2795 }
2796 a_end = b_end;
2797 }
2798 a->end = a_end;
2799
2800 expunge_block (b);
2801 }
2802
2803 /* Blocks A and B are to be merged into a single block. A has no incoming
2804 fallthru edge, so it can be moved before B without adding or modifying
2805 any jumps (aside from the jump from A to B). */
2806
2807 static int
2808 merge_blocks_move_predecessor_nojumps (a, b)
2809 basic_block a, b;
2810 {
2811 rtx start, end, barrier;
2812 int index;
2813
2814 start = a->head;
2815 end = a->end;
2816
2817 barrier = next_nonnote_insn (end);
2818 if (GET_CODE (barrier) != BARRIER)
2819 abort ();
2820 flow_delete_insn (barrier);
2821
2822 /* Move block and loop notes out of the chain so that we do not
2823 disturb their order.
2824
2825 ??? A better solution would be to squeeze out all the non-nested notes
2826 and adjust the block trees appropriately. Even better would be to have
2827 a tighter connection between block trees and rtl so that this is not
2828 necessary. */
2829 start = squeeze_notes (start, end);
2830
2831 /* Scramble the insn chain. */
2832 if (end != PREV_INSN (b->head))
2833 reorder_insns (start, end, PREV_INSN (b->head));
2834
2835 if (rtl_dump_file)
2836 {
2837 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2838 a->index, b->index);
2839 }
2840
2841 /* Swap the records for the two blocks around. Although we are deleting B,
2842 A is now where B was and we want to compact the BB array from where
2843 A used to be. */
2844 BASIC_BLOCK (a->index) = b;
2845 BASIC_BLOCK (b->index) = a;
2846 index = a->index;
2847 a->index = b->index;
2848 b->index = index;
2849
2850 /* Now blocks A and B are contiguous. Merge them. */
2851 merge_blocks_nomove (a, b);
2852
2853 return 1;
2854 }
2855
2856 /* Blocks A and B are to be merged into a single block. B has no outgoing
2857 fallthru edge, so it can be moved after A without adding or modifying
2858 any jumps (aside from the jump from A to B). */
2859
2860 static int
2861 merge_blocks_move_successor_nojumps (a, b)
2862 basic_block a, b;
2863 {
2864 rtx start, end, barrier;
2865
2866 start = b->head;
2867 end = b->end;
2868 barrier = NEXT_INSN (end);
2869
2870 /* Recognize a jump table following block B. */
2871 if (barrier
2872 && GET_CODE (barrier) == CODE_LABEL
2873 && NEXT_INSN (barrier)
2874 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2875 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2876 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2877 {
2878 end = NEXT_INSN (barrier);
2879 barrier = NEXT_INSN (end);
2880 }
2881
2882 /* There had better have been a barrier there. Delete it. */
2883 if (barrier && GET_CODE (barrier) == BARRIER)
2884 flow_delete_insn (barrier);
2885
2886 /* Move block and loop notes out of the chain so that we do not
2887 disturb their order.
2888
2889 ??? A better solution would be to squeeze out all the non-nested notes
2890 and adjust the block trees appropriately. Even better would be to have
2891 a tighter connection between block trees and rtl so that this is not
2892 necessary. */
2893 start = squeeze_notes (start, end);
2894
2895 /* Scramble the insn chain. */
2896 reorder_insns (start, end, a->end);
2897
2898 /* Now blocks A and B are contiguous. Merge them. */
2899 merge_blocks_nomove (a, b);
2900
2901 if (rtl_dump_file)
2902 {
2903 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2904 b->index, a->index);
2905 }
2906
2907 return 1;
2908 }
2909
2910 /* Attempt to merge basic blocks that are potentially non-adjacent.
2911 Return true iff the attempt succeeded. */
2912
2913 static int
2914 merge_blocks (e, b, c, mode)
2915 edge e;
2916 basic_block b, c;
2917 int mode;
2918 {
2919 /* If C has a tail recursion label, do not merge. There is no
2920 edge recorded from the call_placeholder back to this label, as
2921 that would make optimize_sibling_and_tail_recursive_calls more
2922 complex for no gain. */
2923 if (GET_CODE (c->head) == CODE_LABEL
2924 && tail_recursion_label_p (c->head))
2925 return 0;
2926
2927 /* If B has a fallthru edge to C, no need to move anything. */
2928 if (e->flags & EDGE_FALLTHRU)
2929 {
2930 merge_blocks_nomove (b, c);
2931
2932 if (rtl_dump_file)
2933 {
2934 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2935 b->index, c->index);
2936 }
2937
2938 return 1;
2939 }
2940 /* Otherwise we will need to move code around. Do that only if expensive
2941 transformations are allowed. */
2942 else if (mode & CLEANUP_EXPENSIVE)
2943 {
2944 edge tmp_edge, c_fallthru_edge;
2945 int c_has_outgoing_fallthru;
2946 int b_has_incoming_fallthru;
2947
2948 /* Avoid overactive code motion, as the forwarder blocks should be
2949 eliminated by edge redirection instead. One exception might have
2950 been if B is a forwarder block and C has no fallthru edge, but
2951 that should be cleaned up by bb-reorder instead. */
2952 if (forwarder_block_p (b) || forwarder_block_p (c))
2953 return 0;
2954
2955 /* We must make sure to not munge nesting of lexical blocks,
2956 and loop notes. This is done by squeezing out all the notes
2957 and leaving them there to lie. Not ideal, but functional. */
2958
2959 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2960 if (tmp_edge->flags & EDGE_FALLTHRU)
2961 break;
2962 c_has_outgoing_fallthru = (tmp_edge != NULL);
2963 c_fallthru_edge = tmp_edge;
2964
2965 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2966 if (tmp_edge->flags & EDGE_FALLTHRU)
2967 break;
2968 b_has_incoming_fallthru = (tmp_edge != NULL);
2969
2970 /* If B does not have an incoming fallthru, then it can be moved
2971 immediately before C without introducing or modifying jumps.
2972 C cannot be the first block, so we do not have to worry about
2973 accessing a non-existent block. */
2974 if (! b_has_incoming_fallthru)
2975 return merge_blocks_move_predecessor_nojumps (b, c);
2976
2977 /* Otherwise, we're going to try to move C after B. If C does
2978 not have an outgoing fallthru, then it can be moved
2979 immediately after B without introducing or modifying jumps. */
2980 if (! c_has_outgoing_fallthru)
2981 return merge_blocks_move_successor_nojumps (b, c);
2982
2983 /* Otherwise, we'll need to insert an extra jump, and possibly
2984 a new block to contain it. We can't redirect to EXIT_BLOCK_PTR,
2985 as we don't have explicit return instructions before epilogues
2986 are generated, so give up on that case. */
2987
2988 if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
2989 && merge_blocks_move_successor_nojumps (b, c))
2990 {
2991 basic_block target = c_fallthru_edge->dest;
2992 rtx barrier;
2993 basic_block new;
2994
2995 /* This is a dirty hack to avoid code duplication.
2996
2997 Set edge to point to wrong basic block, so
2998 redirect_edge_and_branch_force will do the trick
2999 and rewire edge back to the original location. */
3000 redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3001 new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3002
3003 /* We've just created barrier, but another barrier is
3004 already present in the stream. Avoid the duplicate. */
3005 barrier = next_nonnote_insn (new ? new->end : b->end);
3006 if (GET_CODE (barrier) != BARRIER)
3007 abort ();
3008 flow_delete_insn (barrier);
3009 }
3010
3011 return 0;
3012 }
3013 return 0;
3014 }
3015
3016 /* Simplify a conditional jump around an unconditional jump.
3017 Return true if something changed. */
3018
3019 static bool
3020 try_simplify_condjump (cbranch_block)
3021 basic_block cbranch_block;
3022 {
3023 basic_block jump_block, jump_dest_block, cbranch_dest_block;
3024 edge cbranch_jump_edge, cbranch_fallthru_edge;
3025 rtx cbranch_insn;
3026
3027 /* Verify that there are exactly two successors. */
3028 if (!cbranch_block->succ
3029 || !cbranch_block->succ->succ_next
3030 || cbranch_block->succ->succ_next->succ_next)
3031 return false;
3032
3033 /* Verify that we've got a normal conditional branch at the end
3034 of the block. */
3035 cbranch_insn = cbranch_block->end;
3036 if (!any_condjump_p (cbranch_insn))
3037 return false;
3038
3039 cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
3040 cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
3041
3042 /* The next block must not have multiple predecessors, must not
3043 be the last block in the function, and must contain just the
3044 unconditional jump. */
3045 jump_block = cbranch_fallthru_edge->dest;
3046 if (jump_block->pred->pred_next
3047 || jump_block->index == n_basic_blocks - 1
3048 || !forwarder_block_p (jump_block))
3049 return false;
3050 jump_dest_block = jump_block->succ->dest;
3051
3052 /* The conditional branch must target the block after the
3053 unconditional branch. */
3054 cbranch_dest_block = cbranch_jump_edge->dest;
3055
3056 if (!can_fallthru (jump_block, cbranch_dest_block))
3057 return false;
3058
3059 /* Invert the conditional branch. Prevent jump.c from deleting
3060 "unreachable" instructions. */
3061 LABEL_NUSES (JUMP_LABEL (cbranch_insn))++;
3062 if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 1))
3063 {
3064 LABEL_NUSES (JUMP_LABEL (cbranch_insn))--;
3065 return false;
3066 }
3067
3068 if (rtl_dump_file)
3069 fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3070 INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
3071
3072 /* Success. Update the CFG to match. Note that after this point
3073 the edge variable names appear backwards; the redirection is done
3074 this way to preserve edge profile data. */
3075 redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
3076 redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
3077 cbranch_jump_edge->flags |= EDGE_FALLTHRU;
3078 cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
3079
3080 /* Delete the block with the unconditional jump, and clean up the mess. */
3081 flow_delete_block (jump_block);
3082 tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
3083
3084 return true;
3085 }
3086
3087 /* Attempt to forward edges leaving basic block B.
3088 Return true if sucessful. */
3089
3090 static bool
3091 try_forward_edges (b)
3092 basic_block b;
3093 {
3094 bool changed = false;
3095 edge e, next;
3096
3097 for (e = b->succ; e ; e = next)
3098 {
3099 basic_block target, first;
3100 int counter;
3101
3102 next = e->succ_next;
3103
3104 /* Skip complex edges because we don't know how to update them.
3105
3106 Still handle fallthru edges, as we can suceed to forward fallthru
3107 edge to the same place as the branch edge of conditional branch
3108 and turn conditional branch to an unconditonal branch. */
3109 if (e->flags & EDGE_COMPLEX)
3110 continue;
3111
3112 target = first = e->dest;
3113 counter = 0;
3114
3115 /* Look for the real destination of the jump.
3116 Avoid inifinite loop in the infinite empty loop by counting
3117 up to n_basic_blocks. */
3118 while (forwarder_block_p (target)
3119 && target->succ->dest != EXIT_BLOCK_PTR
3120 && counter < n_basic_blocks)
3121 {
3122 /* Bypass trivial infinite loops. */
3123 if (target == target->succ->dest)
3124 counter = n_basic_blocks;
3125 target = target->succ->dest, counter++;
3126 }
3127
3128 if (counter >= n_basic_blocks)
3129 {
3130 if (rtl_dump_file)
3131 fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
3132 target->index);
3133 }
3134 else if (target == first)
3135 ; /* We didn't do anything. */
3136 else if (redirect_edge_and_branch (e, target))
3137 {
3138 /* We successfully forwarded the edge. Now update profile
3139 data: for each edge we traversed in the chain, remove
3140 the original edge's execution count. */
3141 do
3142 {
3143 first->count -= e->count;
3144 first->succ->count -= e->count;
3145 first->frequency -= ((e->probability * b->frequency
3146 + REG_BR_PROB_BASE / 2)
3147 / REG_BR_PROB_BASE);
3148 first = first->succ->dest;
3149 }
3150 while (first != target);
3151
3152 changed = true;
3153 }
3154 else
3155 {
3156 if (rtl_dump_file)
3157 fprintf (rtl_dump_file, "Forwarding edge %i->%i to %i failed.\n",
3158 b->index, e->dest->index, target->index);
3159 }
3160 }
3161
3162 return changed;
3163 }
3164
3165 /* Look through the insns at the end of BB1 and BB2 and find the longest
3166 sequence that are equivalent. Store the first insns for that sequence
3167 in *F1 and *F2 and return the sequence length.
3168
3169 To simplify callers of this function, if the blocks match exactly,
3170 store the head of the blocks in *F1 and *F2. */
3171
3172 static int
3173 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3174 int mode ATTRIBUTE_UNUSED;
3175 basic_block bb1, bb2;
3176 rtx *f1, *f2;
3177 {
3178 rtx i1, i2, p1, p2, last1, last2, afterlast1, afterlast2;
3179 int ninsns = 0;
3180
3181 /* Skip simple jumps at the end of the blocks. Complex jumps still
3182 need to be compared for equivalence, which we'll do below. */
3183
3184 i1 = bb1->end;
3185 if (onlyjump_p (i1))
3186 i1 = PREV_INSN (i1);
3187 i2 = bb2->end;
3188 if (onlyjump_p (i2))
3189 i2 = PREV_INSN (i2);
3190
3191 last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
3192 while (true)
3193 {
3194 /* Ignore notes. */
3195 while ((GET_CODE (i1) == NOTE && i1 != bb1->head))
3196 i1 = PREV_INSN (i1);
3197 while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3198 i2 = PREV_INSN (i2);
3199
3200 if (i1 == bb1->head || i2 == bb2->head)
3201 break;
3202
3203 /* Verify that I1 and I2 are equivalent. */
3204
3205 if (GET_CODE (i1) != GET_CODE (i2))
3206 break;
3207
3208 p1 = PATTERN (i1);
3209 p2 = PATTERN (i2);
3210
3211 /* If this is a CALL_INSN, compare register usage information.
3212 If we don't check this on stack register machines, the two
3213 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3214 numbers of stack registers in the same basic block.
3215 If we don't check this on machines with delay slots, a delay slot may
3216 be filled that clobbers a parameter expected by the subroutine.
3217
3218 ??? We take the simple route for now and assume that if they're
3219 equal, they were constructed identically. */
3220
3221 if (GET_CODE (i1) == CALL_INSN
3222 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3223 CALL_INSN_FUNCTION_USAGE (i2)))
3224 break;
3225
3226 #ifdef STACK_REGS
3227 /* If cross_jump_death_matters is not 0, the insn's mode
3228 indicates whether or not the insn contains any stack-like
3229 regs. */
3230
3231 if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
3232 {
3233 /* If register stack conversion has already been done, then
3234 death notes must also be compared before it is certain that
3235 the two instruction streams match. */
3236
3237 rtx note;
3238 HARD_REG_SET i1_regset, i2_regset;
3239
3240 CLEAR_HARD_REG_SET (i1_regset);
3241 CLEAR_HARD_REG_SET (i2_regset);
3242
3243 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3244 if (REG_NOTE_KIND (note) == REG_DEAD
3245 && STACK_REG_P (XEXP (note, 0)))
3246 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3247
3248 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3249 if (REG_NOTE_KIND (note) == REG_DEAD
3250 && STACK_REG_P (XEXP (note, 0)))
3251 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3252
3253 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3254
3255 break;
3256
3257 done:
3258 ;
3259 }
3260 #endif
3261
3262 if (GET_CODE (p1) != GET_CODE (p2))
3263 break;
3264
3265 if (! rtx_renumbered_equal_p (p1, p2))
3266 {
3267 /* The following code helps take care of G++ cleanups. */
3268 rtx equiv1 = find_reg_equal_equiv_note (i1);
3269 rtx equiv2 = find_reg_equal_equiv_note (i2);
3270
3271 if (equiv1 && equiv2
3272 /* If the equivalences are not to a constant, they may
3273 reference pseudos that no longer exist, so we can't
3274 use them. */
3275 && CONSTANT_P (XEXP (equiv1, 0))
3276 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3277 {
3278 rtx s1 = single_set (i1);
3279 rtx s2 = single_set (i2);
3280 if (s1 != 0 && s2 != 0
3281 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3282 {
3283 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3284 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3285 if (! rtx_renumbered_equal_p (p1, p2))
3286 cancel_changes (0);
3287 else if (apply_change_group ())
3288 goto win;
3289 }
3290 }
3291 break;
3292 }
3293
3294 win:
3295 /* Don't begin a cross-jump with a USE or CLOBBER insn. */
3296 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3297 {
3298 afterlast1 = last1, afterlast2 = last2;
3299 last1 = i1, last2 = i2;
3300 ninsns++;
3301 }
3302 i1 = PREV_INSN (i1);
3303 i2 = PREV_INSN (i2);
3304 }
3305
3306 #ifdef HAVE_cc0
3307 if (ninsns)
3308 {
3309 /* Don't allow the insn after a compare to be shared by
3310 cross-jumping unless the compare is also shared. */
3311 if (reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
3312 last1 = afterlast1, last2 = afterlast2, ninsns--;
3313 }
3314 #endif
3315
3316 /* Include preceeding notes and labels in the cross-jump. One,
3317 this may bring us to the head of the blocks as requested above.
3318 Two, it keeps line number notes as matched as may be. */
3319 if (ninsns)
3320 {
3321 while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3322 last1 = PREV_INSN (last1);
3323 if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3324 last1 = PREV_INSN (last1);
3325 while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3326 last2 = PREV_INSN (last2);
3327 if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3328 last2 = PREV_INSN (last2);
3329
3330 *f1 = last1;
3331 *f2 = last2;
3332 }
3333
3334 return ninsns;
3335 }
3336
3337 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3338 the branch instruction. This means that if we commonize the control
3339 flow before end of the basic block, the semantic remains unchanged.
3340
3341 We may assume that there exists one edge with a common destination. */
3342
3343 static bool
3344 outgoing_edges_match (bb1, bb2)
3345 basic_block bb1;
3346 basic_block bb2;
3347 {
3348 /* If BB1 has only one successor, we must be looking at an unconditional
3349 jump. Which, by the assumption above, means that we only need to check
3350 that BB2 has one successor. */
3351 if (bb1->succ && !bb1->succ->succ_next)
3352 return (bb2->succ && !bb2->succ->succ_next);
3353
3354 /* Match conditional jumps - this may get tricky when fallthru and branch
3355 edges are crossed. */
3356 if (bb1->succ
3357 && bb1->succ->succ_next
3358 && !bb1->succ->succ_next->succ_next
3359 && any_condjump_p (bb1->end))
3360 {
3361 edge b1, f1, b2, f2;
3362 bool reverse, match;
3363 rtx set1, set2, cond1, cond2;
3364 enum rtx_code code1, code2;
3365
3366 if (!bb2->succ
3367 || !bb2->succ->succ_next
3368 || bb1->succ->succ_next->succ_next
3369 || !any_condjump_p (bb2->end))
3370 return false;
3371
3372 b1 = BRANCH_EDGE (bb1);
3373 b2 = BRANCH_EDGE (bb2);
3374 f1 = FALLTHRU_EDGE (bb1);
3375 f2 = FALLTHRU_EDGE (bb2);
3376
3377 /* Get around possible forwarders on fallthru edges. Other cases
3378 should be optimized out already. */
3379 if (forwarder_block_p (f1->dest))
3380 f1 = f1->dest->succ;
3381 if (forwarder_block_p (f2->dest))
3382 f2 = f2->dest->succ;
3383
3384 /* To simplify use of this function, return false if there are
3385 unneeded forwarder blocks. These will get eliminated later
3386 during cleanup_cfg. */
3387 if (forwarder_block_p (f1->dest)
3388 || forwarder_block_p (f2->dest)
3389 || forwarder_block_p (b1->dest)
3390 || forwarder_block_p (b2->dest))
3391 return false;
3392
3393 if (f1->dest == f2->dest && b1->dest == b2->dest)
3394 reverse = false;
3395 else if (f1->dest == b2->dest && b1->dest == f2->dest)
3396 reverse = true;
3397 else
3398 return false;
3399
3400 set1 = pc_set (bb1->end);
3401 set2 = pc_set (bb2->end);
3402 if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3403 != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3404 reverse = !reverse;
3405
3406 cond1 = XEXP (SET_SRC (set1), 0);
3407 cond2 = XEXP (SET_SRC (set2), 0);
3408 code1 = GET_CODE (cond1);
3409 if (reverse)
3410 code2 = reversed_comparison_code (cond2, bb2->end);
3411 else
3412 code2 = GET_CODE (cond2);
3413 if (code2 == UNKNOWN)
3414 return false;
3415
3416 /* Verify codes and operands match. */
3417 match = ((code1 == code2
3418 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3419 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3420 || (code1 == swap_condition (code2)
3421 && rtx_renumbered_equal_p (XEXP (cond1, 1),
3422 XEXP (cond2, 0))
3423 && rtx_renumbered_equal_p (XEXP (cond1, 0),
3424 XEXP (cond2, 1))));
3425
3426 /* If we return true, we will join the blocks. Which means that
3427 we will only have one branch prediction bit to work with. Thus
3428 we require the existing branches to have probabilities that are
3429 roughly similar. */
3430 /* ??? We should use bb->frequency to allow merging in infrequently
3431 executed blocks, but at the moment it is not available when
3432 cleanup_cfg is run. */
3433 if (match && !optimize_size)
3434 {
3435 rtx note1, note2;
3436 int prob1, prob2;
3437 note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3438 note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3439
3440 if (note1 && note2)
3441 {
3442 prob1 = INTVAL (XEXP (note1, 0));
3443 prob2 = INTVAL (XEXP (note2, 0));
3444 if (reverse)
3445 prob2 = REG_BR_PROB_BASE - prob2;
3446
3447 /* Fail if the difference in probabilities is
3448 greater than 5%. */
3449 if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 20)
3450 return false;
3451 }
3452 else if (note1 || note2)
3453 return false;
3454 }
3455
3456 if (rtl_dump_file && match)
3457 fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3458 bb1->index, bb2->index);
3459
3460 return match;
3461 }
3462
3463 /* ??? We can handle computed jumps too. This may be important for
3464 inlined functions containing switch statements. Also jumps w/o
3465 fallthru edges can be handled by simply matching whole insn. */
3466 return false;
3467 }
3468
3469 /* E1 and E2 are edges with the same destination block. Search their
3470 predecessors for common code. If found, redirect control flow from
3471 (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
3472
3473 static bool
3474 try_crossjump_to_edge (mode, e1, e2)
3475 int mode;
3476 edge e1, e2;
3477 {
3478 int nmatch;
3479 basic_block src1 = e1->src, src2 = e2->src;
3480 basic_block redirect_to;
3481 rtx newpos1, newpos2;
3482 edge s;
3483 rtx last;
3484 rtx label;
3485
3486 /* Search backward through forwarder blocks. We don't need to worry
3487 about multiple entry or chained forwarders, as they will be optimized
3488 away. We do this to look past the unconditional jump following a
3489 conditional jump that is required due to the current CFG shape. */
3490 if (src1->pred
3491 && !src1->pred->pred_next
3492 && forwarder_block_p (src1))
3493 {
3494 e1 = src1->pred;
3495 src1 = e1->src;
3496 }
3497 if (src2->pred
3498 && !src2->pred->pred_next
3499 && forwarder_block_p (src2))
3500 {
3501 e2 = src2->pred;
3502 src2 = e2->src;
3503 }
3504
3505 /* Nothing to do if we reach ENTRY, or a common source block. */
3506 if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
3507 return false;
3508 if (src1 == src2)
3509 return false;
3510
3511 /* Seeing more than 1 forwarder blocks would confuse us later... */
3512 if (forwarder_block_p (e1->dest)
3513 && forwarder_block_p (e1->dest->succ->dest))
3514 return false;
3515 if (forwarder_block_p (e2->dest)
3516 && forwarder_block_p (e2->dest->succ->dest))
3517 return false;
3518
3519 /* Likewise with dead code (possibly newly created by the other optimizations
3520 of cfg_cleanup). */
3521 if (!src1->pred || !src2->pred)
3522 return false;
3523
3524 /* Likewise with complex edges.
3525 ??? We should be able to handle most complex edges later with some
3526 care. */
3527 if (e1->flags & EDGE_COMPLEX)
3528 return false;
3529
3530 /* Look for the common insn sequence, part the first ... */
3531 if (!outgoing_edges_match (src1, src2))
3532 return false;
3533
3534 /* ... and part the second. */
3535 nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
3536 if (!nmatch)
3537 return false;
3538
3539 /* Avoid splitting if possible. */
3540 if (newpos2 == src2->head)
3541 redirect_to = src2;
3542 else
3543 {
3544 if (rtl_dump_file)
3545 fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3546 src2->index, nmatch);
3547 redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
3548 }
3549
3550 if (rtl_dump_file)
3551 fprintf (rtl_dump_file,
3552 "Cross jumping from bb %i to bb %i; %i common insns\n",
3553 src1->index, src2->index, nmatch);
3554
3555 redirect_to->count += src1->count;
3556 redirect_to->frequency += src1->frequency;
3557
3558 /* Recompute the frequencies and counts of outgoing edges. */
3559 for (s = redirect_to->succ; s; s = s->succ_next)
3560 {
3561 edge s2;
3562 basic_block d = s->dest;
3563
3564 if (forwarder_block_p (d))
3565 d = d->succ->dest;
3566 for (s2 = src1->succ; ; s2 = s2->succ_next)
3567 {
3568 basic_block d2 = s2->dest;
3569 if (forwarder_block_p (d2))
3570 d2 = d2->succ->dest;
3571 if (d == d2)
3572 break;
3573 }
3574 s->count += s2->count;
3575
3576 /* Take care to update possible forwarder blocks. We verified
3577 that there is no more than one in the chain, so we can't run
3578 into infinite loop. */
3579 if (forwarder_block_p (s->dest))
3580 {
3581 s->dest->succ->count += s2->count;
3582 s->dest->count += s2->count;
3583 s->dest->frequency += ((s->probability * s->src->frequency)
3584 / REG_BR_PROB_BASE);
3585 }
3586 if (forwarder_block_p (s2->dest))
3587 {
3588 s2->dest->succ->count -= s2->count;
3589 s2->dest->count -= s2->count;
3590 s2->dest->frequency -= ((s->probability * s->src->frequency)
3591 / REG_BR_PROB_BASE);
3592 }
3593 if (!redirect_to->frequency && !src1->frequency)
3594 s->probability = (s->probability + s2->probability) / 2;
3595 else
3596 s->probability =
3597 ((s->probability * redirect_to->frequency +
3598 s2->probability * src1->frequency)
3599 / (redirect_to->frequency + src1->frequency));
3600 }
3601
3602 /* FIXME: enable once probabilities are fetched properly at CFG build. */
3603 #if 0
3604 note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3605 if (note)
3606 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3607 #endif
3608
3609 /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
3610
3611 /* Skip possible basic block header. */
3612 if (GET_CODE (newpos1) == CODE_LABEL)
3613 newpos1 = NEXT_INSN (newpos1);
3614 if (GET_CODE (newpos1) == NOTE)
3615 newpos1 = NEXT_INSN (newpos1);
3616 last = src1->end;
3617
3618 /* Emit the jump insn. */
3619 label = block_label (redirect_to);
3620 src1->end = emit_jump_insn_before (gen_jump (label), newpos1);
3621 JUMP_LABEL (src1->end) = label;
3622 LABEL_NUSES (label)++;
3623 if (basic_block_for_insn)
3624 set_block_for_new_insns (src1->end, src1);
3625
3626 /* Delete the now unreachable instructions. */
3627 flow_delete_insn_chain (newpos1, last);
3628
3629 /* Make sure there is a barrier after the new jump. */
3630 last = next_nonnote_insn (src1->end);
3631 if (!last || GET_CODE (last) != BARRIER)
3632 emit_barrier_after (src1->end);
3633
3634 /* Update CFG. */
3635 while (src1->succ)
3636 remove_edge (src1->succ);
3637 make_edge (NULL, src1, redirect_to, 0);
3638
3639 return true;
3640 }
3641
3642 /* Search the predecessors of BB for common insn sequences. When found,
3643 share code between them by redirecting control flow. Return true if
3644 any changes made. */
3645
3646 static bool
3647 try_crossjump_bb (mode, bb)
3648 int mode;
3649 basic_block bb;
3650 {
3651 edge e, e2, nexte2, nexte, fallthru;
3652 bool changed;
3653
3654 /* Nothing to do if there is not at least two incomming edges. */
3655 if (!bb->pred || !bb->pred->pred_next)
3656 return false;
3657
3658 /* It is always cheapest to redirect a block that ends in a branch to
3659 a block that falls through into BB, as that adds no branches to the
3660 program. We'll try that combination first. */
3661 for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3662 if (fallthru->flags & EDGE_FALLTHRU)
3663 break;
3664
3665 changed = false;
3666 for (e = bb->pred; e; e = nexte)
3667 {
3668 nexte = e->pred_next;
3669
3670 /* Elide complex edges now, as neither try_crossjump_to_edge
3671 nor outgoing_edges_match can handle them. */
3672 if (e->flags & EDGE_COMPLEX)
3673 continue;
3674
3675 /* As noted above, first try with the fallthru predecessor. */
3676 if (fallthru)
3677 {
3678 /* Don't combine the fallthru edge into anything else.
3679 If there is a match, we'll do it the other way around. */
3680 if (e == fallthru)
3681 continue;
3682
3683 if (try_crossjump_to_edge (mode, e, fallthru))
3684 {
3685 changed = true;
3686 nexte = bb->pred;
3687 continue;
3688 }
3689 }
3690
3691 /* Non-obvious work limiting check: Recognize that we're going
3692 to call try_crossjump_bb on every basic block. So if we have
3693 two blocks with lots of outgoing edges (a switch) and they
3694 share lots of common destinations, then we would do the
3695 cross-jump check once for each common destination.
3696
3697 Now, if the blocks actually are cross-jump candidates, then
3698 all of their destinations will be shared. Which means that
3699 we only need check them for cross-jump candidacy once. We
3700 can eliminate redundant checks of crossjump(A,B) by arbitrarily
3701 choosing to do the check from the block for which the edge
3702 in question is the first successor of A. */
3703 if (e->src->succ != e)
3704 continue;
3705
3706 for (e2 = bb->pred; e2; e2 = nexte2)
3707 {
3708 nexte2 = e2->pred_next;
3709
3710 if (e2 == e)
3711 continue;
3712
3713 /* We've already checked the fallthru edge above. */
3714 if (e2 == fallthru)
3715 continue;
3716
3717 /* Again, neither try_crossjump_to_edge nor outgoing_edges_match
3718 can handle complex edges. */
3719 if (e2->flags & EDGE_COMPLEX)
3720 continue;
3721
3722 /* The "first successor" check above only prevents multiple
3723 checks of crossjump(A,B). In order to prevent redundant
3724 checks of crossjump(B,A), require that A be the block
3725 with the lowest index. */
3726 if (e->src->index > e2->src->index)
3727 continue;
3728
3729 if (try_crossjump_to_edge (mode, e, e2))
3730 {
3731 changed = true;
3732 nexte = bb->pred;
3733 break;
3734 }
3735 }
3736 }
3737
3738 return changed;
3739 }
3740
3741 /* Do simple CFG optimizations - basic block merging, simplifying of jump
3742 instructions etc. Return nonzero if changes were made. */
3743
3744 static bool
3745 try_optimize_cfg (mode)
3746 int mode;
3747 {
3748 int i;
3749 bool changed_overall = false;
3750 bool changed;
3751 int iterations = 0;
3752
3753 /* Attempt to merge blocks as made possible by edge removal. If a block
3754 has only one successor, and the successor has only one predecessor,
3755 they may be combined. */
3756
3757 do
3758 {
3759 changed = false;
3760 iterations++;
3761
3762 if (rtl_dump_file)
3763 fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
3764 iterations);
3765
3766 for (i = 0; i < n_basic_blocks;)
3767 {
3768 basic_block c, b = BASIC_BLOCK (i);
3769 edge s;
3770 bool changed_here = false;
3771
3772 /* Delete trivially dead basic blocks. */
3773 while (b->pred == NULL)
3774 {
3775 c = BASIC_BLOCK (b->index - 1);
3776 if (rtl_dump_file)
3777 fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3778 flow_delete_block (b);
3779 changed = true;
3780 b = c;
3781 }
3782
3783 /* Remove code labels no longer used. Don't do this before
3784 CALL_PLACEHOLDER is removed, as some branches may be hidden
3785 within. */
3786 if (b->pred->pred_next == NULL
3787 && (b->pred->flags & EDGE_FALLTHRU)
3788 && !(b->pred->flags & EDGE_COMPLEX)
3789 && GET_CODE (b->head) == CODE_LABEL
3790 && (!(mode & CLEANUP_PRE_SIBCALL)
3791 || !tail_recursion_label_p (b->head))
3792 /* If previous block ends with condjump jumping to next BB,
3793 we can't delete the label. */
3794 && (b->pred->src == ENTRY_BLOCK_PTR
3795 || !reg_mentioned_p (b->head, b->pred->src->end)))
3796 {
3797 rtx label = b->head;
3798 b->head = NEXT_INSN (b->head);
3799 flow_delete_insn_chain (label, label);
3800 if (rtl_dump_file)
3801 fprintf (rtl_dump_file, "Deleted label in block %i.\n",
3802 b->index);
3803 }
3804
3805 /* If we fall through an empty block, we can remove it. */
3806 if (b->pred->pred_next == NULL
3807 && (b->pred->flags & EDGE_FALLTHRU)
3808 && GET_CODE (b->head) != CODE_LABEL
3809 && forwarder_block_p (b)
3810 /* Note that forwarder_block_p true ensures that there
3811 is a successor for this block. */
3812 && (b->succ->flags & EDGE_FALLTHRU)
3813 && n_basic_blocks > 1)
3814 {
3815 if (rtl_dump_file)
3816 fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3817 b->index);
3818 c = BASIC_BLOCK (b->index ? b->index - 1 : 1);
3819 redirect_edge_succ_nodup (b->pred, b->succ->dest);
3820 flow_delete_block (b);
3821 changed = true;
3822 b = c;
3823 }
3824
3825 /* Merge blocks. Loop because chains of blocks might be
3826 combineable. */
3827 while ((s = b->succ) != NULL
3828 && s->succ_next == NULL
3829 && !(s->flags & EDGE_COMPLEX)
3830 && (c = s->dest) != EXIT_BLOCK_PTR
3831 && c->pred->pred_next == NULL
3832 /* If the jump insn has side effects,
3833 we can't kill the edge. */
3834 && (GET_CODE (b->end) != JUMP_INSN
3835 || onlyjump_p (b->end))
3836 && merge_blocks (s, b, c, mode))
3837 changed_here = true;
3838
3839 /* Simplify branch over branch. */
3840 if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
3841 changed_here = true;
3842
3843 /* If B has a single outgoing edge, but uses a non-trivial jump
3844 instruction without side-effects, we can either delete the
3845 jump entirely, or replace it with a simple unconditional jump.
3846 Use redirect_edge_and_branch to do the dirty work. */
3847 if (b->succ
3848 && ! b->succ->succ_next
3849 && b->succ->dest != EXIT_BLOCK_PTR
3850 && onlyjump_p (b->end)
3851 && redirect_edge_and_branch (b->succ, b->succ->dest))
3852 changed_here = true;
3853
3854 /* Simplify branch to branch. */
3855 if (try_forward_edges (b))
3856 changed_here = true;
3857
3858 /* Look for shared code between blocks. */
3859 if ((mode & CLEANUP_CROSSJUMP)
3860 && try_crossjump_bb (mode, b))
3861 changed_here = true;
3862
3863 /* Don't get confused by the index shift caused by deleting
3864 blocks. */
3865 if (!changed_here)
3866 i = b->index + 1;
3867 else
3868 changed = true;
3869 }
3870
3871 if ((mode & CLEANUP_CROSSJUMP)
3872 && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
3873 changed = true;
3874
3875 #ifdef ENABLE_CHECKING
3876 if (changed)
3877 verify_flow_info ();
3878 #endif
3879
3880 changed_overall |= changed;
3881 }
3882 while (changed);
3883 return changed_overall;
3884 }
3885
3886 /* The given edge should potentially be a fallthru edge. If that is in
3887 fact true, delete the jump and barriers that are in the way. */
3888
3889 void
3890 tidy_fallthru_edge (e, b, c)
3891 edge e;
3892 basic_block b, c;
3893 {
3894 rtx q;
3895
3896 /* ??? In a late-running flow pass, other folks may have deleted basic
3897 blocks by nopping out blocks, leaving multiple BARRIERs between here
3898 and the target label. They ought to be chastized and fixed.
3899
3900 We can also wind up with a sequence of undeletable labels between
3901 one block and the next.
3902
3903 So search through a sequence of barriers, labels, and notes for
3904 the head of block C and assert that we really do fall through. */
3905
3906 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3907 return;
3908
3909 /* Remove what will soon cease being the jump insn from the source block.
3910 If block B consisted only of this single jump, turn it into a deleted
3911 note. */
3912 q = b->end;
3913 if (GET_CODE (q) == JUMP_INSN
3914 && onlyjump_p (q)
3915 && (any_uncondjump_p (q)
3916 || (b->succ == e && e->succ_next == NULL)))
3917 {
3918 #ifdef HAVE_cc0
3919 /* If this was a conditional jump, we need to also delete
3920 the insn that set cc0. */
3921 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
3922 q = PREV_INSN (q);
3923 #endif
3924
3925 if (b->head == q)
3926 {
3927 PUT_CODE (q, NOTE);
3928 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3929 NOTE_SOURCE_FILE (q) = 0;
3930 }
3931 else
3932 {
3933 q = PREV_INSN (q);
3934
3935 /* We don't want a block to end on a line-number note since that has
3936 the potential of changing the code between -g and not -g. */
3937 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3938 q = PREV_INSN (q);
3939 }
3940
3941 b->end = q;
3942 }
3943
3944 /* Selectively unlink the sequence. */
3945 if (q != PREV_INSN (c->head))
3946 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
3947
3948 e->flags |= EDGE_FALLTHRU;
3949 }
3950
3951 /* Fix up edges that now fall through, or rather should now fall through
3952 but previously required a jump around now deleted blocks. Simplify
3953 the search by only examining blocks numerically adjacent, since this
3954 is how find_basic_blocks created them. */
3955
3956 static void
3957 tidy_fallthru_edges ()
3958 {
3959 int i;
3960
3961 for (i = 1; i < n_basic_blocks; ++i)
3962 {
3963 basic_block b = BASIC_BLOCK (i - 1);
3964 basic_block c = BASIC_BLOCK (i);
3965 edge s;
3966
3967 /* We care about simple conditional or unconditional jumps with
3968 a single successor.
3969
3970 If we had a conditional branch to the next instruction when
3971 find_basic_blocks was called, then there will only be one
3972 out edge for the block which ended with the conditional
3973 branch (since we do not create duplicate edges).
3974
3975 Furthermore, the edge will be marked as a fallthru because we
3976 merge the flags for the duplicate edges. So we do not want to
3977 check that the edge is not a FALLTHRU edge. */
3978 if ((s = b->succ) != NULL
3979 && ! (s->flags & EDGE_COMPLEX)
3980 && s->succ_next == NULL
3981 && s->dest == c
3982 /* If the jump insn has side effects, we can't tidy the edge. */
3983 && (GET_CODE (b->end) != JUMP_INSN
3984 || onlyjump_p (b->end)))
3985 tidy_fallthru_edge (s, b, c);
3986 }
3987 }
3988 \f
3989 /* Perform data flow analysis.
3990 F is the first insn of the function; FLAGS is a set of PROP_* flags
3991 to be used in accumulating flow info. */
3992
3993 void
3994 life_analysis (f, file, flags)
3995 rtx f;
3996 FILE *file;
3997 int flags;
3998 {
3999 #ifdef ELIMINABLE_REGS
4000 register int i;
4001 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
4002 #endif
4003
4004 /* Record which registers will be eliminated. We use this in
4005 mark_used_regs. */
4006
4007 CLEAR_HARD_REG_SET (elim_reg_set);
4008
4009 #ifdef ELIMINABLE_REGS
4010 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4011 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4012 #else
4013 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4014 #endif
4015
4016 if (! optimize)
4017 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
4018
4019 /* The post-reload life analysis have (on a global basis) the same
4020 registers live as was computed by reload itself. elimination
4021 Otherwise offsets and such may be incorrect.
4022
4023 Reload will make some registers as live even though they do not
4024 appear in the rtl.
4025
4026 We don't want to create new auto-incs after reload, since they
4027 are unlikely to be useful and can cause problems with shared
4028 stack slots. */
4029 if (reload_completed)
4030 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
4031
4032 /* We want alias analysis information for local dead store elimination. */
4033 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4034 init_alias_analysis ();
4035
4036 /* Always remove no-op moves. Do this before other processing so
4037 that we don't have to keep re-scanning them. */
4038 delete_noop_moves (f);
4039
4040 /* Some targets can emit simpler epilogues if they know that sp was
4041 not ever modified during the function. After reload, of course,
4042 we've already emitted the epilogue so there's no sense searching. */
4043 if (! reload_completed)
4044 notice_stack_pointer_modification (f);
4045
4046 /* Allocate and zero out data structures that will record the
4047 data from lifetime analysis. */
4048 allocate_reg_life_data ();
4049 allocate_bb_life_data ();
4050
4051 /* Find the set of registers live on function exit. */
4052 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
4053
4054 /* "Update" life info from zero. It'd be nice to begin the
4055 relaxation with just the exit and noreturn blocks, but that set
4056 is not immediately handy. */
4057
4058 if (flags & PROP_REG_INFO)
4059 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4060 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
4061
4062 /* Clean up. */
4063 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4064 end_alias_analysis ();
4065
4066 if (file)
4067 dump_flow_info (file);
4068
4069 free_basic_block_vars (1);
4070
4071 #ifdef ENABLE_CHECKING
4072 {
4073 rtx insn;
4074
4075 /* Search for any REG_LABEL notes which reference deleted labels. */
4076 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4077 {
4078 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4079
4080 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
4081 abort ();
4082 }
4083 }
4084 #endif
4085 /* Removing dead insns should've made jumptables really dead. */
4086 delete_dead_jumptables ();
4087 }
4088
4089 /* A subroutine of verify_wide_reg, called through for_each_rtx.
4090 Search for REGNO. If found, abort if it is not wider than word_mode. */
4091
4092 static int
4093 verify_wide_reg_1 (px, pregno)
4094 rtx *px;
4095 void *pregno;
4096 {
4097 rtx x = *px;
4098 unsigned int regno = *(int *) pregno;
4099
4100 if (GET_CODE (x) == REG && REGNO (x) == regno)
4101 {
4102 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4103 abort ();
4104 return 1;
4105 }
4106 return 0;
4107 }
4108
4109 /* A subroutine of verify_local_live_at_start. Search through insns
4110 between HEAD and END looking for register REGNO. */
4111
4112 static void
4113 verify_wide_reg (regno, head, end)
4114 int regno;
4115 rtx head, end;
4116 {
4117 while (1)
4118 {
4119 if (INSN_P (head)
4120 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4121 return;
4122 if (head == end)
4123 break;
4124 head = NEXT_INSN (head);
4125 }
4126
4127 /* We didn't find the register at all. Something's way screwy. */
4128 if (rtl_dump_file)
4129 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4130 print_rtl_and_abort ();
4131 }
4132
4133 /* A subroutine of update_life_info. Verify that there are no untoward
4134 changes in live_at_start during a local update. */
4135
4136 static void
4137 verify_local_live_at_start (new_live_at_start, bb)
4138 regset new_live_at_start;
4139 basic_block bb;
4140 {
4141 if (reload_completed)
4142 {
4143 /* After reload, there are no pseudos, nor subregs of multi-word
4144 registers. The regsets should exactly match. */
4145 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4146 {
4147 if (rtl_dump_file)
4148 {
4149 fprintf (rtl_dump_file,
4150 "live_at_start mismatch in bb %d, aborting\n",
4151 bb->index);
4152 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4153 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4154 }
4155 print_rtl_and_abort ();
4156 }
4157 }
4158 else
4159 {
4160 int i;
4161
4162 /* Find the set of changed registers. */
4163 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4164
4165 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4166 {
4167 /* No registers should die. */
4168 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4169 {
4170 if (rtl_dump_file)
4171 fprintf (rtl_dump_file,
4172 "Register %d died unexpectedly in block %d\n", i,
4173 bb->index);
4174 print_rtl_and_abort ();
4175 }
4176
4177 /* Verify that the now-live register is wider than word_mode. */
4178 verify_wide_reg (i, bb->head, bb->end);
4179 });
4180 }
4181 }
4182
4183 /* Updates life information starting with the basic blocks set in BLOCKS.
4184 If BLOCKS is null, consider it to be the universal set.
4185
4186 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4187 we are only expecting local modifications to basic blocks. If we find
4188 extra registers live at the beginning of a block, then we either killed
4189 useful data, or we have a broken split that wants data not provided.
4190 If we find registers removed from live_at_start, that means we have
4191 a broken peephole that is killing a register it shouldn't.
4192
4193 ??? This is not true in one situation -- when a pre-reload splitter
4194 generates subregs of a multi-word pseudo, current life analysis will
4195 lose the kill. So we _can_ have a pseudo go live. How irritating.
4196
4197 Including PROP_REG_INFO does not properly refresh regs_ever_live
4198 unless the caller resets it to zero. */
4199
4200 void
4201 update_life_info (blocks, extent, prop_flags)
4202 sbitmap blocks;
4203 enum update_life_extent extent;
4204 int prop_flags;
4205 {
4206 regset tmp;
4207 regset_head tmp_head;
4208 int i;
4209
4210 tmp = INITIALIZE_REG_SET (tmp_head);
4211
4212 /* For a global update, we go through the relaxation process again. */
4213 if (extent != UPDATE_LIFE_LOCAL)
4214 {
4215 calculate_global_regs_live (blocks, blocks,
4216 prop_flags & PROP_SCAN_DEAD_CODE);
4217
4218 /* If asked, remove notes from the blocks we'll update. */
4219 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4220 count_or_remove_death_notes (blocks, 1);
4221 }
4222
4223 if (blocks)
4224 {
4225 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4226 {
4227 basic_block bb = BASIC_BLOCK (i);
4228
4229 COPY_REG_SET (tmp, bb->global_live_at_end);
4230 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4231
4232 if (extent == UPDATE_LIFE_LOCAL)
4233 verify_local_live_at_start (tmp, bb);
4234 });
4235 }
4236 else
4237 {
4238 for (i = n_basic_blocks - 1; i >= 0; --i)
4239 {
4240 basic_block bb = BASIC_BLOCK (i);
4241
4242 COPY_REG_SET (tmp, bb->global_live_at_end);
4243 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4244
4245 if (extent == UPDATE_LIFE_LOCAL)
4246 verify_local_live_at_start (tmp, bb);
4247 }
4248 }
4249
4250 FREE_REG_SET (tmp);
4251
4252 if (prop_flags & PROP_REG_INFO)
4253 {
4254 /* The only pseudos that are live at the beginning of the function
4255 are those that were not set anywhere in the function. local-alloc
4256 doesn't know how to handle these correctly, so mark them as not
4257 local to any one basic block. */
4258 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4259 FIRST_PSEUDO_REGISTER, i,
4260 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4261
4262 /* We have a problem with any pseudoreg that lives across the setjmp.
4263 ANSI says that if a user variable does not change in value between
4264 the setjmp and the longjmp, then the longjmp preserves it. This
4265 includes longjmp from a place where the pseudo appears dead.
4266 (In principle, the value still exists if it is in scope.)
4267 If the pseudo goes in a hard reg, some other value may occupy
4268 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4269 Conclusion: such a pseudo must not go in a hard reg. */
4270 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4271 FIRST_PSEUDO_REGISTER, i,
4272 {
4273 if (regno_reg_rtx[i] != 0)
4274 {
4275 REG_LIVE_LENGTH (i) = -1;
4276 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4277 }
4278 });
4279 }
4280 }
4281
4282 /* Free the variables allocated by find_basic_blocks.
4283
4284 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4285
4286 void
4287 free_basic_block_vars (keep_head_end_p)
4288 int keep_head_end_p;
4289 {
4290 if (basic_block_for_insn)
4291 {
4292 VARRAY_FREE (basic_block_for_insn);
4293 basic_block_for_insn = NULL;
4294 }
4295
4296 if (! keep_head_end_p)
4297 {
4298 if (basic_block_info)
4299 {
4300 clear_edges ();
4301 VARRAY_FREE (basic_block_info);
4302 }
4303 n_basic_blocks = 0;
4304
4305 ENTRY_BLOCK_PTR->aux = NULL;
4306 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4307 EXIT_BLOCK_PTR->aux = NULL;
4308 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4309 }
4310 }
4311
4312 /* Delete any insns that copy a register to itself. */
4313
4314 void
4315 delete_noop_moves (f)
4316 rtx f ATTRIBUTE_UNUSED;
4317 {
4318 int i;
4319 rtx insn, next;
4320 basic_block bb;
4321
4322 for (i = 0; i < n_basic_blocks; i++)
4323 {
4324 bb = BASIC_BLOCK (i);
4325 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
4326 {
4327 next = NEXT_INSN (insn);
4328 if (INSN_P (insn) && noop_move_p (insn))
4329 {
4330 /* Do not call flow_delete_insn here to not confuse backward
4331 pointers of LIBCALL block. */
4332 PUT_CODE (insn, NOTE);
4333 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4334 NOTE_SOURCE_FILE (insn) = 0;
4335 }
4336 }
4337 }
4338 }
4339
4340 /* Delete any jump tables never referenced. We can't delete them at the
4341 time of removing tablejump insn as they are referenced by the preceeding
4342 insns computing the destination, so we delay deleting and garbagecollect
4343 them once life information is computed. */
4344 static void
4345 delete_dead_jumptables ()
4346 {
4347 rtx insn, next;
4348 for (insn = get_insns (); insn; insn = next)
4349 {
4350 next = NEXT_INSN (insn);
4351 if (GET_CODE (insn) == CODE_LABEL
4352 && LABEL_NUSES (insn) == 0
4353 && GET_CODE (next) == JUMP_INSN
4354 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4355 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4356 {
4357 if (rtl_dump_file)
4358 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
4359 flow_delete_insn (NEXT_INSN (insn));
4360 flow_delete_insn (insn);
4361 next = NEXT_INSN (next);
4362 }
4363 }
4364 }
4365
4366 /* Determine if the stack pointer is constant over the life of the function.
4367 Only useful before prologues have been emitted. */
4368
4369 static void
4370 notice_stack_pointer_modification_1 (x, pat, data)
4371 rtx x;
4372 rtx pat ATTRIBUTE_UNUSED;
4373 void *data ATTRIBUTE_UNUSED;
4374 {
4375 if (x == stack_pointer_rtx
4376 /* The stack pointer is only modified indirectly as the result
4377 of a push until later in flow. See the comments in rtl.texi
4378 regarding Embedded Side-Effects on Addresses. */
4379 || (GET_CODE (x) == MEM
4380 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4381 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4382 current_function_sp_is_unchanging = 0;
4383 }
4384
4385 static void
4386 notice_stack_pointer_modification (f)
4387 rtx f;
4388 {
4389 rtx insn;
4390
4391 /* Assume that the stack pointer is unchanging if alloca hasn't
4392 been used. */
4393 current_function_sp_is_unchanging = !current_function_calls_alloca;
4394 if (! current_function_sp_is_unchanging)
4395 return;
4396
4397 for (insn = f; insn; insn = NEXT_INSN (insn))
4398 {
4399 if (INSN_P (insn))
4400 {
4401 /* Check if insn modifies the stack pointer. */
4402 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
4403 NULL);
4404 if (! current_function_sp_is_unchanging)
4405 return;
4406 }
4407 }
4408 }
4409
4410 /* Mark a register in SET. Hard registers in large modes get all
4411 of their component registers set as well. */
4412
4413 static void
4414 mark_reg (reg, xset)
4415 rtx reg;
4416 void *xset;
4417 {
4418 regset set = (regset) xset;
4419 int regno = REGNO (reg);
4420
4421 if (GET_MODE (reg) == BLKmode)
4422 abort ();
4423
4424 SET_REGNO_REG_SET (set, regno);
4425 if (regno < FIRST_PSEUDO_REGISTER)
4426 {
4427 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4428 while (--n > 0)
4429 SET_REGNO_REG_SET (set, regno + n);
4430 }
4431 }
4432
4433 /* Mark those regs which are needed at the end of the function as live
4434 at the end of the last basic block. */
4435
4436 static void
4437 mark_regs_live_at_end (set)
4438 regset set;
4439 {
4440 unsigned int i;
4441
4442 /* If exiting needs the right stack value, consider the stack pointer
4443 live at the end of the function. */
4444 if ((HAVE_epilogue && reload_completed)
4445 || ! EXIT_IGNORE_STACK
4446 || (! FRAME_POINTER_REQUIRED
4447 && ! current_function_calls_alloca
4448 && flag_omit_frame_pointer)
4449 || current_function_sp_is_unchanging)
4450 {
4451 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4452 }
4453
4454 /* Mark the frame pointer if needed at the end of the function. If
4455 we end up eliminating it, it will be removed from the live list
4456 of each basic block by reload. */
4457
4458 if (! reload_completed || frame_pointer_needed)
4459 {
4460 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4461 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4462 /* If they are different, also mark the hard frame pointer as live. */
4463 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4464 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4465 #endif
4466 }
4467
4468 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4469 /* Many architectures have a GP register even without flag_pic.
4470 Assume the pic register is not in use, or will be handled by
4471 other means, if it is not fixed. */
4472 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4473 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4474 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4475 #endif
4476
4477 /* Mark all global registers, and all registers used by the epilogue
4478 as being live at the end of the function since they may be
4479 referenced by our caller. */
4480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4481 if (global_regs[i] || EPILOGUE_USES (i))
4482 SET_REGNO_REG_SET (set, i);
4483
4484 if (HAVE_epilogue && reload_completed)
4485 {
4486 /* Mark all call-saved registers that we actually used. */
4487 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4488 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4489 SET_REGNO_REG_SET (set, i);
4490 }
4491
4492 #ifdef EH_RETURN_DATA_REGNO
4493 /* Mark the registers that will contain data for the handler. */
4494 if (reload_completed && current_function_calls_eh_return)
4495 for (i = 0; ; ++i)
4496 {
4497 unsigned regno = EH_RETURN_DATA_REGNO(i);
4498 if (regno == INVALID_REGNUM)
4499 break;
4500 SET_REGNO_REG_SET (set, regno);
4501 }
4502 #endif
4503 #ifdef EH_RETURN_STACKADJ_RTX
4504 if ((! HAVE_epilogue || ! reload_completed)
4505 && current_function_calls_eh_return)
4506 {
4507 rtx tmp = EH_RETURN_STACKADJ_RTX;
4508 if (tmp && REG_P (tmp))
4509 mark_reg (tmp, set);
4510 }
4511 #endif
4512 #ifdef EH_RETURN_HANDLER_RTX
4513 if ((! HAVE_epilogue || ! reload_completed)
4514 && current_function_calls_eh_return)
4515 {
4516 rtx tmp = EH_RETURN_HANDLER_RTX;
4517 if (tmp && REG_P (tmp))
4518 mark_reg (tmp, set);
4519 }
4520 #endif
4521
4522 /* Mark function return value. */
4523 diddle_return_value (mark_reg, set);
4524 }
4525
4526 /* Callback function for for_each_successor_phi. DATA is a regset.
4527 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4528 INSN, in the regset. */
4529
4530 static int
4531 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4532 rtx insn ATTRIBUTE_UNUSED;
4533 int dest_regno ATTRIBUTE_UNUSED;
4534 int src_regno;
4535 void *data;
4536 {
4537 regset live = (regset) data;
4538 SET_REGNO_REG_SET (live, src_regno);
4539 return 0;
4540 }
4541
4542 /* Propagate global life info around the graph of basic blocks. Begin
4543 considering blocks with their corresponding bit set in BLOCKS_IN.
4544 If BLOCKS_IN is null, consider it the universal set.
4545
4546 BLOCKS_OUT is set for every block that was changed. */
4547
4548 static void
4549 calculate_global_regs_live (blocks_in, blocks_out, flags)
4550 sbitmap blocks_in, blocks_out;
4551 int flags;
4552 {
4553 basic_block *queue, *qhead, *qtail, *qend;
4554 regset tmp, new_live_at_end, call_used;
4555 regset_head tmp_head, call_used_head;
4556 regset_head new_live_at_end_head;
4557 int i;
4558
4559 tmp = INITIALIZE_REG_SET (tmp_head);
4560 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4561 call_used = INITIALIZE_REG_SET (call_used_head);
4562
4563 /* Inconveniently, this is only redily available in hard reg set form. */
4564 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4565 if (call_used_regs[i])
4566 SET_REGNO_REG_SET (call_used, i);
4567
4568 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4569 because the `head == tail' style test for an empty queue doesn't
4570 work with a full queue. */
4571 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4572 qtail = queue;
4573 qhead = qend = queue + n_basic_blocks + 2;
4574
4575 /* Queue the blocks set in the initial mask. Do this in reverse block
4576 number order so that we are more likely for the first round to do
4577 useful work. We use AUX non-null to flag that the block is queued. */
4578 if (blocks_in)
4579 {
4580 /* Clear out the garbage that might be hanging out in bb->aux. */
4581 for (i = n_basic_blocks - 1; i >= 0; --i)
4582 BASIC_BLOCK (i)->aux = NULL;
4583
4584 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4585 {
4586 basic_block bb = BASIC_BLOCK (i);
4587 *--qhead = bb;
4588 bb->aux = bb;
4589 });
4590 }
4591 else
4592 {
4593 for (i = 0; i < n_basic_blocks; ++i)
4594 {
4595 basic_block bb = BASIC_BLOCK (i);
4596 *--qhead = bb;
4597 bb->aux = bb;
4598 }
4599 }
4600
4601 if (blocks_out)
4602 sbitmap_zero (blocks_out);
4603
4604 /* We work through the queue until there are no more blocks. What
4605 is live at the end of this block is precisely the union of what
4606 is live at the beginning of all its successors. So, we set its
4607 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4608 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4609 this block by walking through the instructions in this block in
4610 reverse order and updating as we go. If that changed
4611 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4612 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4613
4614 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4615 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4616 must either be live at the end of the block, or used within the
4617 block. In the latter case, it will certainly never disappear
4618 from GLOBAL_LIVE_AT_START. In the former case, the register
4619 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4620 for one of the successor blocks. By induction, that cannot
4621 occur. */
4622 while (qhead != qtail)
4623 {
4624 int rescan, changed;
4625 basic_block bb;
4626 edge e;
4627
4628 bb = *qhead++;
4629 if (qhead == qend)
4630 qhead = queue;
4631 bb->aux = NULL;
4632
4633 /* Begin by propagating live_at_start from the successor blocks. */
4634 CLEAR_REG_SET (new_live_at_end);
4635 for (e = bb->succ; e; e = e->succ_next)
4636 {
4637 basic_block sb = e->dest;
4638
4639 /* Call-clobbered registers die across exception and call edges. */
4640 /* ??? Abnormal call edges ignored for the moment, as this gets
4641 confused by sibling call edges, which crashes reg-stack. */
4642 if (e->flags & EDGE_EH)
4643 {
4644 bitmap_operation (tmp, sb->global_live_at_start,
4645 call_used, BITMAP_AND_COMPL);
4646 IOR_REG_SET (new_live_at_end, tmp);
4647 }
4648 else
4649 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4650 }
4651
4652 /* The all-important stack pointer must always be live. */
4653 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4654
4655 /* Before reload, there are a few registers that must be forced
4656 live everywhere -- which might not already be the case for
4657 blocks within infinite loops. */
4658 if (! reload_completed)
4659 {
4660 /* Any reference to any pseudo before reload is a potential
4661 reference of the frame pointer. */
4662 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4663
4664 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4665 /* Pseudos with argument area equivalences may require
4666 reloading via the argument pointer. */
4667 if (fixed_regs[ARG_POINTER_REGNUM])
4668 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4669 #endif
4670
4671 /* Any constant, or pseudo with constant equivalences, may
4672 require reloading from memory using the pic register. */
4673 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4674 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4675 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4676 }
4677
4678 /* Regs used in phi nodes are not included in
4679 global_live_at_start, since they are live only along a
4680 particular edge. Set those regs that are live because of a
4681 phi node alternative corresponding to this particular block. */
4682 if (in_ssa_form)
4683 for_each_successor_phi (bb, &set_phi_alternative_reg,
4684 new_live_at_end);
4685
4686 if (bb == ENTRY_BLOCK_PTR)
4687 {
4688 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4689 continue;
4690 }
4691
4692 /* On our first pass through this block, we'll go ahead and continue.
4693 Recognize first pass by local_set NULL. On subsequent passes, we
4694 get to skip out early if live_at_end wouldn't have changed. */
4695
4696 if (bb->local_set == NULL)
4697 {
4698 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4699 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4700 rescan = 1;
4701 }
4702 else
4703 {
4704 /* If any bits were removed from live_at_end, we'll have to
4705 rescan the block. This wouldn't be necessary if we had
4706 precalculated local_live, however with PROP_SCAN_DEAD_CODE
4707 local_live is really dependent on live_at_end. */
4708 CLEAR_REG_SET (tmp);
4709 rescan = bitmap_operation (tmp, bb->global_live_at_end,
4710 new_live_at_end, BITMAP_AND_COMPL);
4711
4712 if (! rescan)
4713 {
4714 /* If any of the registers in the new live_at_end set are
4715 conditionally set in this basic block, we must rescan.
4716 This is because conditional lifetimes at the end of the
4717 block do not just take the live_at_end set into account,
4718 but also the liveness at the start of each successor
4719 block. We can miss changes in those sets if we only
4720 compare the new live_at_end against the previous one. */
4721 CLEAR_REG_SET (tmp);
4722 rescan = bitmap_operation (tmp, new_live_at_end,
4723 bb->cond_local_set, BITMAP_AND);
4724 }
4725
4726 if (! rescan)
4727 {
4728 /* Find the set of changed bits. Take this opportunity
4729 to notice that this set is empty and early out. */
4730 CLEAR_REG_SET (tmp);
4731 changed = bitmap_operation (tmp, bb->global_live_at_end,
4732 new_live_at_end, BITMAP_XOR);
4733 if (! changed)
4734 continue;
4735
4736 /* If any of the changed bits overlap with local_set,
4737 we'll have to rescan the block. Detect overlap by
4738 the AND with ~local_set turning off bits. */
4739 rescan = bitmap_operation (tmp, tmp, bb->local_set,
4740 BITMAP_AND_COMPL);
4741 }
4742 }
4743
4744 /* Let our caller know that BB changed enough to require its
4745 death notes updated. */
4746 if (blocks_out)
4747 SET_BIT (blocks_out, bb->index);
4748
4749 if (! rescan)
4750 {
4751 /* Add to live_at_start the set of all registers in
4752 new_live_at_end that aren't in the old live_at_end. */
4753
4754 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
4755 BITMAP_AND_COMPL);
4756 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4757
4758 changed = bitmap_operation (bb->global_live_at_start,
4759 bb->global_live_at_start,
4760 tmp, BITMAP_IOR);
4761 if (! changed)
4762 continue;
4763 }
4764 else
4765 {
4766 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4767
4768 /* Rescan the block insn by insn to turn (a copy of) live_at_end
4769 into live_at_start. */
4770 propagate_block (bb, new_live_at_end, bb->local_set,
4771 bb->cond_local_set, flags);
4772
4773 /* If live_at start didn't change, no need to go farther. */
4774 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
4775 continue;
4776
4777 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
4778 }
4779
4780 /* Queue all predecessors of BB so that we may re-examine
4781 their live_at_end. */
4782 for (e = bb->pred; e; e = e->pred_next)
4783 {
4784 basic_block pb = e->src;
4785 if (pb->aux == NULL)
4786 {
4787 *qtail++ = pb;
4788 if (qtail == qend)
4789 qtail = queue;
4790 pb->aux = pb;
4791 }
4792 }
4793 }
4794
4795 FREE_REG_SET (tmp);
4796 FREE_REG_SET (new_live_at_end);
4797 FREE_REG_SET (call_used);
4798
4799 if (blocks_out)
4800 {
4801 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
4802 {
4803 basic_block bb = BASIC_BLOCK (i);
4804 FREE_REG_SET (bb->local_set);
4805 FREE_REG_SET (bb->cond_local_set);
4806 });
4807 }
4808 else
4809 {
4810 for (i = n_basic_blocks - 1; i >= 0; --i)
4811 {
4812 basic_block bb = BASIC_BLOCK (i);
4813 FREE_REG_SET (bb->local_set);
4814 FREE_REG_SET (bb->cond_local_set);
4815 }
4816 }
4817
4818 free (queue);
4819 }
4820 \f
4821 /* Subroutines of life analysis. */
4822
4823 /* Allocate the permanent data structures that represent the results
4824 of life analysis. Not static since used also for stupid life analysis. */
4825
4826 void
4827 allocate_bb_life_data ()
4828 {
4829 register int i;
4830
4831 for (i = 0; i < n_basic_blocks; i++)
4832 {
4833 basic_block bb = BASIC_BLOCK (i);
4834
4835 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4836 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4837 }
4838
4839 ENTRY_BLOCK_PTR->global_live_at_end
4840 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4841 EXIT_BLOCK_PTR->global_live_at_start
4842 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4843
4844 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4845 }
4846
4847 void
4848 allocate_reg_life_data ()
4849 {
4850 int i;
4851
4852 max_regno = max_reg_num ();
4853
4854 /* Recalculate the register space, in case it has grown. Old style
4855 vector oriented regsets would set regset_{size,bytes} here also. */
4856 allocate_reg_info (max_regno, FALSE, FALSE);
4857
4858 /* Reset all the data we'll collect in propagate_block and its
4859 subroutines. */
4860 for (i = 0; i < max_regno; i++)
4861 {
4862 REG_N_SETS (i) = 0;
4863 REG_N_REFS (i) = 0;
4864 REG_N_DEATHS (i) = 0;
4865 REG_N_CALLS_CROSSED (i) = 0;
4866 REG_LIVE_LENGTH (i) = 0;
4867 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4868 }
4869 }
4870
4871 /* Delete dead instructions for propagate_block. */
4872
4873 static void
4874 propagate_block_delete_insn (bb, insn)
4875 basic_block bb;
4876 rtx insn;
4877 {
4878 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4879
4880 /* If the insn referred to a label, and that label was attached to
4881 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4882 pretty much mandatory to delete it, because the ADDR_VEC may be
4883 referencing labels that no longer exist.
4884
4885 INSN may reference a deleted label, particularly when a jump
4886 table has been optimized into a direct jump. There's no
4887 real good way to fix up the reference to the deleted label
4888 when the label is deleted, so we just allow it here.
4889
4890 After dead code elimination is complete, we do search for
4891 any REG_LABEL notes which reference deleted labels as a
4892 sanity check. */
4893
4894 if (inote && GET_CODE (inote) == CODE_LABEL)
4895 {
4896 rtx label = XEXP (inote, 0);
4897 rtx next;
4898
4899 /* The label may be forced if it has been put in the constant
4900 pool. If that is the only use we must discard the table
4901 jump following it, but not the label itself. */
4902 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4903 && (next = next_nonnote_insn (label)) != NULL
4904 && GET_CODE (next) == JUMP_INSN
4905 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4906 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4907 {
4908 rtx pat = PATTERN (next);
4909 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4910 int len = XVECLEN (pat, diff_vec_p);
4911 int i;
4912
4913 for (i = 0; i < len; i++)
4914 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4915
4916 flow_delete_insn (next);
4917 }
4918 }
4919
4920 if (bb->end == insn)
4921 bb->end = PREV_INSN (insn);
4922 flow_delete_insn (insn);
4923 }
4924
4925 /* Delete dead libcalls for propagate_block. Return the insn
4926 before the libcall. */
4927
4928 static rtx
4929 propagate_block_delete_libcall (bb, insn, note)
4930 basic_block bb;
4931 rtx insn, note;
4932 {
4933 rtx first = XEXP (note, 0);
4934 rtx before = PREV_INSN (first);
4935
4936 if (insn == bb->end)
4937 bb->end = before;
4938
4939 flow_delete_insn_chain (first, insn);
4940 return before;
4941 }
4942
4943 /* Update the life-status of regs for one insn. Return the previous insn. */
4944
4945 rtx
4946 propagate_one_insn (pbi, insn)
4947 struct propagate_block_info *pbi;
4948 rtx insn;
4949 {
4950 rtx prev = PREV_INSN (insn);
4951 int flags = pbi->flags;
4952 int insn_is_dead = 0;
4953 int libcall_is_dead = 0;
4954 rtx note;
4955 int i;
4956
4957 if (! INSN_P (insn))
4958 return prev;
4959
4960 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4961 if (flags & PROP_SCAN_DEAD_CODE)
4962 {
4963 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4964 libcall_is_dead = (insn_is_dead && note != 0
4965 && libcall_dead_p (pbi, note, insn));
4966 }
4967
4968 /* If an instruction consists of just dead store(s) on final pass,
4969 delete it. */
4970 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4971 {
4972 /* If we're trying to delete a prologue or epilogue instruction
4973 that isn't flagged as possibly being dead, something is wrong.
4974 But if we are keeping the stack pointer depressed, we might well
4975 be deleting insns that are used to compute the amount to update
4976 it by, so they are fine. */
4977 if (reload_completed
4978 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4979 && (TYPE_RETURNS_STACK_DEPRESSED
4980 (TREE_TYPE (current_function_decl))))
4981 && (((HAVE_epilogue || HAVE_prologue)
4982 && prologue_epilogue_contains (insn))
4983 || (HAVE_sibcall_epilogue
4984 && sibcall_epilogue_contains (insn)))
4985 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4986 abort ();
4987
4988 /* Record sets. Do this even for dead instructions, since they
4989 would have killed the values if they hadn't been deleted. */
4990 mark_set_regs (pbi, PATTERN (insn), insn);
4991
4992 /* CC0 is now known to be dead. Either this insn used it,
4993 in which case it doesn't anymore, or clobbered it,
4994 so the next insn can't use it. */
4995 pbi->cc0_live = 0;
4996
4997 if (libcall_is_dead)
4998 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4999 else
5000 propagate_block_delete_insn (pbi->bb, insn);
5001
5002 return prev;
5003 }
5004
5005 /* See if this is an increment or decrement that can be merged into
5006 a following memory address. */
5007 #ifdef AUTO_INC_DEC
5008 {
5009 register rtx x = single_set (insn);
5010
5011 /* Does this instruction increment or decrement a register? */
5012 if ((flags & PROP_AUTOINC)
5013 && x != 0
5014 && GET_CODE (SET_DEST (x)) == REG
5015 && (GET_CODE (SET_SRC (x)) == PLUS
5016 || GET_CODE (SET_SRC (x)) == MINUS)
5017 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
5018 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5019 /* Ok, look for a following memory ref we can combine with.
5020 If one is found, change the memory ref to a PRE_INC
5021 or PRE_DEC, cancel this insn, and return 1.
5022 Return 0 if nothing has been done. */
5023 && try_pre_increment_1 (pbi, insn))
5024 return prev;
5025 }
5026 #endif /* AUTO_INC_DEC */
5027
5028 CLEAR_REG_SET (pbi->new_set);
5029
5030 /* If this is not the final pass, and this insn is copying the value of
5031 a library call and it's dead, don't scan the insns that perform the
5032 library call, so that the call's arguments are not marked live. */
5033 if (libcall_is_dead)
5034 {
5035 /* Record the death of the dest reg. */
5036 mark_set_regs (pbi, PATTERN (insn), insn);
5037
5038 insn = XEXP (note, 0);
5039 return PREV_INSN (insn);
5040 }
5041 else if (GET_CODE (PATTERN (insn)) == SET
5042 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
5043 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
5044 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
5045 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
5046 /* We have an insn to pop a constant amount off the stack.
5047 (Such insns use PLUS regardless of the direction of the stack,
5048 and any insn to adjust the stack by a constant is always a pop.)
5049 These insns, if not dead stores, have no effect on life. */
5050 ;
5051 else
5052 {
5053 /* Any regs live at the time of a call instruction must not go
5054 in a register clobbered by calls. Find all regs now live and
5055 record this for them. */
5056
5057 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
5058 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5059 { REG_N_CALLS_CROSSED (i)++; });
5060
5061 /* Record sets. Do this even for dead instructions, since they
5062 would have killed the values if they hadn't been deleted. */
5063 mark_set_regs (pbi, PATTERN (insn), insn);
5064
5065 if (GET_CODE (insn) == CALL_INSN)
5066 {
5067 register int i;
5068 rtx note, cond;
5069
5070 cond = NULL_RTX;
5071 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5072 cond = COND_EXEC_TEST (PATTERN (insn));
5073
5074 /* Non-constant calls clobber memory. */
5075 if (! CONST_CALL_P (insn))
5076 {
5077 free_EXPR_LIST_list (&pbi->mem_set_list);
5078 pbi->mem_set_list_len = 0;
5079 }
5080
5081 /* There may be extra registers to be clobbered. */
5082 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5083 note;
5084 note = XEXP (note, 1))
5085 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
5086 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
5087 cond, insn, pbi->flags);
5088
5089 /* Calls change all call-used and global registers. */
5090 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5091 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
5092 {
5093 /* We do not want REG_UNUSED notes for these registers. */
5094 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
5095 cond, insn,
5096 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
5097 }
5098 }
5099
5100 /* If an insn doesn't use CC0, it becomes dead since we assume
5101 that every insn clobbers it. So show it dead here;
5102 mark_used_regs will set it live if it is referenced. */
5103 pbi->cc0_live = 0;
5104
5105 /* Record uses. */
5106 if (! insn_is_dead)
5107 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5108
5109 /* Sometimes we may have inserted something before INSN (such as a move)
5110 when we make an auto-inc. So ensure we will scan those insns. */
5111 #ifdef AUTO_INC_DEC
5112 prev = PREV_INSN (insn);
5113 #endif
5114
5115 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5116 {
5117 register int i;
5118 rtx note, cond;
5119
5120 cond = NULL_RTX;
5121 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5122 cond = COND_EXEC_TEST (PATTERN (insn));
5123
5124 /* Calls use their arguments. */
5125 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5126 note;
5127 note = XEXP (note, 1))
5128 if (GET_CODE (XEXP (note, 0)) == USE)
5129 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5130 cond, insn);
5131
5132 /* The stack ptr is used (honorarily) by a CALL insn. */
5133 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5134
5135 /* Calls may also reference any of the global registers,
5136 so they are made live. */
5137 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5138 if (global_regs[i])
5139 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5140 cond, insn);
5141 }
5142 }
5143
5144 /* On final pass, update counts of how many insns in which each reg
5145 is live. */
5146 if (flags & PROP_REG_INFO)
5147 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5148 { REG_LIVE_LENGTH (i)++; });
5149
5150 return prev;
5151 }
5152
5153 /* Initialize a propagate_block_info struct for public consumption.
5154 Note that the structure itself is opaque to this file, but that
5155 the user can use the regsets provided here. */
5156
5157 struct propagate_block_info *
5158 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5159 basic_block bb;
5160 regset live, local_set, cond_local_set;
5161 int flags;
5162 {
5163 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5164
5165 pbi->bb = bb;
5166 pbi->reg_live = live;
5167 pbi->mem_set_list = NULL_RTX;
5168 pbi->mem_set_list_len = 0;
5169 pbi->local_set = local_set;
5170 pbi->cond_local_set = cond_local_set;
5171 pbi->cc0_live = 0;
5172 pbi->flags = flags;
5173
5174 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5175 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5176 else
5177 pbi->reg_next_use = NULL;
5178
5179 pbi->new_set = BITMAP_XMALLOC ();
5180
5181 #ifdef HAVE_conditional_execution
5182 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5183 free_reg_cond_life_info);
5184 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5185
5186 /* If this block ends in a conditional branch, for each register live
5187 from one side of the branch and not the other, record the register
5188 as conditionally dead. */
5189 if (GET_CODE (bb->end) == JUMP_INSN
5190 && any_condjump_p (bb->end))
5191 {
5192 regset_head diff_head;
5193 regset diff = INITIALIZE_REG_SET (diff_head);
5194 basic_block bb_true, bb_false;
5195 rtx cond_true, cond_false, set_src;
5196 int i;
5197
5198 /* Identify the successor blocks. */
5199 bb_true = bb->succ->dest;
5200 if (bb->succ->succ_next != NULL)
5201 {
5202 bb_false = bb->succ->succ_next->dest;
5203
5204 if (bb->succ->flags & EDGE_FALLTHRU)
5205 {
5206 basic_block t = bb_false;
5207 bb_false = bb_true;
5208 bb_true = t;
5209 }
5210 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5211 abort ();
5212 }
5213 else
5214 {
5215 /* This can happen with a conditional jump to the next insn. */
5216 if (JUMP_LABEL (bb->end) != bb_true->head)
5217 abort ();
5218
5219 /* Simplest way to do nothing. */
5220 bb_false = bb_true;
5221 }
5222
5223 /* Extract the condition from the branch. */
5224 set_src = SET_SRC (pc_set (bb->end));
5225 cond_true = XEXP (set_src, 0);
5226 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5227 GET_MODE (cond_true), XEXP (cond_true, 0),
5228 XEXP (cond_true, 1));
5229 if (GET_CODE (XEXP (set_src, 1)) == PC)
5230 {
5231 rtx t = cond_false;
5232 cond_false = cond_true;
5233 cond_true = t;
5234 }
5235
5236 /* Compute which register lead different lives in the successors. */
5237 if (bitmap_operation (diff, bb_true->global_live_at_start,
5238 bb_false->global_live_at_start, BITMAP_XOR))
5239 {
5240 rtx reg = XEXP (cond_true, 0);
5241
5242 if (GET_CODE (reg) == SUBREG)
5243 reg = SUBREG_REG (reg);
5244
5245 if (GET_CODE (reg) != REG)
5246 abort ();
5247
5248 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5249
5250 /* For each such register, mark it conditionally dead. */
5251 EXECUTE_IF_SET_IN_REG_SET
5252 (diff, 0, i,
5253 {
5254 struct reg_cond_life_info *rcli;
5255 rtx cond;
5256
5257 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5258
5259 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5260 cond = cond_false;
5261 else
5262 cond = cond_true;
5263 rcli->condition = cond;
5264 rcli->stores = const0_rtx;
5265 rcli->orig_condition = cond;
5266
5267 splay_tree_insert (pbi->reg_cond_dead, i,
5268 (splay_tree_value) rcli);
5269 });
5270 }
5271
5272 FREE_REG_SET (diff);
5273 }
5274 #endif
5275
5276 /* If this block has no successors, any stores to the frame that aren't
5277 used later in the block are dead. So make a pass over the block
5278 recording any such that are made and show them dead at the end. We do
5279 a very conservative and simple job here. */
5280 if (optimize
5281 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5282 && (TYPE_RETURNS_STACK_DEPRESSED
5283 (TREE_TYPE (current_function_decl))))
5284 && (flags & PROP_SCAN_DEAD_CODE)
5285 && (bb->succ == NULL
5286 || (bb->succ->succ_next == NULL
5287 && bb->succ->dest == EXIT_BLOCK_PTR
5288 && ! current_function_calls_eh_return)))
5289 {
5290 rtx insn, set;
5291 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5292 if (GET_CODE (insn) == INSN
5293 && (set = single_set (insn))
5294 && GET_CODE (SET_DEST (set)) == MEM)
5295 {
5296 rtx mem = SET_DEST (set);
5297 rtx canon_mem = canon_rtx (mem);
5298
5299 /* This optimization is performed by faking a store to the
5300 memory at the end of the block. This doesn't work for
5301 unchanging memories because multiple stores to unchanging
5302 memory is illegal and alias analysis doesn't consider it. */
5303 if (RTX_UNCHANGING_P (canon_mem))
5304 continue;
5305
5306 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5307 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5308 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5309 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5310 {
5311 #ifdef AUTO_INC_DEC
5312 /* Store a copy of mem, otherwise the address may be scrogged
5313 by find_auto_inc. This matters because insn_dead_p uses
5314 an rtx_equal_p check to determine if two addresses are
5315 the same. This works before find_auto_inc, but fails
5316 after find_auto_inc, causing discrepencies between the
5317 set of live registers calculated during the
5318 calculate_global_regs_live phase and what actually exists
5319 after flow completes, leading to aborts. */
5320 if (flags & PROP_AUTOINC)
5321 mem = shallow_copy_rtx (mem);
5322 #endif
5323 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
5324 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
5325 break;
5326 }
5327 }
5328 }
5329
5330 return pbi;
5331 }
5332
5333 /* Release a propagate_block_info struct. */
5334
5335 void
5336 free_propagate_block_info (pbi)
5337 struct propagate_block_info *pbi;
5338 {
5339 free_EXPR_LIST_list (&pbi->mem_set_list);
5340
5341 BITMAP_XFREE (pbi->new_set);
5342
5343 #ifdef HAVE_conditional_execution
5344 splay_tree_delete (pbi->reg_cond_dead);
5345 BITMAP_XFREE (pbi->reg_cond_reg);
5346 #endif
5347
5348 if (pbi->reg_next_use)
5349 free (pbi->reg_next_use);
5350
5351 free (pbi);
5352 }
5353
5354 /* Compute the registers live at the beginning of a basic block BB from
5355 those live at the end.
5356
5357 When called, REG_LIVE contains those live at the end. On return, it
5358 contains those live at the beginning.
5359
5360 LOCAL_SET, if non-null, will be set with all registers killed
5361 unconditionally by this basic block.
5362 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5363 killed conditionally by this basic block. If there is any unconditional
5364 set of a register, then the corresponding bit will be set in LOCAL_SET
5365 and cleared in COND_LOCAL_SET.
5366 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5367 case, the resulting set will be equal to the union of the two sets that
5368 would otherwise be computed. */
5369
5370 void
5371 propagate_block (bb, live, local_set, cond_local_set, flags)
5372 basic_block bb;
5373 regset live;
5374 regset local_set;
5375 regset cond_local_set;
5376 int flags;
5377 {
5378 struct propagate_block_info *pbi;
5379 rtx insn, prev;
5380
5381 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5382
5383 if (flags & PROP_REG_INFO)
5384 {
5385 register int i;
5386
5387 /* Process the regs live at the end of the block.
5388 Mark them as not local to any one basic block. */
5389 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5390 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5391 }
5392
5393 /* Scan the block an insn at a time from end to beginning. */
5394
5395 for (insn = bb->end;; insn = prev)
5396 {
5397 /* If this is a call to `setjmp' et al, warn if any
5398 non-volatile datum is live. */
5399 if ((flags & PROP_REG_INFO)
5400 && GET_CODE (insn) == NOTE
5401 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5402 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
5403
5404 prev = propagate_one_insn (pbi, insn);
5405
5406 if (insn == bb->head)
5407 break;
5408 }
5409
5410 free_propagate_block_info (pbi);
5411 }
5412 \f
5413 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5414 (SET expressions whose destinations are registers dead after the insn).
5415 NEEDED is the regset that says which regs are alive after the insn.
5416
5417 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5418
5419 If X is the entire body of an insn, NOTES contains the reg notes
5420 pertaining to the insn. */
5421
5422 static int
5423 insn_dead_p (pbi, x, call_ok, notes)
5424 struct propagate_block_info *pbi;
5425 rtx x;
5426 int call_ok;
5427 rtx notes ATTRIBUTE_UNUSED;
5428 {
5429 enum rtx_code code = GET_CODE (x);
5430
5431 #ifdef AUTO_INC_DEC
5432 /* If flow is invoked after reload, we must take existing AUTO_INC
5433 expresions into account. */
5434 if (reload_completed)
5435 {
5436 for (; notes; notes = XEXP (notes, 1))
5437 {
5438 if (REG_NOTE_KIND (notes) == REG_INC)
5439 {
5440 int regno = REGNO (XEXP (notes, 0));
5441
5442 /* Don't delete insns to set global regs. */
5443 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5444 || REGNO_REG_SET_P (pbi->reg_live, regno))
5445 return 0;
5446 }
5447 }
5448 }
5449 #endif
5450
5451 /* If setting something that's a reg or part of one,
5452 see if that register's altered value will be live. */
5453
5454 if (code == SET)
5455 {
5456 rtx r = SET_DEST (x);
5457
5458 #ifdef HAVE_cc0
5459 if (GET_CODE (r) == CC0)
5460 return ! pbi->cc0_live;
5461 #endif
5462
5463 /* A SET that is a subroutine call cannot be dead. */
5464 if (GET_CODE (SET_SRC (x)) == CALL)
5465 {
5466 if (! call_ok)
5467 return 0;
5468 }
5469
5470 /* Don't eliminate loads from volatile memory or volatile asms. */
5471 else if (volatile_refs_p (SET_SRC (x)))
5472 return 0;
5473
5474 if (GET_CODE (r) == MEM)
5475 {
5476 rtx temp;
5477
5478 if (MEM_VOLATILE_P (r))
5479 return 0;
5480
5481 /* Walk the set of memory locations we are currently tracking
5482 and see if one is an identical match to this memory location.
5483 If so, this memory write is dead (remember, we're walking
5484 backwards from the end of the block to the start). Since
5485 rtx_equal_p does not check the alias set or flags, we also
5486 must have the potential for them to conflict (anti_dependence). */
5487 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5488 if (anti_dependence (r, XEXP (temp, 0)))
5489 {
5490 rtx mem = XEXP (temp, 0);
5491
5492 if (rtx_equal_p (mem, r))
5493 return 1;
5494 #ifdef AUTO_INC_DEC
5495 /* Check if memory reference matches an auto increment. Only
5496 post increment/decrement or modify are valid. */
5497 if (GET_MODE (mem) == GET_MODE (r)
5498 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5499 || GET_CODE (XEXP (mem, 0)) == POST_INC
5500 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5501 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5502 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
5503 return 1;
5504 #endif
5505 }
5506 }
5507 else
5508 {
5509 while (GET_CODE (r) == SUBREG
5510 || GET_CODE (r) == STRICT_LOW_PART
5511 || GET_CODE (r) == ZERO_EXTRACT)
5512 r = XEXP (r, 0);
5513
5514 if (GET_CODE (r) == REG)
5515 {
5516 int regno = REGNO (r);
5517
5518 /* Obvious. */
5519 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5520 return 0;
5521
5522 /* If this is a hard register, verify that subsequent
5523 words are not needed. */
5524 if (regno < FIRST_PSEUDO_REGISTER)
5525 {
5526 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5527
5528 while (--n > 0)
5529 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5530 return 0;
5531 }
5532
5533 /* Don't delete insns to set global regs. */
5534 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5535 return 0;
5536
5537 /* Make sure insns to set the stack pointer aren't deleted. */
5538 if (regno == STACK_POINTER_REGNUM)
5539 return 0;
5540
5541 /* ??? These bits might be redundant with the force live bits
5542 in calculate_global_regs_live. We would delete from
5543 sequential sets; whether this actually affects real code
5544 for anything but the stack pointer I don't know. */
5545 /* Make sure insns to set the frame pointer aren't deleted. */
5546 if (regno == FRAME_POINTER_REGNUM
5547 && (! reload_completed || frame_pointer_needed))
5548 return 0;
5549 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5550 if (regno == HARD_FRAME_POINTER_REGNUM
5551 && (! reload_completed || frame_pointer_needed))
5552 return 0;
5553 #endif
5554
5555 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5556 /* Make sure insns to set arg pointer are never deleted
5557 (if the arg pointer isn't fixed, there will be a USE
5558 for it, so we can treat it normally). */
5559 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5560 return 0;
5561 #endif
5562
5563 /* Otherwise, the set is dead. */
5564 return 1;
5565 }
5566 }
5567 }
5568
5569 /* If performing several activities, insn is dead if each activity
5570 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5571 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5572 worth keeping. */
5573 else if (code == PARALLEL)
5574 {
5575 int i = XVECLEN (x, 0);
5576
5577 for (i--; i >= 0; i--)
5578 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5579 && GET_CODE (XVECEXP (x, 0, i)) != USE
5580 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5581 return 0;
5582
5583 return 1;
5584 }
5585
5586 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5587 is not necessarily true for hard registers. */
5588 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5589 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5590 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5591 return 1;
5592
5593 /* We do not check other CLOBBER or USE here. An insn consisting of just
5594 a CLOBBER or just a USE should not be deleted. */
5595 return 0;
5596 }
5597
5598 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5599 return 1 if the entire library call is dead.
5600 This is true if INSN copies a register (hard or pseudo)
5601 and if the hard return reg of the call insn is dead.
5602 (The caller should have tested the destination of the SET inside
5603 INSN already for death.)
5604
5605 If this insn doesn't just copy a register, then we don't
5606 have an ordinary libcall. In that case, cse could not have
5607 managed to substitute the source for the dest later on,
5608 so we can assume the libcall is dead.
5609
5610 PBI is the block info giving pseudoregs live before this insn.
5611 NOTE is the REG_RETVAL note of the insn. */
5612
5613 static int
5614 libcall_dead_p (pbi, note, insn)
5615 struct propagate_block_info *pbi;
5616 rtx note;
5617 rtx insn;
5618 {
5619 rtx x = single_set (insn);
5620
5621 if (x)
5622 {
5623 register rtx r = SET_SRC (x);
5624 if (GET_CODE (r) == REG)
5625 {
5626 rtx call = XEXP (note, 0);
5627 rtx call_pat;
5628 register int i;
5629
5630 /* Find the call insn. */
5631 while (call != insn && GET_CODE (call) != CALL_INSN)
5632 call = NEXT_INSN (call);
5633
5634 /* If there is none, do nothing special,
5635 since ordinary death handling can understand these insns. */
5636 if (call == insn)
5637 return 0;
5638
5639 /* See if the hard reg holding the value is dead.
5640 If this is a PARALLEL, find the call within it. */
5641 call_pat = PATTERN (call);
5642 if (GET_CODE (call_pat) == PARALLEL)
5643 {
5644 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5645 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5646 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5647 break;
5648
5649 /* This may be a library call that is returning a value
5650 via invisible pointer. Do nothing special, since
5651 ordinary death handling can understand these insns. */
5652 if (i < 0)
5653 return 0;
5654
5655 call_pat = XVECEXP (call_pat, 0, i);
5656 }
5657
5658 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5659 }
5660 }
5661 return 1;
5662 }
5663
5664 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5665 live at function entry. Don't count global register variables, variables
5666 in registers that can be used for function arg passing, or variables in
5667 fixed hard registers. */
5668
5669 int
5670 regno_uninitialized (regno)
5671 int regno;
5672 {
5673 if (n_basic_blocks == 0
5674 || (regno < FIRST_PSEUDO_REGISTER
5675 && (global_regs[regno]
5676 || fixed_regs[regno]
5677 || FUNCTION_ARG_REGNO_P (regno))))
5678 return 0;
5679
5680 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5681 }
5682
5683 /* 1 if register REGNO was alive at a place where `setjmp' was called
5684 and was set more than once or is an argument.
5685 Such regs may be clobbered by `longjmp'. */
5686
5687 int
5688 regno_clobbered_at_setjmp (regno)
5689 int regno;
5690 {
5691 if (n_basic_blocks == 0)
5692 return 0;
5693
5694 return ((REG_N_SETS (regno) > 1
5695 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
5696 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
5697 }
5698 \f
5699 /* INSN references memory, possibly using autoincrement addressing modes.
5700 Find any entries on the mem_set_list that need to be invalidated due
5701 to an address change. */
5702
5703 static void
5704 invalidate_mems_from_autoinc (pbi, insn)
5705 struct propagate_block_info *pbi;
5706 rtx insn;
5707 {
5708 rtx note = REG_NOTES (insn);
5709 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
5710 {
5711 if (REG_NOTE_KIND (note) == REG_INC)
5712 {
5713 rtx temp = pbi->mem_set_list;
5714 rtx prev = NULL_RTX;
5715 rtx next;
5716
5717 while (temp)
5718 {
5719 next = XEXP (temp, 1);
5720 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
5721 {
5722 /* Splice temp out of list. */
5723 if (prev)
5724 XEXP (prev, 1) = next;
5725 else
5726 pbi->mem_set_list = next;
5727 free_EXPR_LIST_node (temp);
5728 pbi->mem_set_list_len--;
5729 }
5730 else
5731 prev = temp;
5732 temp = next;
5733 }
5734 }
5735 }
5736 }
5737
5738 /* EXP is either a MEM or a REG. Remove any dependant entries
5739 from pbi->mem_set_list. */
5740
5741 static void
5742 invalidate_mems_from_set (pbi, exp)
5743 struct propagate_block_info *pbi;
5744 rtx exp;
5745 {
5746 rtx temp = pbi->mem_set_list;
5747 rtx prev = NULL_RTX;
5748 rtx next;
5749
5750 while (temp)
5751 {
5752 next = XEXP (temp, 1);
5753 if ((GET_CODE (exp) == MEM
5754 && output_dependence (XEXP (temp, 0), exp))
5755 || (GET_CODE (exp) == REG
5756 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
5757 {
5758 /* Splice this entry out of the list. */
5759 if (prev)
5760 XEXP (prev, 1) = next;
5761 else
5762 pbi->mem_set_list = next;
5763 free_EXPR_LIST_node (temp);
5764 pbi->mem_set_list_len--;
5765 }
5766 else
5767 prev = temp;
5768 temp = next;
5769 }
5770 }
5771
5772 /* Process the registers that are set within X. Their bits are set to
5773 1 in the regset DEAD, because they are dead prior to this insn.
5774
5775 If INSN is nonzero, it is the insn being processed.
5776
5777 FLAGS is the set of operations to perform. */
5778
5779 static void
5780 mark_set_regs (pbi, x, insn)
5781 struct propagate_block_info *pbi;
5782 rtx x, insn;
5783 {
5784 rtx cond = NULL_RTX;
5785 rtx link;
5786 enum rtx_code code;
5787
5788 if (insn)
5789 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5790 {
5791 if (REG_NOTE_KIND (link) == REG_INC)
5792 mark_set_1 (pbi, SET, XEXP (link, 0),
5793 (GET_CODE (x) == COND_EXEC
5794 ? COND_EXEC_TEST (x) : NULL_RTX),
5795 insn, pbi->flags);
5796 }
5797 retry:
5798 switch (code = GET_CODE (x))
5799 {
5800 case SET:
5801 case CLOBBER:
5802 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
5803 return;
5804
5805 case COND_EXEC:
5806 cond = COND_EXEC_TEST (x);
5807 x = COND_EXEC_CODE (x);
5808 goto retry;
5809
5810 case PARALLEL:
5811 {
5812 register int i;
5813 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5814 {
5815 rtx sub = XVECEXP (x, 0, i);
5816 switch (code = GET_CODE (sub))
5817 {
5818 case COND_EXEC:
5819 if (cond != NULL_RTX)
5820 abort ();
5821
5822 cond = COND_EXEC_TEST (sub);
5823 sub = COND_EXEC_CODE (sub);
5824 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
5825 break;
5826 /* Fall through. */
5827
5828 case SET:
5829 case CLOBBER:
5830 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5831 break;
5832
5833 default:
5834 break;
5835 }
5836 }
5837 break;
5838 }
5839
5840 default:
5841 break;
5842 }
5843 }
5844
5845 /* Process a single set, which appears in INSN. REG (which may not
5846 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5847 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5848 If the set is conditional (because it appear in a COND_EXEC), COND
5849 will be the condition. */
5850
5851 static void
5852 mark_set_1 (pbi, code, reg, cond, insn, flags)
5853 struct propagate_block_info *pbi;
5854 enum rtx_code code;
5855 rtx reg, cond, insn;
5856 int flags;
5857 {
5858 int regno_first = -1, regno_last = -1;
5859 unsigned long not_dead = 0;
5860 int i;
5861
5862 /* Modifying just one hardware register of a multi-reg value or just a
5863 byte field of a register does not mean the value from before this insn
5864 is now dead. Of course, if it was dead after it's unused now. */
5865
5866 switch (GET_CODE (reg))
5867 {
5868 case PARALLEL:
5869 /* Some targets place small structures in registers for return values of
5870 functions. We have to detect this case specially here to get correct
5871 flow information. */
5872 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5873 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5874 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5875 flags);
5876 return;
5877
5878 case ZERO_EXTRACT:
5879 case SIGN_EXTRACT:
5880 case STRICT_LOW_PART:
5881 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5882 do
5883 reg = XEXP (reg, 0);
5884 while (GET_CODE (reg) == SUBREG
5885 || GET_CODE (reg) == ZERO_EXTRACT
5886 || GET_CODE (reg) == SIGN_EXTRACT
5887 || GET_CODE (reg) == STRICT_LOW_PART);
5888 if (GET_CODE (reg) == MEM)
5889 break;
5890 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5891 /* Fall through. */
5892
5893 case REG:
5894 regno_last = regno_first = REGNO (reg);
5895 if (regno_first < FIRST_PSEUDO_REGISTER)
5896 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5897 break;
5898
5899 case SUBREG:
5900 if (GET_CODE (SUBREG_REG (reg)) == REG)
5901 {
5902 enum machine_mode outer_mode = GET_MODE (reg);
5903 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5904
5905 /* Identify the range of registers affected. This is moderately
5906 tricky for hard registers. See alter_subreg. */
5907
5908 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5909 if (regno_first < FIRST_PSEUDO_REGISTER)
5910 {
5911 regno_first += subreg_regno_offset (regno_first, inner_mode,
5912 SUBREG_BYTE (reg),
5913 outer_mode);
5914 regno_last = (regno_first
5915 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5916
5917 /* Since we've just adjusted the register number ranges, make
5918 sure REG matches. Otherwise some_was_live will be clear
5919 when it shouldn't have been, and we'll create incorrect
5920 REG_UNUSED notes. */
5921 reg = gen_rtx_REG (outer_mode, regno_first);
5922 }
5923 else
5924 {
5925 /* If the number of words in the subreg is less than the number
5926 of words in the full register, we have a well-defined partial
5927 set. Otherwise the high bits are undefined.
5928
5929 This is only really applicable to pseudos, since we just took
5930 care of multi-word hard registers. */
5931 if (((GET_MODE_SIZE (outer_mode)
5932 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5933 < ((GET_MODE_SIZE (inner_mode)
5934 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5935 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5936 regno_first);
5937
5938 reg = SUBREG_REG (reg);
5939 }
5940 }
5941 else
5942 reg = SUBREG_REG (reg);
5943 break;
5944
5945 default:
5946 break;
5947 }
5948
5949 /* If this set is a MEM, then it kills any aliased writes.
5950 If this set is a REG, then it kills any MEMs which use the reg. */
5951 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5952 {
5953 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5954 invalidate_mems_from_set (pbi, reg);
5955
5956 /* If the memory reference had embedded side effects (autoincrement
5957 address modes. Then we may need to kill some entries on the
5958 memory set list. */
5959 if (insn && GET_CODE (reg) == MEM)
5960 invalidate_mems_from_autoinc (pbi, insn);
5961
5962 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5963 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5964 /* ??? With more effort we could track conditional memory life. */
5965 && ! cond
5966 /* We do not know the size of a BLKmode store, so we do not track
5967 them for redundant store elimination. */
5968 && GET_MODE (reg) != BLKmode
5969 /* There are no REG_INC notes for SP, so we can't assume we'll see
5970 everything that invalidates it. To be safe, don't eliminate any
5971 stores though SP; none of them should be redundant anyway. */
5972 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5973 {
5974 #ifdef AUTO_INC_DEC
5975 /* Store a copy of mem, otherwise the address may be
5976 scrogged by find_auto_inc. */
5977 if (flags & PROP_AUTOINC)
5978 reg = shallow_copy_rtx (reg);
5979 #endif
5980 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5981 pbi->mem_set_list_len++;
5982 }
5983 }
5984
5985 if (GET_CODE (reg) == REG
5986 && ! (regno_first == FRAME_POINTER_REGNUM
5987 && (! reload_completed || frame_pointer_needed))
5988 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5989 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5990 && (! reload_completed || frame_pointer_needed))
5991 #endif
5992 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5993 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5994 #endif
5995 )
5996 {
5997 int some_was_live = 0, some_was_dead = 0;
5998
5999 for (i = regno_first; i <= regno_last; ++i)
6000 {
6001 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6002 if (pbi->local_set)
6003 {
6004 /* Order of the set operation matters here since both
6005 sets may be the same. */
6006 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
6007 if (cond != NULL_RTX
6008 && ! REGNO_REG_SET_P (pbi->local_set, i))
6009 SET_REGNO_REG_SET (pbi->cond_local_set, i);
6010 else
6011 SET_REGNO_REG_SET (pbi->local_set, i);
6012 }
6013 if (code != CLOBBER)
6014 SET_REGNO_REG_SET (pbi->new_set, i);
6015
6016 some_was_live |= needed_regno;
6017 some_was_dead |= ! needed_regno;
6018 }
6019
6020 #ifdef HAVE_conditional_execution
6021 /* Consider conditional death in deciding that the register needs
6022 a death note. */
6023 if (some_was_live && ! not_dead
6024 /* The stack pointer is never dead. Well, not strictly true,
6025 but it's very difficult to tell from here. Hopefully
6026 combine_stack_adjustments will fix up the most egregious
6027 errors. */
6028 && regno_first != STACK_POINTER_REGNUM)
6029 {
6030 for (i = regno_first; i <= regno_last; ++i)
6031 if (! mark_regno_cond_dead (pbi, i, cond))
6032 not_dead |= ((unsigned long) 1) << (i - regno_first);
6033 }
6034 #endif
6035
6036 /* Additional data to record if this is the final pass. */
6037 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
6038 | PROP_DEATH_NOTES | PROP_AUTOINC))
6039 {
6040 register rtx y;
6041 register int blocknum = pbi->bb->index;
6042
6043 y = NULL_RTX;
6044 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6045 {
6046 y = pbi->reg_next_use[regno_first];
6047
6048 /* The next use is no longer next, since a store intervenes. */
6049 for (i = regno_first; i <= regno_last; ++i)
6050 pbi->reg_next_use[i] = 0;
6051 }
6052
6053 if (flags & PROP_REG_INFO)
6054 {
6055 for (i = regno_first; i <= regno_last; ++i)
6056 {
6057 /* Count (weighted) references, stores, etc. This counts a
6058 register twice if it is modified, but that is correct. */
6059 REG_N_SETS (i) += 1;
6060 REG_N_REFS (i) += 1;
6061 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
6062 ? 1 : pbi->bb->frequency);
6063
6064 /* The insns where a reg is live are normally counted
6065 elsewhere, but we want the count to include the insn
6066 where the reg is set, and the normal counting mechanism
6067 would not count it. */
6068 REG_LIVE_LENGTH (i) += 1;
6069 }
6070
6071 /* If this is a hard reg, record this function uses the reg. */
6072 if (regno_first < FIRST_PSEUDO_REGISTER)
6073 {
6074 for (i = regno_first; i <= regno_last; i++)
6075 regs_ever_live[i] = 1;
6076 }
6077 else
6078 {
6079 /* Keep track of which basic blocks each reg appears in. */
6080 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6081 REG_BASIC_BLOCK (regno_first) = blocknum;
6082 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6083 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6084 }
6085 }
6086
6087 if (! some_was_dead)
6088 {
6089 if (flags & PROP_LOG_LINKS)
6090 {
6091 /* Make a logical link from the next following insn
6092 that uses this register, back to this insn.
6093 The following insns have already been processed.
6094
6095 We don't build a LOG_LINK for hard registers containing
6096 in ASM_OPERANDs. If these registers get replaced,
6097 we might wind up changing the semantics of the insn,
6098 even if reload can make what appear to be valid
6099 assignments later. */
6100 if (y && (BLOCK_NUM (y) == blocknum)
6101 && (regno_first >= FIRST_PSEUDO_REGISTER
6102 || asm_noperands (PATTERN (y)) < 0))
6103 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
6104 }
6105 }
6106 else if (not_dead)
6107 ;
6108 else if (! some_was_live)
6109 {
6110 if (flags & PROP_REG_INFO)
6111 REG_N_DEATHS (regno_first) += 1;
6112
6113 if (flags & PROP_DEATH_NOTES)
6114 {
6115 /* Note that dead stores have already been deleted
6116 when possible. If we get here, we have found a
6117 dead store that cannot be eliminated (because the
6118 same insn does something useful). Indicate this
6119 by marking the reg being set as dying here. */
6120 REG_NOTES (insn)
6121 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6122 }
6123 }
6124 else
6125 {
6126 if (flags & PROP_DEATH_NOTES)
6127 {
6128 /* This is a case where we have a multi-word hard register
6129 and some, but not all, of the words of the register are
6130 needed in subsequent insns. Write REG_UNUSED notes
6131 for those parts that were not needed. This case should
6132 be rare. */
6133
6134 for (i = regno_first; i <= regno_last; ++i)
6135 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6136 REG_NOTES (insn)
6137 = alloc_EXPR_LIST (REG_UNUSED,
6138 gen_rtx_REG (reg_raw_mode[i], i),
6139 REG_NOTES (insn));
6140 }
6141 }
6142 }
6143
6144 /* Mark the register as being dead. */
6145 if (some_was_live
6146 /* The stack pointer is never dead. Well, not strictly true,
6147 but it's very difficult to tell from here. Hopefully
6148 combine_stack_adjustments will fix up the most egregious
6149 errors. */
6150 && regno_first != STACK_POINTER_REGNUM)
6151 {
6152 for (i = regno_first; i <= regno_last; ++i)
6153 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6154 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
6155 }
6156 }
6157 else if (GET_CODE (reg) == REG)
6158 {
6159 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6160 pbi->reg_next_use[regno_first] = 0;
6161 }
6162
6163 /* If this is the last pass and this is a SCRATCH, show it will be dying
6164 here and count it. */
6165 else if (GET_CODE (reg) == SCRATCH)
6166 {
6167 if (flags & PROP_DEATH_NOTES)
6168 REG_NOTES (insn)
6169 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6170 }
6171 }
6172 \f
6173 #ifdef HAVE_conditional_execution
6174 /* Mark REGNO conditionally dead.
6175 Return true if the register is now unconditionally dead. */
6176
6177 static int
6178 mark_regno_cond_dead (pbi, regno, cond)
6179 struct propagate_block_info *pbi;
6180 int regno;
6181 rtx cond;
6182 {
6183 /* If this is a store to a predicate register, the value of the
6184 predicate is changing, we don't know that the predicate as seen
6185 before is the same as that seen after. Flush all dependent
6186 conditions from reg_cond_dead. This will make all such
6187 conditionally live registers unconditionally live. */
6188 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6189 flush_reg_cond_reg (pbi, regno);
6190
6191 /* If this is an unconditional store, remove any conditional
6192 life that may have existed. */
6193 if (cond == NULL_RTX)
6194 splay_tree_remove (pbi->reg_cond_dead, regno);
6195 else
6196 {
6197 splay_tree_node node;
6198 struct reg_cond_life_info *rcli;
6199 rtx ncond;
6200
6201 /* Otherwise this is a conditional set. Record that fact.
6202 It may have been conditionally used, or there may be a
6203 subsequent set with a complimentary condition. */
6204
6205 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6206 if (node == NULL)
6207 {
6208 /* The register was unconditionally live previously.
6209 Record the current condition as the condition under
6210 which it is dead. */
6211 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6212 rcli->condition = cond;
6213 rcli->stores = cond;
6214 rcli->orig_condition = const0_rtx;
6215 splay_tree_insert (pbi->reg_cond_dead, regno,
6216 (splay_tree_value) rcli);
6217
6218 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6219
6220 /* Not unconditionaly dead. */
6221 return 0;
6222 }
6223 else
6224 {
6225 /* The register was conditionally live previously.
6226 Add the new condition to the old. */
6227 rcli = (struct reg_cond_life_info *) node->value;
6228 ncond = rcli->condition;
6229 ncond = ior_reg_cond (ncond, cond, 1);
6230 if (rcli->stores == const0_rtx)
6231 rcli->stores = cond;
6232 else if (rcli->stores != const1_rtx)
6233 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6234
6235 /* If the register is now unconditionally dead, remove the entry
6236 in the splay_tree. A register is unconditionally dead if the
6237 dead condition ncond is true. A register is also unconditionally
6238 dead if the sum of all conditional stores is an unconditional
6239 store (stores is true), and the dead condition is identically the
6240 same as the original dead condition initialized at the end of
6241 the block. This is a pointer compare, not an rtx_equal_p
6242 compare. */
6243 if (ncond == const1_rtx
6244 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6245 splay_tree_remove (pbi->reg_cond_dead, regno);
6246 else
6247 {
6248 rcli->condition = ncond;
6249
6250 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6251
6252 /* Not unconditionaly dead. */
6253 return 0;
6254 }
6255 }
6256 }
6257
6258 return 1;
6259 }
6260
6261 /* Called from splay_tree_delete for pbi->reg_cond_life. */
6262
6263 static void
6264 free_reg_cond_life_info (value)
6265 splay_tree_value value;
6266 {
6267 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6268 free (rcli);
6269 }
6270
6271 /* Helper function for flush_reg_cond_reg. */
6272
6273 static int
6274 flush_reg_cond_reg_1 (node, data)
6275 splay_tree_node node;
6276 void *data;
6277 {
6278 struct reg_cond_life_info *rcli;
6279 int *xdata = (int *) data;
6280 unsigned int regno = xdata[0];
6281
6282 /* Don't need to search if last flushed value was farther on in
6283 the in-order traversal. */
6284 if (xdata[1] >= (int) node->key)
6285 return 0;
6286
6287 /* Splice out portions of the expression that refer to regno. */
6288 rcli = (struct reg_cond_life_info *) node->value;
6289 rcli->condition = elim_reg_cond (rcli->condition, regno);
6290 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6291 rcli->stores = elim_reg_cond (rcli->stores, regno);
6292
6293 /* If the entire condition is now false, signal the node to be removed. */
6294 if (rcli->condition == const0_rtx)
6295 {
6296 xdata[1] = node->key;
6297 return -1;
6298 }
6299 else if (rcli->condition == const1_rtx)
6300 abort ();
6301
6302 return 0;
6303 }
6304
6305 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
6306
6307 static void
6308 flush_reg_cond_reg (pbi, regno)
6309 struct propagate_block_info *pbi;
6310 int regno;
6311 {
6312 int pair[2];
6313
6314 pair[0] = regno;
6315 pair[1] = -1;
6316 while (splay_tree_foreach (pbi->reg_cond_dead,
6317 flush_reg_cond_reg_1, pair) == -1)
6318 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6319
6320 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6321 }
6322
6323 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6324 For ior/and, the ADD flag determines whether we want to add the new
6325 condition X to the old one unconditionally. If it is zero, we will
6326 only return a new expression if X allows us to simplify part of
6327 OLD, otherwise we return OLD unchanged to the caller.
6328 If ADD is nonzero, we will return a new condition in all cases. The
6329 toplevel caller of one of these functions should always pass 1 for
6330 ADD. */
6331
6332 static rtx
6333 ior_reg_cond (old, x, add)
6334 rtx old, x;
6335 int add;
6336 {
6337 rtx op0, op1;
6338
6339 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6340 {
6341 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6342 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6343 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6344 return const1_rtx;
6345 if (GET_CODE (x) == GET_CODE (old)
6346 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6347 return old;
6348 if (! add)
6349 return old;
6350 return gen_rtx_IOR (0, old, x);
6351 }
6352
6353 switch (GET_CODE (old))
6354 {
6355 case IOR:
6356 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6357 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6358 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6359 {
6360 if (op0 == const0_rtx)
6361 return op1;
6362 if (op1 == const0_rtx)
6363 return op0;
6364 if (op0 == const1_rtx || op1 == const1_rtx)
6365 return const1_rtx;
6366 if (op0 == XEXP (old, 0))
6367 op0 = gen_rtx_IOR (0, op0, x);
6368 else
6369 op1 = gen_rtx_IOR (0, op1, x);
6370 return gen_rtx_IOR (0, op0, op1);
6371 }
6372 if (! add)
6373 return old;
6374 return gen_rtx_IOR (0, old, x);
6375
6376 case AND:
6377 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6378 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6379 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6380 {
6381 if (op0 == const1_rtx)
6382 return op1;
6383 if (op1 == const1_rtx)
6384 return op0;
6385 if (op0 == const0_rtx || op1 == const0_rtx)
6386 return const0_rtx;
6387 if (op0 == XEXP (old, 0))
6388 op0 = gen_rtx_IOR (0, op0, x);
6389 else
6390 op1 = gen_rtx_IOR (0, op1, x);
6391 return gen_rtx_AND (0, op0, op1);
6392 }
6393 if (! add)
6394 return old;
6395 return gen_rtx_IOR (0, old, x);
6396
6397 case NOT:
6398 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6399 if (op0 != XEXP (old, 0))
6400 return not_reg_cond (op0);
6401 if (! add)
6402 return old;
6403 return gen_rtx_IOR (0, old, x);
6404
6405 default:
6406 abort ();
6407 }
6408 }
6409
6410 static rtx
6411 not_reg_cond (x)
6412 rtx x;
6413 {
6414 enum rtx_code x_code;
6415
6416 if (x == const0_rtx)
6417 return const1_rtx;
6418 else if (x == const1_rtx)
6419 return const0_rtx;
6420 x_code = GET_CODE (x);
6421 if (x_code == NOT)
6422 return XEXP (x, 0);
6423 if (GET_RTX_CLASS (x_code) == '<'
6424 && GET_CODE (XEXP (x, 0)) == REG)
6425 {
6426 if (XEXP (x, 1) != const0_rtx)
6427 abort ();
6428
6429 return gen_rtx_fmt_ee (reverse_condition (x_code),
6430 VOIDmode, XEXP (x, 0), const0_rtx);
6431 }
6432 return gen_rtx_NOT (0, x);
6433 }
6434
6435 static rtx
6436 and_reg_cond (old, x, add)
6437 rtx old, x;
6438 int add;
6439 {
6440 rtx op0, op1;
6441
6442 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6443 {
6444 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6445 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6446 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6447 return const0_rtx;
6448 if (GET_CODE (x) == GET_CODE (old)
6449 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6450 return old;
6451 if (! add)
6452 return old;
6453 return gen_rtx_AND (0, old, x);
6454 }
6455
6456 switch (GET_CODE (old))
6457 {
6458 case IOR:
6459 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6460 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6461 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6462 {
6463 if (op0 == const0_rtx)
6464 return op1;
6465 if (op1 == const0_rtx)
6466 return op0;
6467 if (op0 == const1_rtx || op1 == const1_rtx)
6468 return const1_rtx;
6469 if (op0 == XEXP (old, 0))
6470 op0 = gen_rtx_AND (0, op0, x);
6471 else
6472 op1 = gen_rtx_AND (0, op1, x);
6473 return gen_rtx_IOR (0, op0, op1);
6474 }
6475 if (! add)
6476 return old;
6477 return gen_rtx_AND (0, old, x);
6478
6479 case AND:
6480 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6481 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6482 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6483 {
6484 if (op0 == const1_rtx)
6485 return op1;
6486 if (op1 == const1_rtx)
6487 return op0;
6488 if (op0 == const0_rtx || op1 == const0_rtx)
6489 return const0_rtx;
6490 if (op0 == XEXP (old, 0))
6491 op0 = gen_rtx_AND (0, op0, x);
6492 else
6493 op1 = gen_rtx_AND (0, op1, x);
6494 return gen_rtx_AND (0, op0, op1);
6495 }
6496 if (! add)
6497 return old;
6498
6499 /* If X is identical to one of the existing terms of the AND,
6500 then just return what we already have. */
6501 /* ??? There really should be some sort of recursive check here in
6502 case there are nested ANDs. */
6503 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6504 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6505 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6506 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6507 return old;
6508
6509 return gen_rtx_AND (0, old, x);
6510
6511 case NOT:
6512 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6513 if (op0 != XEXP (old, 0))
6514 return not_reg_cond (op0);
6515 if (! add)
6516 return old;
6517 return gen_rtx_AND (0, old, x);
6518
6519 default:
6520 abort ();
6521 }
6522 }
6523
6524 /* Given a condition X, remove references to reg REGNO and return the
6525 new condition. The removal will be done so that all conditions
6526 involving REGNO are considered to evaluate to false. This function
6527 is used when the value of REGNO changes. */
6528
6529 static rtx
6530 elim_reg_cond (x, regno)
6531 rtx x;
6532 unsigned int regno;
6533 {
6534 rtx op0, op1;
6535
6536 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6537 {
6538 if (REGNO (XEXP (x, 0)) == regno)
6539 return const0_rtx;
6540 return x;
6541 }
6542
6543 switch (GET_CODE (x))
6544 {
6545 case AND:
6546 op0 = elim_reg_cond (XEXP (x, 0), regno);
6547 op1 = elim_reg_cond (XEXP (x, 1), regno);
6548 if (op0 == const0_rtx || op1 == const0_rtx)
6549 return const0_rtx;
6550 if (op0 == const1_rtx)
6551 return op1;
6552 if (op1 == const1_rtx)
6553 return op0;
6554 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6555 return x;
6556 return gen_rtx_AND (0, op0, op1);
6557
6558 case IOR:
6559 op0 = elim_reg_cond (XEXP (x, 0), regno);
6560 op1 = elim_reg_cond (XEXP (x, 1), regno);
6561 if (op0 == const1_rtx || op1 == const1_rtx)
6562 return const1_rtx;
6563 if (op0 == const0_rtx)
6564 return op1;
6565 if (op1 == const0_rtx)
6566 return op0;
6567 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6568 return x;
6569 return gen_rtx_IOR (0, op0, op1);
6570
6571 case NOT:
6572 op0 = elim_reg_cond (XEXP (x, 0), regno);
6573 if (op0 == const0_rtx)
6574 return const1_rtx;
6575 if (op0 == const1_rtx)
6576 return const0_rtx;
6577 if (op0 != XEXP (x, 0))
6578 return not_reg_cond (op0);
6579 return x;
6580
6581 default:
6582 abort ();
6583 }
6584 }
6585 #endif /* HAVE_conditional_execution */
6586 \f
6587 #ifdef AUTO_INC_DEC
6588
6589 /* Try to substitute the auto-inc expression INC as the address inside
6590 MEM which occurs in INSN. Currently, the address of MEM is an expression
6591 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6592 that has a single set whose source is a PLUS of INCR_REG and something
6593 else. */
6594
6595 static void
6596 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6597 struct propagate_block_info *pbi;
6598 rtx inc, insn, mem, incr, incr_reg;
6599 {
6600 int regno = REGNO (incr_reg);
6601 rtx set = single_set (incr);
6602 rtx q = SET_DEST (set);
6603 rtx y = SET_SRC (set);
6604 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6605
6606 /* Make sure this reg appears only once in this insn. */
6607 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6608 return;
6609
6610 if (dead_or_set_p (incr, incr_reg)
6611 /* Mustn't autoinc an eliminable register. */
6612 && (regno >= FIRST_PSEUDO_REGISTER
6613 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6614 {
6615 /* This is the simple case. Try to make the auto-inc. If
6616 we can't, we are done. Otherwise, we will do any
6617 needed updates below. */
6618 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6619 return;
6620 }
6621 else if (GET_CODE (q) == REG
6622 /* PREV_INSN used here to check the semi-open interval
6623 [insn,incr). */
6624 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6625 /* We must also check for sets of q as q may be
6626 a call clobbered hard register and there may
6627 be a call between PREV_INSN (insn) and incr. */
6628 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6629 {
6630 /* We have *p followed sometime later by q = p+size.
6631 Both p and q must be live afterward,
6632 and q is not used between INSN and its assignment.
6633 Change it to q = p, ...*q..., q = q+size.
6634 Then fall into the usual case. */
6635 rtx insns, temp;
6636
6637 start_sequence ();
6638 emit_move_insn (q, incr_reg);
6639 insns = get_insns ();
6640 end_sequence ();
6641
6642 if (basic_block_for_insn)
6643 for (temp = insns; temp; temp = NEXT_INSN (temp))
6644 set_block_for_insn (temp, pbi->bb);
6645
6646 /* If we can't make the auto-inc, or can't make the
6647 replacement into Y, exit. There's no point in making
6648 the change below if we can't do the auto-inc and doing
6649 so is not correct in the pre-inc case. */
6650
6651 XEXP (inc, 0) = q;
6652 validate_change (insn, &XEXP (mem, 0), inc, 1);
6653 validate_change (incr, &XEXP (y, opnum), q, 1);
6654 if (! apply_change_group ())
6655 return;
6656
6657 /* We now know we'll be doing this change, so emit the
6658 new insn(s) and do the updates. */
6659 emit_insns_before (insns, insn);
6660
6661 if (pbi->bb->head == insn)
6662 pbi->bb->head = insns;
6663
6664 /* INCR will become a NOTE and INSN won't contain a
6665 use of INCR_REG. If a use of INCR_REG was just placed in
6666 the insn before INSN, make that the next use.
6667 Otherwise, invalidate it. */
6668 if (GET_CODE (PREV_INSN (insn)) == INSN
6669 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6670 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6671 pbi->reg_next_use[regno] = PREV_INSN (insn);
6672 else
6673 pbi->reg_next_use[regno] = 0;
6674
6675 incr_reg = q;
6676 regno = REGNO (q);
6677
6678 /* REGNO is now used in INCR which is below INSN, but
6679 it previously wasn't live here. If we don't mark
6680 it as live, we'll put a REG_DEAD note for it
6681 on this insn, which is incorrect. */
6682 SET_REGNO_REG_SET (pbi->reg_live, regno);
6683
6684 /* If there are any calls between INSN and INCR, show
6685 that REGNO now crosses them. */
6686 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
6687 if (GET_CODE (temp) == CALL_INSN)
6688 REG_N_CALLS_CROSSED (regno)++;
6689 }
6690 else
6691 return;
6692
6693 /* If we haven't returned, it means we were able to make the
6694 auto-inc, so update the status. First, record that this insn
6695 has an implicit side effect. */
6696
6697 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
6698
6699 /* Modify the old increment-insn to simply copy
6700 the already-incremented value of our register. */
6701 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
6702 abort ();
6703
6704 /* If that makes it a no-op (copying the register into itself) delete
6705 it so it won't appear to be a "use" and a "set" of this
6706 register. */
6707 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
6708 {
6709 /* If the original source was dead, it's dead now. */
6710 rtx note;
6711
6712 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
6713 {
6714 remove_note (incr, note);
6715 if (XEXP (note, 0) != incr_reg)
6716 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
6717 }
6718
6719 PUT_CODE (incr, NOTE);
6720 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
6721 NOTE_SOURCE_FILE (incr) = 0;
6722 }
6723
6724 if (regno >= FIRST_PSEUDO_REGISTER)
6725 {
6726 /* Count an extra reference to the reg. When a reg is
6727 incremented, spilling it is worse, so we want to make
6728 that less likely. */
6729 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6730 ? 1 : pbi->bb->frequency);
6731
6732 /* Count the increment as a setting of the register,
6733 even though it isn't a SET in rtl. */
6734 REG_N_SETS (regno)++;
6735 }
6736 }
6737
6738 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
6739 reference. */
6740
6741 static void
6742 find_auto_inc (pbi, x, insn)
6743 struct propagate_block_info *pbi;
6744 rtx x;
6745 rtx insn;
6746 {
6747 rtx addr = XEXP (x, 0);
6748 HOST_WIDE_INT offset = 0;
6749 rtx set, y, incr, inc_val;
6750 int regno;
6751 int size = GET_MODE_SIZE (GET_MODE (x));
6752
6753 if (GET_CODE (insn) == JUMP_INSN)
6754 return;
6755
6756 /* Here we detect use of an index register which might be good for
6757 postincrement, postdecrement, preincrement, or predecrement. */
6758
6759 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
6760 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
6761
6762 if (GET_CODE (addr) != REG)
6763 return;
6764
6765 regno = REGNO (addr);
6766
6767 /* Is the next use an increment that might make auto-increment? */
6768 incr = pbi->reg_next_use[regno];
6769 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
6770 return;
6771 set = single_set (incr);
6772 if (set == 0 || GET_CODE (set) != SET)
6773 return;
6774 y = SET_SRC (set);
6775
6776 if (GET_CODE (y) != PLUS)
6777 return;
6778
6779 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
6780 inc_val = XEXP (y, 1);
6781 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
6782 inc_val = XEXP (y, 0);
6783 else
6784 return;
6785
6786 if (GET_CODE (inc_val) == CONST_INT)
6787 {
6788 if (HAVE_POST_INCREMENT
6789 && (INTVAL (inc_val) == size && offset == 0))
6790 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
6791 incr, addr);
6792 else if (HAVE_POST_DECREMENT
6793 && (INTVAL (inc_val) == -size && offset == 0))
6794 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
6795 incr, addr);
6796 else if (HAVE_PRE_INCREMENT
6797 && (INTVAL (inc_val) == size && offset == size))
6798 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
6799 incr, addr);
6800 else if (HAVE_PRE_DECREMENT
6801 && (INTVAL (inc_val) == -size && offset == -size))
6802 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
6803 incr, addr);
6804 else if (HAVE_POST_MODIFY_DISP && offset == 0)
6805 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6806 gen_rtx_PLUS (Pmode,
6807 addr,
6808 inc_val)),
6809 insn, x, incr, addr);
6810 }
6811 else if (GET_CODE (inc_val) == REG
6812 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
6813 NEXT_INSN (incr)))
6814
6815 {
6816 if (HAVE_POST_MODIFY_REG && offset == 0)
6817 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6818 gen_rtx_PLUS (Pmode,
6819 addr,
6820 inc_val)),
6821 insn, x, incr, addr);
6822 }
6823 }
6824
6825 #endif /* AUTO_INC_DEC */
6826 \f
6827 static void
6828 mark_used_reg (pbi, reg, cond, insn)
6829 struct propagate_block_info *pbi;
6830 rtx reg;
6831 rtx cond ATTRIBUTE_UNUSED;
6832 rtx insn;
6833 {
6834 unsigned int regno_first, regno_last, i;
6835 int some_was_live, some_was_dead, some_not_set;
6836
6837 regno_last = regno_first = REGNO (reg);
6838 if (regno_first < FIRST_PSEUDO_REGISTER)
6839 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6840
6841 /* Find out if any of this register is live after this instruction. */
6842 some_was_live = some_was_dead = 0;
6843 for (i = regno_first; i <= regno_last; ++i)
6844 {
6845 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6846 some_was_live |= needed_regno;
6847 some_was_dead |= ! needed_regno;
6848 }
6849
6850 /* Find out if any of the register was set this insn. */
6851 some_not_set = 0;
6852 for (i = regno_first; i <= regno_last; ++i)
6853 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6854
6855 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6856 {
6857 /* Record where each reg is used, so when the reg is set we know
6858 the next insn that uses it. */
6859 pbi->reg_next_use[regno_first] = insn;
6860 }
6861
6862 if (pbi->flags & PROP_REG_INFO)
6863 {
6864 if (regno_first < FIRST_PSEUDO_REGISTER)
6865 {
6866 /* If this is a register we are going to try to eliminate,
6867 don't mark it live here. If we are successful in
6868 eliminating it, it need not be live unless it is used for
6869 pseudos, in which case it will have been set live when it
6870 was allocated to the pseudos. If the register will not
6871 be eliminated, reload will set it live at that point.
6872
6873 Otherwise, record that this function uses this register. */
6874 /* ??? The PPC backend tries to "eliminate" on the pic
6875 register to itself. This should be fixed. In the mean
6876 time, hack around it. */
6877
6878 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6879 && (regno_first == FRAME_POINTER_REGNUM
6880 || regno_first == ARG_POINTER_REGNUM)))
6881 for (i = regno_first; i <= regno_last; ++i)
6882 regs_ever_live[i] = 1;
6883 }
6884 else
6885 {
6886 /* Keep track of which basic block each reg appears in. */
6887
6888 register int blocknum = pbi->bb->index;
6889 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6890 REG_BASIC_BLOCK (regno_first) = blocknum;
6891 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6892 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6893
6894 /* Count (weighted) number of uses of each reg. */
6895 REG_FREQ (regno_first)
6896 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6897 REG_N_REFS (regno_first)++;
6898 }
6899 }
6900
6901 /* Record and count the insns in which a reg dies. If it is used in
6902 this insn and was dead below the insn then it dies in this insn.
6903 If it was set in this insn, we do not make a REG_DEAD note;
6904 likewise if we already made such a note. */
6905 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6906 && some_was_dead
6907 && some_not_set)
6908 {
6909 /* Check for the case where the register dying partially
6910 overlaps the register set by this insn. */
6911 if (regno_first != regno_last)
6912 for (i = regno_first; i <= regno_last; ++i)
6913 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6914
6915 /* If none of the words in X is needed, make a REG_DEAD note.
6916 Otherwise, we must make partial REG_DEAD notes. */
6917 if (! some_was_live)
6918 {
6919 if ((pbi->flags & PROP_DEATH_NOTES)
6920 && ! find_regno_note (insn, REG_DEAD, regno_first))
6921 REG_NOTES (insn)
6922 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6923
6924 if (pbi->flags & PROP_REG_INFO)
6925 REG_N_DEATHS (regno_first)++;
6926 }
6927 else
6928 {
6929 /* Don't make a REG_DEAD note for a part of a register
6930 that is set in the insn. */
6931 for (i = regno_first; i <= regno_last; ++i)
6932 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6933 && ! dead_or_set_regno_p (insn, i))
6934 REG_NOTES (insn)
6935 = alloc_EXPR_LIST (REG_DEAD,
6936 gen_rtx_REG (reg_raw_mode[i], i),
6937 REG_NOTES (insn));
6938 }
6939 }
6940
6941 /* Mark the register as being live. */
6942 for (i = regno_first; i <= regno_last; ++i)
6943 {
6944 SET_REGNO_REG_SET (pbi->reg_live, i);
6945
6946 #ifdef HAVE_conditional_execution
6947 /* If this is a conditional use, record that fact. If it is later
6948 conditionally set, we'll know to kill the register. */
6949 if (cond != NULL_RTX)
6950 {
6951 splay_tree_node node;
6952 struct reg_cond_life_info *rcli;
6953 rtx ncond;
6954
6955 if (some_was_live)
6956 {
6957 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6958 if (node == NULL)
6959 {
6960 /* The register was unconditionally live previously.
6961 No need to do anything. */
6962 }
6963 else
6964 {
6965 /* The register was conditionally live previously.
6966 Subtract the new life cond from the old death cond. */
6967 rcli = (struct reg_cond_life_info *) node->value;
6968 ncond = rcli->condition;
6969 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6970
6971 /* If the register is now unconditionally live,
6972 remove the entry in the splay_tree. */
6973 if (ncond == const0_rtx)
6974 splay_tree_remove (pbi->reg_cond_dead, i);
6975 else
6976 {
6977 rcli->condition = ncond;
6978 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6979 REGNO (XEXP (cond, 0)));
6980 }
6981 }
6982 }
6983 else
6984 {
6985 /* The register was not previously live at all. Record
6986 the condition under which it is still dead. */
6987 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6988 rcli->condition = not_reg_cond (cond);
6989 rcli->stores = const0_rtx;
6990 rcli->orig_condition = const0_rtx;
6991 splay_tree_insert (pbi->reg_cond_dead, i,
6992 (splay_tree_value) rcli);
6993
6994 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6995 }
6996 }
6997 else if (some_was_live)
6998 {
6999 /* The register may have been conditionally live previously, but
7000 is now unconditionally live. Remove it from the conditionally
7001 dead list, so that a conditional set won't cause us to think
7002 it dead. */
7003 splay_tree_remove (pbi->reg_cond_dead, i);
7004 }
7005 #endif
7006 }
7007 }
7008
7009 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
7010 This is done assuming the registers needed from X are those that
7011 have 1-bits in PBI->REG_LIVE.
7012
7013 INSN is the containing instruction. If INSN is dead, this function
7014 is not called. */
7015
7016 static void
7017 mark_used_regs (pbi, x, cond, insn)
7018 struct propagate_block_info *pbi;
7019 rtx x, cond, insn;
7020 {
7021 register RTX_CODE code;
7022 register int regno;
7023 int flags = pbi->flags;
7024
7025 retry:
7026 code = GET_CODE (x);
7027 switch (code)
7028 {
7029 case LABEL_REF:
7030 case SYMBOL_REF:
7031 case CONST_INT:
7032 case CONST:
7033 case CONST_DOUBLE:
7034 case PC:
7035 case ADDR_VEC:
7036 case ADDR_DIFF_VEC:
7037 return;
7038
7039 #ifdef HAVE_cc0
7040 case CC0:
7041 pbi->cc0_live = 1;
7042 return;
7043 #endif
7044
7045 case CLOBBER:
7046 /* If we are clobbering a MEM, mark any registers inside the address
7047 as being used. */
7048 if (GET_CODE (XEXP (x, 0)) == MEM)
7049 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
7050 return;
7051
7052 case MEM:
7053 /* Don't bother watching stores to mems if this is not the
7054 final pass. We'll not be deleting dead stores this round. */
7055 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
7056 {
7057 /* Invalidate the data for the last MEM stored, but only if MEM is
7058 something that can be stored into. */
7059 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
7060 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
7061 /* Needn't clear the memory set list. */
7062 ;
7063 else
7064 {
7065 rtx temp = pbi->mem_set_list;
7066 rtx prev = NULL_RTX;
7067 rtx next;
7068
7069 while (temp)
7070 {
7071 next = XEXP (temp, 1);
7072 if (anti_dependence (XEXP (temp, 0), x))
7073 {
7074 /* Splice temp out of the list. */
7075 if (prev)
7076 XEXP (prev, 1) = next;
7077 else
7078 pbi->mem_set_list = next;
7079 free_EXPR_LIST_node (temp);
7080 pbi->mem_set_list_len--;
7081 }
7082 else
7083 prev = temp;
7084 temp = next;
7085 }
7086 }
7087
7088 /* If the memory reference had embedded side effects (autoincrement
7089 address modes. Then we may need to kill some entries on the
7090 memory set list. */
7091 if (insn)
7092 invalidate_mems_from_autoinc (pbi, insn);
7093 }
7094
7095 #ifdef AUTO_INC_DEC
7096 if (flags & PROP_AUTOINC)
7097 find_auto_inc (pbi, x, insn);
7098 #endif
7099 break;
7100
7101 case SUBREG:
7102 #ifdef CLASS_CANNOT_CHANGE_MODE
7103 if (GET_CODE (SUBREG_REG (x)) == REG
7104 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
7105 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
7106 GET_MODE (SUBREG_REG (x))))
7107 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7108 #endif
7109
7110 /* While we're here, optimize this case. */
7111 x = SUBREG_REG (x);
7112 if (GET_CODE (x) != REG)
7113 goto retry;
7114 /* Fall through. */
7115
7116 case REG:
7117 /* See a register other than being set => mark it as needed. */
7118 mark_used_reg (pbi, x, cond, insn);
7119 return;
7120
7121 case SET:
7122 {
7123 register rtx testreg = SET_DEST (x);
7124 int mark_dest = 0;
7125
7126 /* If storing into MEM, don't show it as being used. But do
7127 show the address as being used. */
7128 if (GET_CODE (testreg) == MEM)
7129 {
7130 #ifdef AUTO_INC_DEC
7131 if (flags & PROP_AUTOINC)
7132 find_auto_inc (pbi, testreg, insn);
7133 #endif
7134 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7135 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7136 return;
7137 }
7138
7139 /* Storing in STRICT_LOW_PART is like storing in a reg
7140 in that this SET might be dead, so ignore it in TESTREG.
7141 but in some other ways it is like using the reg.
7142
7143 Storing in a SUBREG or a bit field is like storing the entire
7144 register in that if the register's value is not used
7145 then this SET is not needed. */
7146 while (GET_CODE (testreg) == STRICT_LOW_PART
7147 || GET_CODE (testreg) == ZERO_EXTRACT
7148 || GET_CODE (testreg) == SIGN_EXTRACT
7149 || GET_CODE (testreg) == SUBREG)
7150 {
7151 #ifdef CLASS_CANNOT_CHANGE_MODE
7152 if (GET_CODE (testreg) == SUBREG
7153 && GET_CODE (SUBREG_REG (testreg)) == REG
7154 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7155 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7156 GET_MODE (testreg)))
7157 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7158 #endif
7159
7160 /* Modifying a single register in an alternate mode
7161 does not use any of the old value. But these other
7162 ways of storing in a register do use the old value. */
7163 if (GET_CODE (testreg) == SUBREG
7164 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7165 ;
7166 else
7167 mark_dest = 1;
7168
7169 testreg = XEXP (testreg, 0);
7170 }
7171
7172 /* If this is a store into a register or group of registers,
7173 recursively scan the value being stored. */
7174
7175 if ((GET_CODE (testreg) == PARALLEL
7176 && GET_MODE (testreg) == BLKmode)
7177 || (GET_CODE (testreg) == REG
7178 && (regno = REGNO (testreg),
7179 ! (regno == FRAME_POINTER_REGNUM
7180 && (! reload_completed || frame_pointer_needed)))
7181 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7182 && ! (regno == HARD_FRAME_POINTER_REGNUM
7183 && (! reload_completed || frame_pointer_needed))
7184 #endif
7185 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7186 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7187 #endif
7188 ))
7189 {
7190 if (mark_dest)
7191 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7192 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7193 return;
7194 }
7195 }
7196 break;
7197
7198 case ASM_OPERANDS:
7199 case UNSPEC_VOLATILE:
7200 case TRAP_IF:
7201 case ASM_INPUT:
7202 {
7203 /* Traditional and volatile asm instructions must be considered to use
7204 and clobber all hard registers, all pseudo-registers and all of
7205 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7206
7207 Consider for instance a volatile asm that changes the fpu rounding
7208 mode. An insn should not be moved across this even if it only uses
7209 pseudo-regs because it might give an incorrectly rounded result.
7210
7211 ?!? Unfortunately, marking all hard registers as live causes massive
7212 problems for the register allocator and marking all pseudos as live
7213 creates mountains of uninitialized variable warnings.
7214
7215 So for now, just clear the memory set list and mark any regs
7216 we can find in ASM_OPERANDS as used. */
7217 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7218 {
7219 free_EXPR_LIST_list (&pbi->mem_set_list);
7220 pbi->mem_set_list_len = 0;
7221 }
7222
7223 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7224 We can not just fall through here since then we would be confused
7225 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
7226 traditional asms unlike their normal usage. */
7227 if (code == ASM_OPERANDS)
7228 {
7229 int j;
7230
7231 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7232 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7233 }
7234 break;
7235 }
7236
7237 case COND_EXEC:
7238 if (cond != NULL_RTX)
7239 abort ();
7240
7241 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7242
7243 cond = COND_EXEC_TEST (x);
7244 x = COND_EXEC_CODE (x);
7245 goto retry;
7246
7247 case PHI:
7248 /* We _do_not_ want to scan operands of phi nodes. Operands of
7249 a phi function are evaluated only when control reaches this
7250 block along a particular edge. Therefore, regs that appear
7251 as arguments to phi should not be added to the global live at
7252 start. */
7253 return;
7254
7255 default:
7256 break;
7257 }
7258
7259 /* Recursively scan the operands of this expression. */
7260
7261 {
7262 register const char *fmt = GET_RTX_FORMAT (code);
7263 register int i;
7264
7265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7266 {
7267 if (fmt[i] == 'e')
7268 {
7269 /* Tail recursive case: save a function call level. */
7270 if (i == 0)
7271 {
7272 x = XEXP (x, 0);
7273 goto retry;
7274 }
7275 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7276 }
7277 else if (fmt[i] == 'E')
7278 {
7279 register int j;
7280 for (j = 0; j < XVECLEN (x, i); j++)
7281 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7282 }
7283 }
7284 }
7285 }
7286 \f
7287 #ifdef AUTO_INC_DEC
7288
7289 static int
7290 try_pre_increment_1 (pbi, insn)
7291 struct propagate_block_info *pbi;
7292 rtx insn;
7293 {
7294 /* Find the next use of this reg. If in same basic block,
7295 make it do pre-increment or pre-decrement if appropriate. */
7296 rtx x = single_set (insn);
7297 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7298 * INTVAL (XEXP (SET_SRC (x), 1)));
7299 int regno = REGNO (SET_DEST (x));
7300 rtx y = pbi->reg_next_use[regno];
7301 if (y != 0
7302 && SET_DEST (x) != stack_pointer_rtx
7303 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7304 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7305 mode would be better. */
7306 && ! dead_or_set_p (y, SET_DEST (x))
7307 && try_pre_increment (y, SET_DEST (x), amount))
7308 {
7309 /* We have found a suitable auto-increment and already changed
7310 insn Y to do it. So flush this increment instruction. */
7311 propagate_block_delete_insn (pbi->bb, insn);
7312
7313 /* Count a reference to this reg for the increment insn we are
7314 deleting. When a reg is incremented, spilling it is worse,
7315 so we want to make that less likely. */
7316 if (regno >= FIRST_PSEUDO_REGISTER)
7317 {
7318 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
7319 ? 1 : pbi->bb->frequency);
7320 REG_N_SETS (regno)++;
7321 }
7322
7323 /* Flush any remembered memories depending on the value of
7324 the incremented register. */
7325 invalidate_mems_from_set (pbi, SET_DEST (x));
7326
7327 return 1;
7328 }
7329 return 0;
7330 }
7331
7332 /* Try to change INSN so that it does pre-increment or pre-decrement
7333 addressing on register REG in order to add AMOUNT to REG.
7334 AMOUNT is negative for pre-decrement.
7335 Returns 1 if the change could be made.
7336 This checks all about the validity of the result of modifying INSN. */
7337
7338 static int
7339 try_pre_increment (insn, reg, amount)
7340 rtx insn, reg;
7341 HOST_WIDE_INT amount;
7342 {
7343 register rtx use;
7344
7345 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7346 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7347 int pre_ok = 0;
7348 /* Nonzero if we can try to make a post-increment or post-decrement.
7349 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7350 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7351 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7352 int post_ok = 0;
7353
7354 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7355 int do_post = 0;
7356
7357 /* From the sign of increment, see which possibilities are conceivable
7358 on this target machine. */
7359 if (HAVE_PRE_INCREMENT && amount > 0)
7360 pre_ok = 1;
7361 if (HAVE_POST_INCREMENT && amount > 0)
7362 post_ok = 1;
7363
7364 if (HAVE_PRE_DECREMENT && amount < 0)
7365 pre_ok = 1;
7366 if (HAVE_POST_DECREMENT && amount < 0)
7367 post_ok = 1;
7368
7369 if (! (pre_ok || post_ok))
7370 return 0;
7371
7372 /* It is not safe to add a side effect to a jump insn
7373 because if the incremented register is spilled and must be reloaded
7374 there would be no way to store the incremented value back in memory. */
7375
7376 if (GET_CODE (insn) == JUMP_INSN)
7377 return 0;
7378
7379 use = 0;
7380 if (pre_ok)
7381 use = find_use_as_address (PATTERN (insn), reg, 0);
7382 if (post_ok && (use == 0 || use == (rtx) 1))
7383 {
7384 use = find_use_as_address (PATTERN (insn), reg, -amount);
7385 do_post = 1;
7386 }
7387
7388 if (use == 0 || use == (rtx) 1)
7389 return 0;
7390
7391 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7392 return 0;
7393
7394 /* See if this combination of instruction and addressing mode exists. */
7395 if (! validate_change (insn, &XEXP (use, 0),
7396 gen_rtx_fmt_e (amount > 0
7397 ? (do_post ? POST_INC : PRE_INC)
7398 : (do_post ? POST_DEC : PRE_DEC),
7399 Pmode, reg), 0))
7400 return 0;
7401
7402 /* Record that this insn now has an implicit side effect on X. */
7403 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7404 return 1;
7405 }
7406
7407 #endif /* AUTO_INC_DEC */
7408 \f
7409 /* Find the place in the rtx X where REG is used as a memory address.
7410 Return the MEM rtx that so uses it.
7411 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7412 (plus REG (const_int PLUSCONST)).
7413
7414 If such an address does not appear, return 0.
7415 If REG appears more than once, or is used other than in such an address,
7416 return (rtx)1. */
7417
7418 rtx
7419 find_use_as_address (x, reg, plusconst)
7420 register rtx x;
7421 rtx reg;
7422 HOST_WIDE_INT plusconst;
7423 {
7424 enum rtx_code code = GET_CODE (x);
7425 const char *fmt = GET_RTX_FORMAT (code);
7426 register int i;
7427 register rtx value = 0;
7428 register rtx tem;
7429
7430 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7431 return x;
7432
7433 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7434 && XEXP (XEXP (x, 0), 0) == reg
7435 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7436 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7437 return x;
7438
7439 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7440 {
7441 /* If REG occurs inside a MEM used in a bit-field reference,
7442 that is unacceptable. */
7443 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7444 return (rtx) (HOST_WIDE_INT) 1;
7445 }
7446
7447 if (x == reg)
7448 return (rtx) (HOST_WIDE_INT) 1;
7449
7450 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7451 {
7452 if (fmt[i] == 'e')
7453 {
7454 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7455 if (value == 0)
7456 value = tem;
7457 else if (tem != 0)
7458 return (rtx) (HOST_WIDE_INT) 1;
7459 }
7460 else if (fmt[i] == 'E')
7461 {
7462 register int j;
7463 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7464 {
7465 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7466 if (value == 0)
7467 value = tem;
7468 else if (tem != 0)
7469 return (rtx) (HOST_WIDE_INT) 1;
7470 }
7471 }
7472 }
7473
7474 return value;
7475 }
7476 \f
7477 /* Write information about registers and basic blocks into FILE.
7478 This is part of making a debugging dump. */
7479
7480 void
7481 dump_regset (r, outf)
7482 regset r;
7483 FILE *outf;
7484 {
7485 int i;
7486 if (r == NULL)
7487 {
7488 fputs (" (nil)", outf);
7489 return;
7490 }
7491
7492 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7493 {
7494 fprintf (outf, " %d", i);
7495 if (i < FIRST_PSEUDO_REGISTER)
7496 fprintf (outf, " [%s]",
7497 reg_names[i]);
7498 });
7499 }
7500
7501 /* Print a human-reaable representation of R on the standard error
7502 stream. This function is designed to be used from within the
7503 debugger. */
7504
7505 void
7506 debug_regset (r)
7507 regset r;
7508 {
7509 dump_regset (r, stderr);
7510 putc ('\n', stderr);
7511 }
7512
7513 void
7514 dump_flow_info (file)
7515 FILE *file;
7516 {
7517 register int i;
7518 static const char * const reg_class_names[] = REG_CLASS_NAMES;
7519
7520 fprintf (file, "%d registers.\n", max_regno);
7521 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7522 if (REG_N_REFS (i))
7523 {
7524 enum reg_class class, altclass;
7525 fprintf (file, "\nRegister %d used %d times across %d insns",
7526 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7527 if (REG_BASIC_BLOCK (i) >= 0)
7528 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7529 if (REG_N_SETS (i))
7530 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7531 (REG_N_SETS (i) == 1) ? "" : "s");
7532 if (REG_USERVAR_P (regno_reg_rtx[i]))
7533 fprintf (file, "; user var");
7534 if (REG_N_DEATHS (i) != 1)
7535 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7536 if (REG_N_CALLS_CROSSED (i) == 1)
7537 fprintf (file, "; crosses 1 call");
7538 else if (REG_N_CALLS_CROSSED (i))
7539 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7540 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7541 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
7542 class = reg_preferred_class (i);
7543 altclass = reg_alternate_class (i);
7544 if (class != GENERAL_REGS || altclass != ALL_REGS)
7545 {
7546 if (altclass == ALL_REGS || class == ALL_REGS)
7547 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7548 else if (altclass == NO_REGS)
7549 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7550 else
7551 fprintf (file, "; pref %s, else %s",
7552 reg_class_names[(int) class],
7553 reg_class_names[(int) altclass]);
7554 }
7555 if (REG_POINTER (regno_reg_rtx[i]))
7556 fprintf (file, "; pointer");
7557 fprintf (file, ".\n");
7558 }
7559
7560 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7561 for (i = 0; i < n_basic_blocks; i++)
7562 {
7563 register basic_block bb = BASIC_BLOCK (i);
7564 register edge e;
7565
7566 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7567 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7568 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7569 fprintf (file, ", freq %i.\n", bb->frequency);
7570
7571 fprintf (file, "Predecessors: ");
7572 for (e = bb->pred; e; e = e->pred_next)
7573 dump_edge_info (file, e, 0);
7574
7575 fprintf (file, "\nSuccessors: ");
7576 for (e = bb->succ; e; e = e->succ_next)
7577 dump_edge_info (file, e, 1);
7578
7579 fprintf (file, "\nRegisters live at start:");
7580 dump_regset (bb->global_live_at_start, file);
7581
7582 fprintf (file, "\nRegisters live at end:");
7583 dump_regset (bb->global_live_at_end, file);
7584
7585 putc ('\n', file);
7586 }
7587
7588 putc ('\n', file);
7589 }
7590
7591 void
7592 debug_flow_info ()
7593 {
7594 dump_flow_info (stderr);
7595 }
7596
7597 void
7598 dump_edge_info (file, e, do_succ)
7599 FILE *file;
7600 edge e;
7601 int do_succ;
7602 {
7603 basic_block side = (do_succ ? e->dest : e->src);
7604
7605 if (side == ENTRY_BLOCK_PTR)
7606 fputs (" ENTRY", file);
7607 else if (side == EXIT_BLOCK_PTR)
7608 fputs (" EXIT", file);
7609 else
7610 fprintf (file, " %d", side->index);
7611
7612 if (e->probability)
7613 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7614
7615 if (e->count)
7616 {
7617 fprintf (file, " count:");
7618 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
7619 }
7620
7621 if (e->flags)
7622 {
7623 static const char * const bitnames[] = {
7624 "fallthru", "crit", "ab", "abcall", "eh", "fake"
7625 };
7626 int comma = 0;
7627 int i, flags = e->flags;
7628
7629 fputc (' ', file);
7630 fputc ('(', file);
7631 for (i = 0; flags; i++)
7632 if (flags & (1 << i))
7633 {
7634 flags &= ~(1 << i);
7635
7636 if (comma)
7637 fputc (',', file);
7638 if (i < (int) ARRAY_SIZE (bitnames))
7639 fputs (bitnames[i], file);
7640 else
7641 fprintf (file, "%d", i);
7642 comma = 1;
7643 }
7644 fputc (')', file);
7645 }
7646 }
7647 \f
7648 /* Print out one basic block with live information at start and end. */
7649
7650 void
7651 dump_bb (bb, outf)
7652 basic_block bb;
7653 FILE *outf;
7654 {
7655 rtx insn;
7656 rtx last;
7657 edge e;
7658
7659 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7660 bb->index, bb->loop_depth);
7661 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7662 putc ('\n', outf);
7663
7664 fputs (";; Predecessors: ", outf);
7665 for (e = bb->pred; e; e = e->pred_next)
7666 dump_edge_info (outf, e, 0);
7667 putc ('\n', outf);
7668
7669 fputs (";; Registers live at start:", outf);
7670 dump_regset (bb->global_live_at_start, outf);
7671 putc ('\n', outf);
7672
7673 for (insn = bb->head, last = NEXT_INSN (bb->end);
7674 insn != last;
7675 insn = NEXT_INSN (insn))
7676 print_rtl_single (outf, insn);
7677
7678 fputs (";; Registers live at end:", outf);
7679 dump_regset (bb->global_live_at_end, outf);
7680 putc ('\n', outf);
7681
7682 fputs (";; Successors: ", outf);
7683 for (e = bb->succ; e; e = e->succ_next)
7684 dump_edge_info (outf, e, 1);
7685 putc ('\n', outf);
7686 }
7687
7688 void
7689 debug_bb (bb)
7690 basic_block bb;
7691 {
7692 dump_bb (bb, stderr);
7693 }
7694
7695 void
7696 debug_bb_n (n)
7697 int n;
7698 {
7699 dump_bb (BASIC_BLOCK (n), stderr);
7700 }
7701
7702 /* Like print_rtl, but also print out live information for the start of each
7703 basic block. */
7704
7705 void
7706 print_rtl_with_bb (outf, rtx_first)
7707 FILE *outf;
7708 rtx rtx_first;
7709 {
7710 register rtx tmp_rtx;
7711
7712 if (rtx_first == 0)
7713 fprintf (outf, "(nil)\n");
7714 else
7715 {
7716 int i;
7717 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
7718 int max_uid = get_max_uid ();
7719 basic_block *start = (basic_block *)
7720 xcalloc (max_uid, sizeof (basic_block));
7721 basic_block *end = (basic_block *)
7722 xcalloc (max_uid, sizeof (basic_block));
7723 enum bb_state *in_bb_p = (enum bb_state *)
7724 xcalloc (max_uid, sizeof (enum bb_state));
7725
7726 for (i = n_basic_blocks - 1; i >= 0; i--)
7727 {
7728 basic_block bb = BASIC_BLOCK (i);
7729 rtx x;
7730
7731 start[INSN_UID (bb->head)] = bb;
7732 end[INSN_UID (bb->end)] = bb;
7733 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
7734 {
7735 enum bb_state state = IN_MULTIPLE_BB;
7736 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
7737 state = IN_ONE_BB;
7738 in_bb_p[INSN_UID (x)] = state;
7739
7740 if (x == bb->end)
7741 break;
7742 }
7743 }
7744
7745 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
7746 {
7747 int did_output;
7748 basic_block bb;
7749
7750 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
7751 {
7752 fprintf (outf, ";; Start of basic block %d, registers live:",
7753 bb->index);
7754 dump_regset (bb->global_live_at_start, outf);
7755 putc ('\n', outf);
7756 }
7757
7758 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
7759 && GET_CODE (tmp_rtx) != NOTE
7760 && GET_CODE (tmp_rtx) != BARRIER)
7761 fprintf (outf, ";; Insn is not within a basic block\n");
7762 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
7763 fprintf (outf, ";; Insn is in multiple basic blocks\n");
7764
7765 did_output = print_rtl_single (outf, tmp_rtx);
7766
7767 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
7768 {
7769 fprintf (outf, ";; End of basic block %d, registers live:\n",
7770 bb->index);
7771 dump_regset (bb->global_live_at_end, outf);
7772 putc ('\n', outf);
7773 }
7774
7775 if (did_output)
7776 putc ('\n', outf);
7777 }
7778
7779 free (start);
7780 free (end);
7781 free (in_bb_p);
7782 }
7783
7784 if (current_function_epilogue_delay_list != 0)
7785 {
7786 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
7787 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
7788 tmp_rtx = XEXP (tmp_rtx, 1))
7789 print_rtl_single (outf, XEXP (tmp_rtx, 0));
7790 }
7791 }
7792
7793 /* Dump the rtl into the current debugging dump file, then abort. */
7794
7795 static void
7796 print_rtl_and_abort_fcn (file, line, function)
7797 const char *file;
7798 int line;
7799 const char *function;
7800 {
7801 if (rtl_dump_file)
7802 {
7803 print_rtl_with_bb (rtl_dump_file, get_insns ());
7804 fclose (rtl_dump_file);
7805 }
7806
7807 fancy_abort (file, line, function);
7808 }
7809
7810 /* Recompute register set/reference counts immediately prior to register
7811 allocation.
7812
7813 This avoids problems with set/reference counts changing to/from values
7814 which have special meanings to the register allocators.
7815
7816 Additionally, the reference counts are the primary component used by the
7817 register allocators to prioritize pseudos for allocation to hard regs.
7818 More accurate reference counts generally lead to better register allocation.
7819
7820 F is the first insn to be scanned.
7821
7822 LOOP_STEP denotes how much loop_depth should be incremented per
7823 loop nesting level in order to increase the ref count more for
7824 references in a loop.
7825
7826 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
7827 possibly other information which is used by the register allocators. */
7828
7829 void
7830 recompute_reg_usage (f, loop_step)
7831 rtx f ATTRIBUTE_UNUSED;
7832 int loop_step ATTRIBUTE_UNUSED;
7833 {
7834 allocate_reg_life_data ();
7835 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
7836 }
7837
7838 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7839 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7840 of the number of registers that died. */
7841
7842 int
7843 count_or_remove_death_notes (blocks, kill)
7844 sbitmap blocks;
7845 int kill;
7846 {
7847 int i, count = 0;
7848
7849 for (i = n_basic_blocks - 1; i >= 0; --i)
7850 {
7851 basic_block bb;
7852 rtx insn;
7853
7854 if (blocks && ! TEST_BIT (blocks, i))
7855 continue;
7856
7857 bb = BASIC_BLOCK (i);
7858
7859 for (insn = bb->head;; insn = NEXT_INSN (insn))
7860 {
7861 if (INSN_P (insn))
7862 {
7863 rtx *pprev = &REG_NOTES (insn);
7864 rtx link = *pprev;
7865
7866 while (link)
7867 {
7868 switch (REG_NOTE_KIND (link))
7869 {
7870 case REG_DEAD:
7871 if (GET_CODE (XEXP (link, 0)) == REG)
7872 {
7873 rtx reg = XEXP (link, 0);
7874 int n;
7875
7876 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
7877 n = 1;
7878 else
7879 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
7880 count += n;
7881 }
7882 /* Fall through. */
7883
7884 case REG_UNUSED:
7885 if (kill)
7886 {
7887 rtx next = XEXP (link, 1);
7888 free_EXPR_LIST_node (link);
7889 *pprev = link = next;
7890 break;
7891 }
7892 /* Fall through. */
7893
7894 default:
7895 pprev = &XEXP (link, 1);
7896 link = *pprev;
7897 break;
7898 }
7899 }
7900 }
7901
7902 if (insn == bb->end)
7903 break;
7904 }
7905 }
7906
7907 return count;
7908 }
7909
7910
7911 /* Update insns block within BB. */
7912
7913 void
7914 update_bb_for_insn (bb)
7915 basic_block bb;
7916 {
7917 rtx insn;
7918
7919 if (! basic_block_for_insn)
7920 return;
7921
7922 for (insn = bb->head; ; insn = NEXT_INSN (insn))
7923 {
7924 set_block_for_insn (insn, bb);
7925
7926 if (insn == bb->end)
7927 break;
7928 }
7929 }
7930
7931
7932 /* Record INSN's block as BB. */
7933
7934 void
7935 set_block_for_insn (insn, bb)
7936 rtx insn;
7937 basic_block bb;
7938 {
7939 size_t uid = INSN_UID (insn);
7940 if (uid >= basic_block_for_insn->num_elements)
7941 {
7942 int new_size;
7943
7944 /* Add one-eighth the size so we don't keep calling xrealloc. */
7945 new_size = uid + (uid + 7) / 8;
7946
7947 VARRAY_GROW (basic_block_for_insn, new_size);
7948 }
7949 VARRAY_BB (basic_block_for_insn, uid) = bb;
7950 }
7951
7952 /* When a new insn has been inserted into an existing block, it will
7953 sometimes emit more than a single insn. This routine will set the
7954 block number for the specified insn, and look backwards in the insn
7955 chain to see if there are any other uninitialized insns immediately
7956 previous to this one, and set the block number for them too. */
7957
7958 void
7959 set_block_for_new_insns (insn, bb)
7960 rtx insn;
7961 basic_block bb;
7962 {
7963 set_block_for_insn (insn, bb);
7964
7965 /* Scan the previous instructions setting the block number until we find
7966 an instruction that has the block number set, or we find a note
7967 of any kind. */
7968 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
7969 {
7970 if (GET_CODE (insn) == NOTE)
7971 break;
7972 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
7973 || BLOCK_FOR_INSN (insn) == 0)
7974 set_block_for_insn (insn, bb);
7975 else
7976 break;
7977 }
7978 }
7979 \f
7980 /* Verify the CFG consistency. This function check some CFG invariants and
7981 aborts when something is wrong. Hope that this function will help to
7982 convert many optimization passes to preserve CFG consistent.
7983
7984 Currently it does following checks:
7985
7986 - test head/end pointers
7987 - overlapping of basic blocks
7988 - edge list correctness
7989 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
7990 - tails of basic blocks (ensure that boundary is necesary)
7991 - scans body of the basic block for JUMP_INSN, CODE_LABEL
7992 and NOTE_INSN_BASIC_BLOCK
7993 - check that all insns are in the basic blocks
7994 (except the switch handling code, barriers and notes)
7995 - check that all returns are followed by barriers
7996
7997 In future it can be extended check a lot of other stuff as well
7998 (reachability of basic blocks, life information, etc. etc.). */
7999
8000 void
8001 verify_flow_info ()
8002 {
8003 const int max_uid = get_max_uid ();
8004 const rtx rtx_first = get_insns ();
8005 rtx last_head = get_last_insn ();
8006 basic_block *bb_info, *last_visited;
8007 rtx x;
8008 int i, last_bb_num_seen, num_bb_notes, err = 0;
8009
8010 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
8011 last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
8012 sizeof (basic_block));
8013
8014 for (i = n_basic_blocks - 1; i >= 0; i--)
8015 {
8016 basic_block bb = BASIC_BLOCK (i);
8017 rtx head = bb->head;
8018 rtx end = bb->end;
8019
8020 /* Verify the end of the basic block is in the INSN chain. */
8021 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
8022 if (x == end)
8023 break;
8024 if (!x)
8025 {
8026 error ("End insn %d for block %d not found in the insn stream.",
8027 INSN_UID (end), bb->index);
8028 err = 1;
8029 }
8030
8031 /* Work backwards from the end to the head of the basic block
8032 to verify the head is in the RTL chain. */
8033 for (; x != NULL_RTX; x = PREV_INSN (x))
8034 {
8035 /* While walking over the insn chain, verify insns appear
8036 in only one basic block and initialize the BB_INFO array
8037 used by other passes. */
8038 if (bb_info[INSN_UID (x)] != NULL)
8039 {
8040 error ("Insn %d is in multiple basic blocks (%d and %d)",
8041 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
8042 err = 1;
8043 }
8044 bb_info[INSN_UID (x)] = bb;
8045
8046 if (x == head)
8047 break;
8048 }
8049 if (!x)
8050 {
8051 error ("Head insn %d for block %d not found in the insn stream.",
8052 INSN_UID (head), bb->index);
8053 err = 1;
8054 }
8055
8056 last_head = x;
8057 }
8058
8059 /* Now check the basic blocks (boundaries etc.) */
8060 for (i = n_basic_blocks - 1; i >= 0; i--)
8061 {
8062 basic_block bb = BASIC_BLOCK (i);
8063 /* Check correctness of edge lists. */
8064 edge e;
8065 int has_fallthru = 0;
8066
8067 e = bb->succ;
8068 while (e)
8069 {
8070 if (last_visited [e->dest->index + 2] == bb)
8071 {
8072 error ("verify_flow_info: Duplicate edge %i->%i",
8073 e->src->index, e->dest->index);
8074 err = 1;
8075 }
8076 last_visited [e->dest->index + 2] = bb;
8077
8078 if (e->flags & EDGE_FALLTHRU)
8079 has_fallthru = 1;
8080
8081 if ((e->flags & EDGE_FALLTHRU)
8082 && e->src != ENTRY_BLOCK_PTR
8083 && e->dest != EXIT_BLOCK_PTR)
8084 {
8085 rtx insn;
8086 if (e->src->index + 1 != e->dest->index)
8087 {
8088 error ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
8089 e->src->index, e->dest->index);
8090 err = 1;
8091 }
8092 else
8093 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
8094 insn = NEXT_INSN (insn))
8095 if (GET_CODE (insn) == BARRIER || INSN_P (insn))
8096 {
8097 error ("verify_flow_info: Incorrect fallthru %i->%i",
8098 e->src->index, e->dest->index);
8099 fatal_insn ("Wrong insn in the fallthru edge", insn);
8100 err = 1;
8101 }
8102 }
8103 if (e->src != bb)
8104 {
8105 error ("verify_flow_info: Basic block %d succ edge is corrupted",
8106 bb->index);
8107 fprintf (stderr, "Predecessor: ");
8108 dump_edge_info (stderr, e, 0);
8109 fprintf (stderr, "\nSuccessor: ");
8110 dump_edge_info (stderr, e, 1);
8111 fprintf (stderr, "\n");
8112 err = 1;
8113 }
8114 if (e->dest != EXIT_BLOCK_PTR)
8115 {
8116 edge e2 = e->dest->pred;
8117 while (e2 && e2 != e)
8118 e2 = e2->pred_next;
8119 if (!e2)
8120 {
8121 error ("Basic block %i edge lists are corrupted", bb->index);
8122 err = 1;
8123 }
8124 }
8125 e = e->succ_next;
8126 }
8127 if (!has_fallthru)
8128 {
8129 rtx insn = bb->end;
8130
8131 /* Ensure existence of barrier in BB with no fallthru edges. */
8132 for (insn = bb->end; GET_CODE (insn) != BARRIER;
8133 insn = NEXT_INSN (insn))
8134 if (!insn
8135 || (GET_CODE (insn) == NOTE
8136 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
8137 {
8138 error ("Missing barrier after block %i", bb->index);
8139 err = 1;
8140 }
8141 }
8142
8143 e = bb->pred;
8144 while (e)
8145 {
8146 if (e->dest != bb)
8147 {
8148 error ("Basic block %d pred edge is corrupted", bb->index);
8149 fputs ("Predecessor: ", stderr);
8150 dump_edge_info (stderr, e, 0);
8151 fputs ("\nSuccessor: ", stderr);
8152 dump_edge_info (stderr, e, 1);
8153 fputc ('\n', stderr);
8154 err = 1;
8155 }
8156 if (e->src != ENTRY_BLOCK_PTR)
8157 {
8158 edge e2 = e->src->succ;
8159 while (e2 && e2 != e)
8160 e2 = e2->succ_next;
8161 if (!e2)
8162 {
8163 error ("Basic block %i edge lists are corrupted", bb->index);
8164 err = 1;
8165 }
8166 }
8167 e = e->pred_next;
8168 }
8169
8170 /* OK pointers are correct. Now check the header of basic
8171 block. It ought to contain optional CODE_LABEL followed
8172 by NOTE_BASIC_BLOCK. */
8173 x = bb->head;
8174 if (GET_CODE (x) == CODE_LABEL)
8175 {
8176 if (bb->end == x)
8177 {
8178 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8179 bb->index);
8180 err = 1;
8181 }
8182 x = NEXT_INSN (x);
8183 }
8184 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8185 {
8186 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8187 bb->index);
8188 err = 1;
8189 }
8190
8191 if (bb->end == x)
8192 {
8193 /* Do checks for empty blocks here */
8194 }
8195 else
8196 {
8197 x = NEXT_INSN (x);
8198 while (x)
8199 {
8200 if (NOTE_INSN_BASIC_BLOCK_P (x))
8201 {
8202 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8203 INSN_UID (x), bb->index);
8204 err = 1;
8205 }
8206
8207 if (x == bb->end)
8208 break;
8209
8210 if (GET_CODE (x) == JUMP_INSN
8211 || GET_CODE (x) == CODE_LABEL
8212 || GET_CODE (x) == BARRIER)
8213 {
8214 error ("In basic block %d:", bb->index);
8215 fatal_insn ("Flow control insn inside a basic block", x);
8216 }
8217
8218 x = NEXT_INSN (x);
8219 }
8220 }
8221 }
8222
8223 last_bb_num_seen = -1;
8224 num_bb_notes = 0;
8225 x = rtx_first;
8226 while (x)
8227 {
8228 if (NOTE_INSN_BASIC_BLOCK_P (x))
8229 {
8230 basic_block bb = NOTE_BASIC_BLOCK (x);
8231 num_bb_notes++;
8232 if (bb->index != last_bb_num_seen + 1)
8233 internal_error ("Basic blocks not numbered consecutively.");
8234
8235 last_bb_num_seen = bb->index;
8236 }
8237
8238 if (!bb_info[INSN_UID (x)])
8239 {
8240 switch (GET_CODE (x))
8241 {
8242 case BARRIER:
8243 case NOTE:
8244 break;
8245
8246 case CODE_LABEL:
8247 /* An addr_vec is placed outside any block block. */
8248 if (NEXT_INSN (x)
8249 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8250 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8251 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
8252 {
8253 x = NEXT_INSN (x);
8254 }
8255
8256 /* But in any case, non-deletable labels can appear anywhere. */
8257 break;
8258
8259 default:
8260 fatal_insn ("Insn outside basic block", x);
8261 }
8262 }
8263
8264 if (INSN_P (x)
8265 && GET_CODE (x) == JUMP_INSN
8266 && returnjump_p (x) && ! condjump_p (x)
8267 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
8268 fatal_insn ("Return not followed by barrier", x);
8269
8270 x = NEXT_INSN (x);
8271 }
8272
8273 if (num_bb_notes != n_basic_blocks)
8274 internal_error
8275 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8276 num_bb_notes, n_basic_blocks);
8277
8278 if (err)
8279 internal_error ("verify_flow_info failed.");
8280
8281 /* Clean up. */
8282 free (bb_info);
8283 free (last_visited);
8284 }
8285 \f
8286 /* Functions to access an edge list with a vector representation.
8287 Enough data is kept such that given an index number, the
8288 pred and succ that edge represents can be determined, or
8289 given a pred and a succ, its index number can be returned.
8290 This allows algorithms which consume a lot of memory to
8291 represent the normally full matrix of edge (pred,succ) with a
8292 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8293 wasted space in the client code due to sparse flow graphs. */
8294
8295 /* This functions initializes the edge list. Basically the entire
8296 flowgraph is processed, and all edges are assigned a number,
8297 and the data structure is filled in. */
8298
8299 struct edge_list *
8300 create_edge_list ()
8301 {
8302 struct edge_list *elist;
8303 edge e;
8304 int num_edges;
8305 int x;
8306 int block_count;
8307
8308 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8309
8310 num_edges = 0;
8311
8312 /* Determine the number of edges in the flow graph by counting successor
8313 edges on each basic block. */
8314 for (x = 0; x < n_basic_blocks; x++)
8315 {
8316 basic_block bb = BASIC_BLOCK (x);
8317
8318 for (e = bb->succ; e; e = e->succ_next)
8319 num_edges++;
8320 }
8321 /* Don't forget successors of the entry block. */
8322 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8323 num_edges++;
8324
8325 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8326 elist->num_blocks = block_count;
8327 elist->num_edges = num_edges;
8328 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
8329
8330 num_edges = 0;
8331
8332 /* Follow successors of the entry block, and register these edges. */
8333 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8334 {
8335 elist->index_to_edge[num_edges] = e;
8336 num_edges++;
8337 }
8338
8339 for (x = 0; x < n_basic_blocks; x++)
8340 {
8341 basic_block bb = BASIC_BLOCK (x);
8342
8343 /* Follow all successors of blocks, and register these edges. */
8344 for (e = bb->succ; e; e = e->succ_next)
8345 {
8346 elist->index_to_edge[num_edges] = e;
8347 num_edges++;
8348 }
8349 }
8350 return elist;
8351 }
8352
8353 /* This function free's memory associated with an edge list. */
8354
8355 void
8356 free_edge_list (elist)
8357 struct edge_list *elist;
8358 {
8359 if (elist)
8360 {
8361 free (elist->index_to_edge);
8362 free (elist);
8363 }
8364 }
8365
8366 /* This function provides debug output showing an edge list. */
8367
8368 void
8369 print_edge_list (f, elist)
8370 FILE *f;
8371 struct edge_list *elist;
8372 {
8373 int x;
8374 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8375 elist->num_blocks - 2, elist->num_edges);
8376
8377 for (x = 0; x < elist->num_edges; x++)
8378 {
8379 fprintf (f, " %-4d - edge(", x);
8380 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8381 fprintf (f, "entry,");
8382 else
8383 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8384
8385 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8386 fprintf (f, "exit)\n");
8387 else
8388 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
8389 }
8390 }
8391
8392 /* This function provides an internal consistency check of an edge list,
8393 verifying that all edges are present, and that there are no
8394 extra edges. */
8395
8396 void
8397 verify_edge_list (f, elist)
8398 FILE *f;
8399 struct edge_list *elist;
8400 {
8401 int x, pred, succ, index;
8402 edge e;
8403
8404 for (x = 0; x < n_basic_blocks; x++)
8405 {
8406 basic_block bb = BASIC_BLOCK (x);
8407
8408 for (e = bb->succ; e; e = e->succ_next)
8409 {
8410 pred = e->src->index;
8411 succ = e->dest->index;
8412 index = EDGE_INDEX (elist, e->src, e->dest);
8413 if (index == EDGE_INDEX_NO_EDGE)
8414 {
8415 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8416 continue;
8417 }
8418 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8419 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8420 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8421 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8422 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8423 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8424 }
8425 }
8426 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8427 {
8428 pred = e->src->index;
8429 succ = e->dest->index;
8430 index = EDGE_INDEX (elist, e->src, e->dest);
8431 if (index == EDGE_INDEX_NO_EDGE)
8432 {
8433 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8434 continue;
8435 }
8436 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8437 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8438 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8439 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8440 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8441 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8442 }
8443 /* We've verified that all the edges are in the list, no lets make sure
8444 there are no spurious edges in the list. */
8445
8446 for (pred = 0; pred < n_basic_blocks; pred++)
8447 for (succ = 0; succ < n_basic_blocks; succ++)
8448 {
8449 basic_block p = BASIC_BLOCK (pred);
8450 basic_block s = BASIC_BLOCK (succ);
8451
8452 int found_edge = 0;
8453
8454 for (e = p->succ; e; e = e->succ_next)
8455 if (e->dest == s)
8456 {
8457 found_edge = 1;
8458 break;
8459 }
8460 for (e = s->pred; e; e = e->pred_next)
8461 if (e->src == p)
8462 {
8463 found_edge = 1;
8464 break;
8465 }
8466 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8467 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8468 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8469 pred, succ);
8470 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8471 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8472 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8473 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8474 BASIC_BLOCK (succ)));
8475 }
8476 for (succ = 0; succ < n_basic_blocks; succ++)
8477 {
8478 basic_block p = ENTRY_BLOCK_PTR;
8479 basic_block s = BASIC_BLOCK (succ);
8480
8481 int found_edge = 0;
8482
8483 for (e = p->succ; e; e = e->succ_next)
8484 if (e->dest == s)
8485 {
8486 found_edge = 1;
8487 break;
8488 }
8489 for (e = s->pred; e; e = e->pred_next)
8490 if (e->src == p)
8491 {
8492 found_edge = 1;
8493 break;
8494 }
8495 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8496 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8497 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8498 succ);
8499 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8500 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8501 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8502 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8503 BASIC_BLOCK (succ)));
8504 }
8505 for (pred = 0; pred < n_basic_blocks; pred++)
8506 {
8507 basic_block p = BASIC_BLOCK (pred);
8508 basic_block s = EXIT_BLOCK_PTR;
8509
8510 int found_edge = 0;
8511
8512 for (e = p->succ; e; e = e->succ_next)
8513 if (e->dest == s)
8514 {
8515 found_edge = 1;
8516 break;
8517 }
8518 for (e = s->pred; e; e = e->pred_next)
8519 if (e->src == p)
8520 {
8521 found_edge = 1;
8522 break;
8523 }
8524 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8525 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8526 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8527 pred);
8528 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8529 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8530 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8531 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8532 EXIT_BLOCK_PTR));
8533 }
8534 }
8535
8536 /* This routine will determine what, if any, edge there is between
8537 a specified predecessor and successor. */
8538
8539 int
8540 find_edge_index (edge_list, pred, succ)
8541 struct edge_list *edge_list;
8542 basic_block pred, succ;
8543 {
8544 int x;
8545 for (x = 0; x < NUM_EDGES (edge_list); x++)
8546 {
8547 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8548 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8549 return x;
8550 }
8551 return (EDGE_INDEX_NO_EDGE);
8552 }
8553
8554 /* This function will remove an edge from the flow graph. */
8555
8556 void
8557 remove_edge (e)
8558 edge e;
8559 {
8560 edge last_pred = NULL;
8561 edge last_succ = NULL;
8562 edge tmp;
8563 basic_block src, dest;
8564 src = e->src;
8565 dest = e->dest;
8566 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8567 last_succ = tmp;
8568
8569 if (!tmp)
8570 abort ();
8571 if (last_succ)
8572 last_succ->succ_next = e->succ_next;
8573 else
8574 src->succ = e->succ_next;
8575
8576 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8577 last_pred = tmp;
8578
8579 if (!tmp)
8580 abort ();
8581 if (last_pred)
8582 last_pred->pred_next = e->pred_next;
8583 else
8584 dest->pred = e->pred_next;
8585
8586 n_edges--;
8587 free (e);
8588 }
8589
8590 /* This routine will remove any fake successor edges for a basic block.
8591 When the edge is removed, it is also removed from whatever predecessor
8592 list it is in. */
8593
8594 static void
8595 remove_fake_successors (bb)
8596 basic_block bb;
8597 {
8598 edge e;
8599 for (e = bb->succ; e;)
8600 {
8601 edge tmp = e;
8602 e = e->succ_next;
8603 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8604 remove_edge (tmp);
8605 }
8606 }
8607
8608 /* This routine will remove all fake edges from the flow graph. If
8609 we remove all fake successors, it will automatically remove all
8610 fake predecessors. */
8611
8612 void
8613 remove_fake_edges ()
8614 {
8615 int x;
8616
8617 for (x = 0; x < n_basic_blocks; x++)
8618 remove_fake_successors (BASIC_BLOCK (x));
8619
8620 /* We've handled all successors except the entry block's. */
8621 remove_fake_successors (ENTRY_BLOCK_PTR);
8622 }
8623
8624 /* This function will add a fake edge between any block which has no
8625 successors, and the exit block. Some data flow equations require these
8626 edges to exist. */
8627
8628 void
8629 add_noreturn_fake_exit_edges ()
8630 {
8631 int x;
8632
8633 for (x = 0; x < n_basic_blocks; x++)
8634 if (BASIC_BLOCK (x)->succ == NULL)
8635 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
8636 }
8637
8638 /* This function adds a fake edge between any infinite loops to the
8639 exit block. Some optimizations require a path from each node to
8640 the exit node.
8641
8642 See also Morgan, Figure 3.10, pp. 82-83.
8643
8644 The current implementation is ugly, not attempting to minimize the
8645 number of inserted fake edges. To reduce the number of fake edges
8646 to insert, add fake edges from _innermost_ loops containing only
8647 nodes not reachable from the exit block. */
8648
8649 void
8650 connect_infinite_loops_to_exit ()
8651 {
8652 basic_block unvisited_block;
8653
8654 /* Perform depth-first search in the reverse graph to find nodes
8655 reachable from the exit block. */
8656 struct depth_first_search_dsS dfs_ds;
8657
8658 flow_dfs_compute_reverse_init (&dfs_ds);
8659 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8660
8661 /* Repeatedly add fake edges, updating the unreachable nodes. */
8662 while (1)
8663 {
8664 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8665 if (!unvisited_block)
8666 break;
8667 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8668 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8669 }
8670
8671 flow_dfs_compute_reverse_finish (&dfs_ds);
8672
8673 return;
8674 }
8675
8676 /* Redirect an edge's successor from one block to another. */
8677
8678 void
8679 redirect_edge_succ (e, new_succ)
8680 edge e;
8681 basic_block new_succ;
8682 {
8683 edge *pe;
8684
8685 /* Disconnect the edge from the old successor block. */
8686 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
8687 continue;
8688 *pe = (*pe)->pred_next;
8689
8690 /* Reconnect the edge to the new successor block. */
8691 e->pred_next = new_succ->pred;
8692 new_succ->pred = e;
8693 e->dest = new_succ;
8694 }
8695
8696 /* Like previous but avoid possible dupplicate edge. */
8697
8698 void
8699 redirect_edge_succ_nodup (e, new_succ)
8700 edge e;
8701 basic_block new_succ;
8702 {
8703 edge s;
8704 /* Check whether the edge is already present. */
8705 for (s = e->src->succ; s; s = s->succ_next)
8706 if (s->dest == new_succ && s != e)
8707 break;
8708 if (s)
8709 {
8710 s->flags |= e->flags;
8711 s->probability += e->probability;
8712 s->count += e->count;
8713 remove_edge (e);
8714 }
8715 else
8716 redirect_edge_succ (e, new_succ);
8717 }
8718
8719 /* Redirect an edge's predecessor from one block to another. */
8720
8721 void
8722 redirect_edge_pred (e, new_pred)
8723 edge e;
8724 basic_block new_pred;
8725 {
8726 edge *pe;
8727
8728 /* Disconnect the edge from the old predecessor block. */
8729 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
8730 continue;
8731 *pe = (*pe)->succ_next;
8732
8733 /* Reconnect the edge to the new predecessor block. */
8734 e->succ_next = new_pred->succ;
8735 new_pred->succ = e;
8736 e->src = new_pred;
8737 }
8738 \f
8739 /* Dump the list of basic blocks in the bitmap NODES. */
8740
8741 static void
8742 flow_nodes_print (str, nodes, file)
8743 const char *str;
8744 const sbitmap nodes;
8745 FILE *file;
8746 {
8747 int node;
8748
8749 if (! nodes)
8750 return;
8751
8752 fprintf (file, "%s { ", str);
8753 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
8754 fputs ("}\n", file);
8755 }
8756
8757
8758 /* Dump the list of edges in the array EDGE_LIST. */
8759
8760 static void
8761 flow_edge_list_print (str, edge_list, num_edges, file)
8762 const char *str;
8763 const edge *edge_list;
8764 int num_edges;
8765 FILE *file;
8766 {
8767 int i;
8768
8769 if (! edge_list)
8770 return;
8771
8772 fprintf (file, "%s { ", str);
8773 for (i = 0; i < num_edges; i++)
8774 fprintf (file, "%d->%d ", edge_list[i]->src->index,
8775 edge_list[i]->dest->index);
8776 fputs ("}\n", file);
8777 }
8778
8779
8780 /* Dump loop related CFG information. */
8781
8782 static void
8783 flow_loops_cfg_dump (loops, file)
8784 const struct loops *loops;
8785 FILE *file;
8786 {
8787 int i;
8788
8789 if (! loops->num || ! file || ! loops->cfg.dom)
8790 return;
8791
8792 for (i = 0; i < n_basic_blocks; i++)
8793 {
8794 edge succ;
8795
8796 fprintf (file, ";; %d succs { ", i);
8797 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
8798 fprintf (file, "%d ", succ->dest->index);
8799 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
8800 }
8801
8802 /* Dump the DFS node order. */
8803 if (loops->cfg.dfs_order)
8804 {
8805 fputs (";; DFS order: ", file);
8806 for (i = 0; i < n_basic_blocks; i++)
8807 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
8808 fputs ("\n", file);
8809 }
8810 /* Dump the reverse completion node order. */
8811 if (loops->cfg.rc_order)
8812 {
8813 fputs (";; RC order: ", file);
8814 for (i = 0; i < n_basic_blocks; i++)
8815 fprintf (file, "%d ", loops->cfg.rc_order[i]);
8816 fputs ("\n", file);
8817 }
8818 }
8819
8820 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
8821
8822 static int
8823 flow_loop_nested_p (outer, loop)
8824 struct loop *outer;
8825 struct loop *loop;
8826 {
8827 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
8828 }
8829
8830
8831 /* Dump the loop information specified by LOOP to the stream FILE
8832 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
8833 void
8834 flow_loop_dump (loop, file, loop_dump_aux, verbose)
8835 const struct loop *loop;
8836 FILE *file;
8837 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8838 int verbose;
8839 {
8840 if (! loop || ! loop->header)
8841 return;
8842
8843 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
8844 loop->num, INSN_UID (loop->first->head),
8845 INSN_UID (loop->last->end),
8846 loop->shared ? " shared" : "",
8847 loop->invalid ? " invalid" : "");
8848 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
8849 loop->header->index, loop->latch->index,
8850 loop->pre_header ? loop->pre_header->index : -1,
8851 loop->first->index, loop->last->index);
8852 fprintf (file, ";; depth %d, level %d, outer %ld\n",
8853 loop->depth, loop->level,
8854 (long) (loop->outer ? loop->outer->num : -1));
8855
8856 if (loop->pre_header_edges)
8857 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
8858 loop->num_pre_header_edges, file);
8859 flow_edge_list_print (";; entry edges", loop->entry_edges,
8860 loop->num_entries, file);
8861 fprintf (file, ";; %d", loop->num_nodes);
8862 flow_nodes_print (" nodes", loop->nodes, file);
8863 flow_edge_list_print (";; exit edges", loop->exit_edges,
8864 loop->num_exits, file);
8865 if (loop->exits_doms)
8866 flow_nodes_print (";; exit doms", loop->exits_doms, file);
8867 if (loop_dump_aux)
8868 loop_dump_aux (loop, file, verbose);
8869 }
8870
8871
8872 /* Dump the loop information specified by LOOPS to the stream FILE,
8873 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
8874 void
8875 flow_loops_dump (loops, file, loop_dump_aux, verbose)
8876 const struct loops *loops;
8877 FILE *file;
8878 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8879 int verbose;
8880 {
8881 int i;
8882 int num_loops;
8883
8884 num_loops = loops->num;
8885 if (! num_loops || ! file)
8886 return;
8887
8888 fprintf (file, ";; %d loops found, %d levels\n",
8889 num_loops, loops->levels);
8890
8891 for (i = 0; i < num_loops; i++)
8892 {
8893 struct loop *loop = &loops->array[i];
8894
8895 flow_loop_dump (loop, file, loop_dump_aux, verbose);
8896
8897 if (loop->shared)
8898 {
8899 int j;
8900
8901 for (j = 0; j < i; j++)
8902 {
8903 struct loop *oloop = &loops->array[j];
8904
8905 if (loop->header == oloop->header)
8906 {
8907 int disjoint;
8908 int smaller;
8909
8910 smaller = loop->num_nodes < oloop->num_nodes;
8911
8912 /* If the union of LOOP and OLOOP is different than
8913 the larger of LOOP and OLOOP then LOOP and OLOOP
8914 must be disjoint. */
8915 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
8916 smaller ? oloop : loop);
8917 fprintf (file,
8918 ";; loop header %d shared by loops %d, %d %s\n",
8919 loop->header->index, i, j,
8920 disjoint ? "disjoint" : "nested");
8921 }
8922 }
8923 }
8924 }
8925
8926 if (verbose)
8927 flow_loops_cfg_dump (loops, file);
8928 }
8929
8930
8931 /* Free all the memory allocated for LOOPS. */
8932
8933 void
8934 flow_loops_free (loops)
8935 struct loops *loops;
8936 {
8937 if (loops->array)
8938 {
8939 int i;
8940
8941 if (! loops->num)
8942 abort ();
8943
8944 /* Free the loop descriptors. */
8945 for (i = 0; i < loops->num; i++)
8946 {
8947 struct loop *loop = &loops->array[i];
8948
8949 if (loop->pre_header_edges)
8950 free (loop->pre_header_edges);
8951 if (loop->nodes)
8952 sbitmap_free (loop->nodes);
8953 if (loop->entry_edges)
8954 free (loop->entry_edges);
8955 if (loop->exit_edges)
8956 free (loop->exit_edges);
8957 if (loop->exits_doms)
8958 sbitmap_free (loop->exits_doms);
8959 }
8960 free (loops->array);
8961 loops->array = NULL;
8962
8963 if (loops->cfg.dom)
8964 sbitmap_vector_free (loops->cfg.dom);
8965 if (loops->cfg.dfs_order)
8966 free (loops->cfg.dfs_order);
8967
8968 if (loops->shared_headers)
8969 sbitmap_free (loops->shared_headers);
8970 }
8971 }
8972
8973
8974 /* Find the entry edges into the loop with header HEADER and nodes
8975 NODES and store in ENTRY_EDGES array. Return the number of entry
8976 edges from the loop. */
8977
8978 static int
8979 flow_loop_entry_edges_find (header, nodes, entry_edges)
8980 basic_block header;
8981 const sbitmap nodes;
8982 edge **entry_edges;
8983 {
8984 edge e;
8985 int num_entries;
8986
8987 *entry_edges = NULL;
8988
8989 num_entries = 0;
8990 for (e = header->pred; e; e = e->pred_next)
8991 {
8992 basic_block src = e->src;
8993
8994 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8995 num_entries++;
8996 }
8997
8998 if (! num_entries)
8999 abort ();
9000
9001 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
9002
9003 num_entries = 0;
9004 for (e = header->pred; e; e = e->pred_next)
9005 {
9006 basic_block src = e->src;
9007
9008 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
9009 (*entry_edges)[num_entries++] = e;
9010 }
9011
9012 return num_entries;
9013 }
9014
9015
9016 /* Find the exit edges from the loop using the bitmap of loop nodes
9017 NODES and store in EXIT_EDGES array. Return the number of
9018 exit edges from the loop. */
9019
9020 static int
9021 flow_loop_exit_edges_find (nodes, exit_edges)
9022 const sbitmap nodes;
9023 edge **exit_edges;
9024 {
9025 edge e;
9026 int node;
9027 int num_exits;
9028
9029 *exit_edges = NULL;
9030
9031 /* Check all nodes within the loop to see if there are any
9032 successors not in the loop. Note that a node may have multiple
9033 exiting edges ????? A node can have one jumping edge and one fallthru
9034 edge so only one of these can exit the loop. */
9035 num_exits = 0;
9036 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9037 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9038 {
9039 basic_block dest = e->dest;
9040
9041 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9042 num_exits++;
9043 }
9044 });
9045
9046 if (! num_exits)
9047 return 0;
9048
9049 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
9050
9051 /* Store all exiting edges into an array. */
9052 num_exits = 0;
9053 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
9054 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
9055 {
9056 basic_block dest = e->dest;
9057
9058 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
9059 (*exit_edges)[num_exits++] = e;
9060 }
9061 });
9062
9063 return num_exits;
9064 }
9065
9066
9067 /* Find the nodes contained within the loop with header HEADER and
9068 latch LATCH and store in NODES. Return the number of nodes within
9069 the loop. */
9070
9071 static int
9072 flow_loop_nodes_find (header, latch, nodes)
9073 basic_block header;
9074 basic_block latch;
9075 sbitmap nodes;
9076 {
9077 basic_block *stack;
9078 int sp;
9079 int num_nodes = 0;
9080
9081 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
9082 sp = 0;
9083
9084 /* Start with only the loop header in the set of loop nodes. */
9085 sbitmap_zero (nodes);
9086 SET_BIT (nodes, header->index);
9087 num_nodes++;
9088 header->loop_depth++;
9089
9090 /* Push the loop latch on to the stack. */
9091 if (! TEST_BIT (nodes, latch->index))
9092 {
9093 SET_BIT (nodes, latch->index);
9094 latch->loop_depth++;
9095 num_nodes++;
9096 stack[sp++] = latch;
9097 }
9098
9099 while (sp)
9100 {
9101 basic_block node;
9102 edge e;
9103
9104 node = stack[--sp];
9105 for (e = node->pred; e; e = e->pred_next)
9106 {
9107 basic_block ancestor = e->src;
9108
9109 /* If each ancestor not marked as part of loop, add to set of
9110 loop nodes and push on to stack. */
9111 if (ancestor != ENTRY_BLOCK_PTR
9112 && ! TEST_BIT (nodes, ancestor->index))
9113 {
9114 SET_BIT (nodes, ancestor->index);
9115 ancestor->loop_depth++;
9116 num_nodes++;
9117 stack[sp++] = ancestor;
9118 }
9119 }
9120 }
9121 free (stack);
9122 return num_nodes;
9123 }
9124
9125 /* Compute the depth first search order and store in the array
9126 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
9127 RC_ORDER is non-zero, return the reverse completion number for each
9128 node. Returns the number of nodes visited. A depth first search
9129 tries to get as far away from the starting point as quickly as
9130 possible. */
9131
9132 int
9133 flow_depth_first_order_compute (dfs_order, rc_order)
9134 int *dfs_order;
9135 int *rc_order;
9136 {
9137 edge *stack;
9138 int sp;
9139 int dfsnum = 0;
9140 int rcnum = n_basic_blocks - 1;
9141 sbitmap visited;
9142
9143 /* Allocate stack for back-tracking up CFG. */
9144 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
9145 sp = 0;
9146
9147 /* Allocate bitmap to track nodes that have been visited. */
9148 visited = sbitmap_alloc (n_basic_blocks);
9149
9150 /* None of the nodes in the CFG have been visited yet. */
9151 sbitmap_zero (visited);
9152
9153 /* Push the first edge on to the stack. */
9154 stack[sp++] = ENTRY_BLOCK_PTR->succ;
9155
9156 while (sp)
9157 {
9158 edge e;
9159 basic_block src;
9160 basic_block dest;
9161
9162 /* Look at the edge on the top of the stack. */
9163 e = stack[sp - 1];
9164 src = e->src;
9165 dest = e->dest;
9166
9167 /* Check if the edge destination has been visited yet. */
9168 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9169 {
9170 /* Mark that we have visited the destination. */
9171 SET_BIT (visited, dest->index);
9172
9173 if (dfs_order)
9174 dfs_order[dfsnum++] = dest->index;
9175
9176 if (dest->succ)
9177 {
9178 /* Since the DEST node has been visited for the first
9179 time, check its successors. */
9180 stack[sp++] = dest->succ;
9181 }
9182 else
9183 {
9184 /* There are no successors for the DEST node so assign
9185 its reverse completion number. */
9186 if (rc_order)
9187 rc_order[rcnum--] = dest->index;
9188 }
9189 }
9190 else
9191 {
9192 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9193 {
9194 /* There are no more successors for the SRC node
9195 so assign its reverse completion number. */
9196 if (rc_order)
9197 rc_order[rcnum--] = src->index;
9198 }
9199
9200 if (e->succ_next)
9201 stack[sp - 1] = e->succ_next;
9202 else
9203 sp--;
9204 }
9205 }
9206
9207 free (stack);
9208 sbitmap_free (visited);
9209
9210 /* The number of nodes visited should not be greater than
9211 n_basic_blocks. */
9212 if (dfsnum > n_basic_blocks)
9213 abort ();
9214
9215 /* There are some nodes left in the CFG that are unreachable. */
9216 if (dfsnum < n_basic_blocks)
9217 abort ();
9218 return dfsnum;
9219 }
9220
9221 /* Compute the depth first search order on the _reverse_ graph and
9222 store in the array DFS_ORDER, marking the nodes visited in VISITED.
9223 Returns the number of nodes visited.
9224
9225 The computation is split into three pieces:
9226
9227 flow_dfs_compute_reverse_init () creates the necessary data
9228 structures.
9229
9230 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9231 structures. The block will start the search.
9232
9233 flow_dfs_compute_reverse_execute () continues (or starts) the
9234 search using the block on the top of the stack, stopping when the
9235 stack is empty.
9236
9237 flow_dfs_compute_reverse_finish () destroys the necessary data
9238 structures.
9239
9240 Thus, the user will probably call ..._init(), call ..._add_bb() to
9241 add a beginning basic block to the stack, call ..._execute(),
9242 possibly add another bb to the stack and again call ..._execute(),
9243 ..., and finally call _finish(). */
9244
9245 /* Initialize the data structures used for depth-first search on the
9246 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
9247 added to the basic block stack. DATA is the current depth-first
9248 search context. If INITIALIZE_STACK is non-zero, there is an
9249 element on the stack. */
9250
9251 static void
9252 flow_dfs_compute_reverse_init (data)
9253 depth_first_search_ds data;
9254 {
9255 /* Allocate stack for back-tracking up CFG. */
9256 data->stack =
9257 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9258 * sizeof (basic_block));
9259 data->sp = 0;
9260
9261 /* Allocate bitmap to track nodes that have been visited. */
9262 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9263
9264 /* None of the nodes in the CFG have been visited yet. */
9265 sbitmap_zero (data->visited_blocks);
9266
9267 return;
9268 }
9269
9270 /* Add the specified basic block to the top of the dfs data
9271 structures. When the search continues, it will start at the
9272 block. */
9273
9274 static void
9275 flow_dfs_compute_reverse_add_bb (data, bb)
9276 depth_first_search_ds data;
9277 basic_block bb;
9278 {
9279 data->stack[data->sp++] = bb;
9280 return;
9281 }
9282
9283 /* Continue the depth-first search through the reverse graph starting
9284 with the block at the stack's top and ending when the stack is
9285 empty. Visited nodes are marked. Returns an unvisited basic
9286 block, or NULL if there is none available. */
9287
9288 static basic_block
9289 flow_dfs_compute_reverse_execute (data)
9290 depth_first_search_ds data;
9291 {
9292 basic_block bb;
9293 edge e;
9294 int i;
9295
9296 while (data->sp > 0)
9297 {
9298 bb = data->stack[--data->sp];
9299
9300 /* Mark that we have visited this node. */
9301 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9302 {
9303 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9304
9305 /* Perform depth-first search on adjacent vertices. */
9306 for (e = bb->pred; e; e = e->pred_next)
9307 flow_dfs_compute_reverse_add_bb (data, e->src);
9308 }
9309 }
9310
9311 /* Determine if there are unvisited basic blocks. */
9312 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9313 if (!TEST_BIT (data->visited_blocks, i))
9314 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9315 return NULL;
9316 }
9317
9318 /* Destroy the data structures needed for depth-first search on the
9319 reverse graph. */
9320
9321 static void
9322 flow_dfs_compute_reverse_finish (data)
9323 depth_first_search_ds data;
9324 {
9325 free (data->stack);
9326 sbitmap_free (data->visited_blocks);
9327 return;
9328 }
9329
9330
9331 /* Find the root node of the loop pre-header extended basic block and
9332 the edges along the trace from the root node to the loop header. */
9333
9334 static void
9335 flow_loop_pre_header_scan (loop)
9336 struct loop *loop;
9337 {
9338 int num = 0;
9339 basic_block ebb;
9340
9341 loop->num_pre_header_edges = 0;
9342
9343 if (loop->num_entries != 1)
9344 return;
9345
9346 ebb = loop->entry_edges[0]->src;
9347
9348 if (ebb != ENTRY_BLOCK_PTR)
9349 {
9350 edge e;
9351
9352 /* Count number of edges along trace from loop header to
9353 root of pre-header extended basic block. Usually this is
9354 only one or two edges. */
9355 num++;
9356 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9357 {
9358 ebb = ebb->pred->src;
9359 num++;
9360 }
9361
9362 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
9363 loop->num_pre_header_edges = num;
9364
9365 /* Store edges in order that they are followed. The source
9366 of the first edge is the root node of the pre-header extended
9367 basic block and the destination of the last last edge is
9368 the loop header. */
9369 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9370 {
9371 loop->pre_header_edges[--num] = e;
9372 }
9373 }
9374 }
9375
9376
9377 /* Return the block for the pre-header of the loop with header
9378 HEADER where DOM specifies the dominator information. Return NULL if
9379 there is no pre-header. */
9380
9381 static basic_block
9382 flow_loop_pre_header_find (header, dom)
9383 basic_block header;
9384 const sbitmap *dom;
9385 {
9386 basic_block pre_header;
9387 edge e;
9388
9389 /* If block p is a predecessor of the header and is the only block
9390 that the header does not dominate, then it is the pre-header. */
9391 pre_header = NULL;
9392 for (e = header->pred; e; e = e->pred_next)
9393 {
9394 basic_block node = e->src;
9395
9396 if (node != ENTRY_BLOCK_PTR
9397 && ! TEST_BIT (dom[node->index], header->index))
9398 {
9399 if (pre_header == NULL)
9400 pre_header = node;
9401 else
9402 {
9403 /* There are multiple edges into the header from outside
9404 the loop so there is no pre-header block. */
9405 pre_header = NULL;
9406 break;
9407 }
9408 }
9409 }
9410 return pre_header;
9411 }
9412
9413 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9414 previously added. The insertion algorithm assumes that the loops
9415 are added in the order found by a depth first search of the CFG. */
9416
9417 static void
9418 flow_loop_tree_node_add (prevloop, loop)
9419 struct loop *prevloop;
9420 struct loop *loop;
9421 {
9422
9423 if (flow_loop_nested_p (prevloop, loop))
9424 {
9425 prevloop->inner = loop;
9426 loop->outer = prevloop;
9427 return;
9428 }
9429
9430 while (prevloop->outer)
9431 {
9432 if (flow_loop_nested_p (prevloop->outer, loop))
9433 {
9434 prevloop->next = loop;
9435 loop->outer = prevloop->outer;
9436 return;
9437 }
9438 prevloop = prevloop->outer;
9439 }
9440
9441 prevloop->next = loop;
9442 loop->outer = NULL;
9443 }
9444
9445 /* Build the loop hierarchy tree for LOOPS. */
9446
9447 static void
9448 flow_loops_tree_build (loops)
9449 struct loops *loops;
9450 {
9451 int i;
9452 int num_loops;
9453
9454 num_loops = loops->num;
9455 if (! num_loops)
9456 return;
9457
9458 /* Root the loop hierarchy tree with the first loop found.
9459 Since we used a depth first search this should be the
9460 outermost loop. */
9461 loops->tree_root = &loops->array[0];
9462 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9463
9464 /* Add the remaining loops to the tree. */
9465 for (i = 1; i < num_loops; i++)
9466 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9467 }
9468
9469 /* Helper function to compute loop nesting depth and enclosed loop level
9470 for the natural loop specified by LOOP at the loop depth DEPTH.
9471 Returns the loop level. */
9472
9473 static int
9474 flow_loop_level_compute (loop, depth)
9475 struct loop *loop;
9476 int depth;
9477 {
9478 struct loop *inner;
9479 int level = 1;
9480
9481 if (! loop)
9482 return 0;
9483
9484 /* Traverse loop tree assigning depth and computing level as the
9485 maximum level of all the inner loops of this loop. The loop
9486 level is equivalent to the height of the loop in the loop tree
9487 and corresponds to the number of enclosed loop levels (including
9488 itself). */
9489 for (inner = loop->inner; inner; inner = inner->next)
9490 {
9491 int ilevel;
9492
9493 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9494
9495 if (ilevel > level)
9496 level = ilevel;
9497 }
9498 loop->level = level;
9499 loop->depth = depth;
9500 return level;
9501 }
9502
9503 /* Compute the loop nesting depth and enclosed loop level for the loop
9504 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
9505 level. */
9506
9507 static int
9508 flow_loops_level_compute (loops)
9509 struct loops *loops;
9510 {
9511 struct loop *loop;
9512 int level;
9513 int levels = 0;
9514
9515 /* Traverse all the outer level loops. */
9516 for (loop = loops->tree_root; loop; loop = loop->next)
9517 {
9518 level = flow_loop_level_compute (loop, 1);
9519 if (level > levels)
9520 levels = level;
9521 }
9522 return levels;
9523 }
9524
9525
9526 /* Scan a single natural loop specified by LOOP collecting information
9527 about it specified by FLAGS. */
9528
9529 int
9530 flow_loop_scan (loops, loop, flags)
9531 struct loops *loops;
9532 struct loop *loop;
9533 int flags;
9534 {
9535 /* Determine prerequisites. */
9536 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9537 flags |= LOOP_EXIT_EDGES;
9538
9539 if (flags & LOOP_ENTRY_EDGES)
9540 {
9541 /* Find edges which enter the loop header.
9542 Note that the entry edges should only
9543 enter the header of a natural loop. */
9544 loop->num_entries
9545 = flow_loop_entry_edges_find (loop->header,
9546 loop->nodes,
9547 &loop->entry_edges);
9548 }
9549
9550 if (flags & LOOP_EXIT_EDGES)
9551 {
9552 /* Find edges which exit the loop. */
9553 loop->num_exits
9554 = flow_loop_exit_edges_find (loop->nodes,
9555 &loop->exit_edges);
9556 }
9557
9558 if (flags & LOOP_EXITS_DOMS)
9559 {
9560 int j;
9561
9562 /* Determine which loop nodes dominate all the exits
9563 of the loop. */
9564 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9565 sbitmap_copy (loop->exits_doms, loop->nodes);
9566 for (j = 0; j < loop->num_exits; j++)
9567 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9568 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9569
9570 /* The header of a natural loop must dominate
9571 all exits. */
9572 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9573 abort ();
9574 }
9575
9576 if (flags & LOOP_PRE_HEADER)
9577 {
9578 /* Look to see if the loop has a pre-header node. */
9579 loop->pre_header
9580 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9581
9582 /* Find the blocks within the extended basic block of
9583 the loop pre-header. */
9584 flow_loop_pre_header_scan (loop);
9585 }
9586 return 1;
9587 }
9588
9589
9590 /* Find all the natural loops in the function and save in LOOPS structure
9591 and recalculate loop_depth information in basic block structures.
9592 FLAGS controls which loop information is collected.
9593 Return the number of natural loops found. */
9594
9595 int
9596 flow_loops_find (loops, flags)
9597 struct loops *loops;
9598 int flags;
9599 {
9600 int i;
9601 int b;
9602 int num_loops;
9603 edge e;
9604 sbitmap headers;
9605 sbitmap *dom;
9606 int *dfs_order;
9607 int *rc_order;
9608
9609 /* This function cannot be repeatedly called with different
9610 flags to build up the loop information. The loop tree
9611 must always be built if this function is called. */
9612 if (! (flags & LOOP_TREE))
9613 abort ();
9614
9615 memset (loops, 0, sizeof (*loops));
9616
9617 /* Taking care of this degenerate case makes the rest of
9618 this code simpler. */
9619 if (n_basic_blocks == 0)
9620 return 0;
9621
9622 dfs_order = NULL;
9623 rc_order = NULL;
9624
9625 /* Compute the dominators. */
9626 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9627 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9628
9629 /* Count the number of loop edges (back edges). This should be the
9630 same as the number of natural loops. */
9631
9632 num_loops = 0;
9633 for (b = 0; b < n_basic_blocks; b++)
9634 {
9635 basic_block header;
9636
9637 header = BASIC_BLOCK (b);
9638 header->loop_depth = 0;
9639
9640 for (e = header->pred; e; e = e->pred_next)
9641 {
9642 basic_block latch = e->src;
9643
9644 /* Look for back edges where a predecessor is dominated
9645 by this block. A natural loop has a single entry
9646 node (header) that dominates all the nodes in the
9647 loop. It also has single back edge to the header
9648 from a latch node. Note that multiple natural loops
9649 may share the same header. */
9650 if (b != header->index)
9651 abort ();
9652
9653 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9654 num_loops++;
9655 }
9656 }
9657
9658 if (num_loops)
9659 {
9660 /* Compute depth first search order of the CFG so that outer
9661 natural loops will be found before inner natural loops. */
9662 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9663 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9664 flow_depth_first_order_compute (dfs_order, rc_order);
9665
9666 /* Save CFG derived information to avoid recomputing it. */
9667 loops->cfg.dom = dom;
9668 loops->cfg.dfs_order = dfs_order;
9669 loops->cfg.rc_order = rc_order;
9670
9671 /* Allocate loop structures. */
9672 loops->array
9673 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
9674
9675 headers = sbitmap_alloc (n_basic_blocks);
9676 sbitmap_zero (headers);
9677
9678 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9679 sbitmap_zero (loops->shared_headers);
9680
9681 /* Find and record information about all the natural loops
9682 in the CFG. */
9683 num_loops = 0;
9684 for (b = 0; b < n_basic_blocks; b++)
9685 {
9686 basic_block header;
9687
9688 /* Search the nodes of the CFG in reverse completion order
9689 so that we can find outer loops first. */
9690 header = BASIC_BLOCK (rc_order[b]);
9691
9692 /* Look for all the possible latch blocks for this header. */
9693 for (e = header->pred; e; e = e->pred_next)
9694 {
9695 basic_block latch = e->src;
9696
9697 /* Look for back edges where a predecessor is dominated
9698 by this block. A natural loop has a single entry
9699 node (header) that dominates all the nodes in the
9700 loop. It also has single back edge to the header
9701 from a latch node. Note that multiple natural loops
9702 may share the same header. */
9703 if (latch != ENTRY_BLOCK_PTR
9704 && TEST_BIT (dom[latch->index], header->index))
9705 {
9706 struct loop *loop;
9707
9708 loop = loops->array + num_loops;
9709
9710 loop->header = header;
9711 loop->latch = latch;
9712 loop->num = num_loops;
9713
9714 num_loops++;
9715 }
9716 }
9717 }
9718
9719 for (i = 0; i < num_loops; i++)
9720 {
9721 struct loop *loop = &loops->array[i];
9722
9723 /* Keep track of blocks that are loop headers so
9724 that we can tell which loops should be merged. */
9725 if (TEST_BIT (headers, loop->header->index))
9726 SET_BIT (loops->shared_headers, loop->header->index);
9727 SET_BIT (headers, loop->header->index);
9728
9729 /* Find nodes contained within the loop. */
9730 loop->nodes = sbitmap_alloc (n_basic_blocks);
9731 loop->num_nodes
9732 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
9733
9734 /* Compute first and last blocks within the loop.
9735 These are often the same as the loop header and
9736 loop latch respectively, but this is not always
9737 the case. */
9738 loop->first
9739 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
9740 loop->last
9741 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
9742
9743 flow_loop_scan (loops, loop, flags);
9744 }
9745
9746 /* Natural loops with shared headers may either be disjoint or
9747 nested. Disjoint loops with shared headers cannot be inner
9748 loops and should be merged. For now just mark loops that share
9749 headers. */
9750 for (i = 0; i < num_loops; i++)
9751 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
9752 loops->array[i].shared = 1;
9753
9754 sbitmap_free (headers);
9755 }
9756 else
9757 {
9758 sbitmap_vector_free (dom);
9759 }
9760
9761 loops->num = num_loops;
9762
9763 /* Build the loop hierarchy tree. */
9764 flow_loops_tree_build (loops);
9765
9766 /* Assign the loop nesting depth and enclosed loop level for each
9767 loop. */
9768 loops->levels = flow_loops_level_compute (loops);
9769
9770 return num_loops;
9771 }
9772
9773
9774 /* Update the information regarding the loops in the CFG
9775 specified by LOOPS. */
9776 int
9777 flow_loops_update (loops, flags)
9778 struct loops *loops;
9779 int flags;
9780 {
9781 /* One day we may want to update the current loop data. For now
9782 throw away the old stuff and rebuild what we need. */
9783 if (loops->array)
9784 flow_loops_free (loops);
9785
9786 return flow_loops_find (loops, flags);
9787 }
9788
9789
9790 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
9791
9792 int
9793 flow_loop_outside_edge_p (loop, e)
9794 const struct loop *loop;
9795 edge e;
9796 {
9797 if (e->dest != loop->header)
9798 abort ();
9799 return (e->src == ENTRY_BLOCK_PTR)
9800 || ! TEST_BIT (loop->nodes, e->src->index);
9801 }
9802
9803 /* Clear LOG_LINKS fields of insns in a chain.
9804 Also clear the global_live_at_{start,end} fields of the basic block
9805 structures. */
9806
9807 void
9808 clear_log_links (insns)
9809 rtx insns;
9810 {
9811 rtx i;
9812 int b;
9813
9814 for (i = insns; i; i = NEXT_INSN (i))
9815 if (INSN_P (i))
9816 LOG_LINKS (i) = 0;
9817
9818 for (b = 0; b < n_basic_blocks; b++)
9819 {
9820 basic_block bb = BASIC_BLOCK (b);
9821
9822 bb->global_live_at_start = NULL;
9823 bb->global_live_at_end = NULL;
9824 }
9825
9826 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
9827 EXIT_BLOCK_PTR->global_live_at_start = NULL;
9828 }
9829
9830 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
9831 correspond to the hard registers, if any, set in that map. This
9832 could be done far more efficiently by having all sorts of special-cases
9833 with moving single words, but probably isn't worth the trouble. */
9834
9835 void
9836 reg_set_to_hard_reg_set (to, from)
9837 HARD_REG_SET *to;
9838 bitmap from;
9839 {
9840 int i;
9841
9842 EXECUTE_IF_SET_IN_BITMAP
9843 (from, 0, i,
9844 {
9845 if (i >= FIRST_PSEUDO_REGISTER)
9846 return;
9847 SET_HARD_REG_BIT (*to, i);
9848 });
9849 }
9850
9851 /* Called once at intialization time. */
9852
9853 void
9854 init_flow ()
9855 {
9856 static int initialized;
9857
9858 if (!initialized)
9859 {
9860 gcc_obstack_init (&flow_obstack);
9861 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
9862 initialized = 1;
9863 }
9864 else
9865 {
9866 obstack_free (&flow_obstack, flow_firstobj);
9867 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
9868 }
9869 }
9870
9871 /* Assume that the preceeding pass has possibly eliminated jump instructions
9872 or converted the unconditional jumps. Eliminate the edges from CFG. */
9873
9874 void
9875 purge_dead_edges (bb)
9876 basic_block bb;
9877 {
9878 edge e, next;
9879 rtx insn = bb->end;
9880 if (GET_CODE (insn) == JUMP_INSN && !simplejump_p (insn))
9881 return;
9882 if (GET_CODE (insn) == JUMP_INSN)
9883 {
9884 int removed = 0;
9885 rtx note;
9886 edge b,f;
9887 /* We do care only about conditional jumps and simplejumps. */
9888 if (!any_condjump_p (insn)
9889 && !returnjump_p (insn)
9890 && !simplejump_p (insn))
9891 return;
9892 for (e = bb->succ; e; e = next)
9893 {
9894 next = e->succ_next;
9895
9896 /* Check purposes we can have edge. */
9897 if ((e->flags & EDGE_FALLTHRU)
9898 && any_condjump_p (insn))
9899 continue;
9900 if (e->dest != EXIT_BLOCK_PTR
9901 && e->dest->head == JUMP_LABEL (insn))
9902 continue;
9903 if (e->dest == EXIT_BLOCK_PTR
9904 && returnjump_p (insn))
9905 continue;
9906 removed = 1;
9907 remove_edge (e);
9908 }
9909 if (!bb->succ || !removed)
9910 return;
9911 if (rtl_dump_file)
9912 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
9913 if (!optimize)
9914 return;
9915
9916 /* Redistribute probabilities. */
9917 if (!bb->succ->succ_next)
9918 {
9919 bb->succ->probability = REG_BR_PROB_BASE;
9920 bb->succ->count = bb->count;
9921 }
9922 else
9923 {
9924 note = find_reg_note (insn, REG_BR_PROB, NULL);
9925 if (!note)
9926 return;
9927 b = BRANCH_EDGE (bb);
9928 f = FALLTHRU_EDGE (bb);
9929 b->probability = INTVAL (XEXP (note, 0));
9930 f->probability = REG_BR_PROB_BASE - b->probability;
9931 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
9932 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
9933 }
9934 return;
9935 }
9936 /* If we don't see a jump insn, we don't know exactly why the block would
9937 have been broken at this point. Look for a simple, non-fallthru edge,
9938 as these are only created by conditional branches. If we find such an
9939 edge we know that there used to be a jump here and can then safely
9940 remove all non-fallthru edges. */
9941 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
9942 e = e->succ_next);
9943 if (!e)
9944 return;
9945 for (e = bb->succ; e; e = next)
9946 {
9947 next = e->succ_next;
9948 if (!(e->flags & EDGE_FALLTHRU))
9949 remove_edge (e);
9950 }
9951 if (!bb->succ || bb->succ->succ_next)
9952 abort ();
9953 bb->succ->probability = REG_BR_PROB_BASE;
9954 bb->succ->count = bb->count;
9955
9956 if (rtl_dump_file)
9957 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
9958 bb->index);
9959 return;
9960 }
9961
9962 /* Search all basic blocks for potentionally dead edges and purge them. */
9963
9964 void
9965 purge_all_dead_edges ()
9966 {
9967 int i;
9968 for (i = 0; i < n_basic_blocks; i++)
9969 purge_dead_edges (BASIC_BLOCK (i));
9970 }