/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
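
   For illustration, liveness of register number REGNO on entry to a
   block BB can be tested with the regset accessor

        REGNO_REG_SET_P (bb->global_live_at_start, regno)

   which is nonzero exactly when REGNO's bit is set.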

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
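
   For illustration, whether a particular insn carries such a note for
   register number REGNO can be checked with

        find_regno_note (insn, REG_DEAD, regno)
        find_regno_note (insn, REG_UNUSED, regno)

   each of which returns the note found, or zero if there is none.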

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
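
   In sketch form (illustrative only, not the exact implementation):

        live = copy of bb->global_live_at_start
        for each insn in the block, from head to end:
          clear in LIVE the registers mentioned in the insn's
            REG_DEAD notes;
          set in LIVE the registers set by the insn, except those
            mentioned in its REG_UNUSED notes;

   after which LIVE describes the registers live just after each insn.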

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
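
   For instance (illustrative rtl), the two insns

        (set (reg Y) (mem (reg X)))
        (set (reg X) (plus (reg X) (const_int 4)))

   can be combined into the single insn

        (set (reg Y) (mem (post_inc (reg X))))

   which then carries a REG_INC note for X in its REG_NOTES.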

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
        - local property discovery (bb->local_live, bb->local_set)
        - global property computation
        - log links creation
        - pre/post modify transformation
*/
\f
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#include "splay-tree.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free


/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */

struct basic_block_def entry_exit_blocks[2]
= {{NULL,                       /* head */
    NULL,                       /* end */
    NULL,                       /* pred */
    NULL,                       /* succ */
    NULL,                       /* local_set */
    NULL,                       /* global_live_at_start */
    NULL,                       /* global_live_at_end */
    NULL,                       /* aux */
    ENTRY_BLOCK,                /* index */
    0,                          /* loop_depth */
    -1, -1                      /* eh_beg, eh_end */
  },
  {
    NULL,                       /* head */
    NULL,                       /* end */
    NULL,                       /* pred */
    NULL,                       /* succ */
    NULL,                       /* local_set */
    NULL,                       /* global_live_at_start */
    NULL,                       /* global_live_at_end */
    NULL,                       /* aux */
    EXIT_BLOCK,                 /* index */
    0,                          /* loop_depth */
    -1, -1                      /* eh_beg, eh_end */
  }
};

/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;

/* Holds information for tracking conditional register life information.  */
struct reg_cond_life_info
{
  /* An EXPR_LIST of conditions under which a register is dead.  */
  rtx condition;

  /* ??? Could store mask of bytes that are dead, so that we could finally
     track lifetimes of multi-word registers accessed via subregs.  */
};

/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is set this insn.  */
  regset new_set;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

#ifdef HAVE_conditional_execution
  /* Indexed by register number, holds a reg_cond_life_info for each
     register that is not unconditionally live or dead.  */
  splay_tree reg_cond_dead;

  /* Bit N is set if register N is in an expression in reg_cond_dead.  */
  regset reg_cond_reg;
#endif

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};

/* Forward declarations */
static int count_basic_blocks           PARAMS ((rtx));
static rtx find_basic_blocks_1          PARAMS ((rtx));
static void clear_edges                 PARAMS ((void));
static void make_edges                  PARAMS ((rtx));
static void make_label_edge             PARAMS ((sbitmap *, basic_block,
                                                 rtx, int));
static void make_eh_edge                PARAMS ((sbitmap *, eh_nesting_info *,
                                                 basic_block, rtx, int));
static void mark_critical_edges         PARAMS ((void));
static void move_stray_eh_region_notes  PARAMS ((void));
static void record_active_eh_regions    PARAMS ((rtx));

static void commit_one_edge_insertion   PARAMS ((edge));

static void delete_unreachable_blocks   PARAMS ((void));
static void delete_eh_regions           PARAMS ((void));
static int can_delete_note_p            PARAMS ((rtx));
static void expunge_block               PARAMS ((basic_block));
static int can_delete_label_p           PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
                                                          basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
                                                        basic_block));
static int merge_blocks                 PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks            PARAMS ((void));
static void tidy_fallthru_edges         PARAMS ((void));
static int verify_wide_reg_1            PARAMS ((rtx *, void *));
static void verify_wide_reg             PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start  PARAMS ((regset, basic_block));
static int set_noop_p                   PARAMS ((rtx));
static int noop_move_p                  PARAMS ((rtx));
static void delete_noop_moves           PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg                    PARAMS ((rtx, void *));
static void mark_regs_live_at_end       PARAMS ((regset));
static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live  PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static int insn_dead_p                  PARAMS ((struct propagate_block_info *,
                                                 rtx, int, rtx));
static int libcall_dead_p               PARAMS ((struct propagate_block_info *,
                                                 rtx, rtx, rtx));
static void mark_set_regs               PARAMS ((struct propagate_block_info *,
                                                 rtx, rtx));
static void mark_set_1                  PARAMS ((struct propagate_block_info *,
                                                 enum rtx_code, rtx, rtx,
                                                 rtx, int));
#ifdef HAVE_conditional_execution
static int mark_regno_cond_dead         PARAMS ((struct propagate_block_info *,
                                                 int, rtx));
static void free_reg_cond_life_info     PARAMS ((splay_tree_value));
static int flush_reg_cond_reg_1         PARAMS ((splay_tree_node, void *));
static void flush_reg_cond_reg          PARAMS ((struct propagate_block_info *,
                                                 int));
static rtx ior_reg_cond                 PARAMS ((rtx, rtx));
static rtx not_reg_cond                 PARAMS ((rtx));
static rtx nand_reg_cond                PARAMS ((rtx, rtx));
#endif
#ifdef AUTO_INC_DEC
static void find_auto_inc               PARAMS ((struct propagate_block_info *,
                                                 rtx, rtx));
static int try_pre_increment_1          PARAMS ((struct propagate_block_info *,
                                                 rtx));
static int try_pre_increment            PARAMS ((rtx, rtx, HOST_WIDE_INT));
#endif
static void mark_used_reg               PARAMS ((struct propagate_block_info *,
                                                 rtx, rtx, rtx));
static void mark_used_regs              PARAMS ((struct propagate_block_info *,
                                                 rtx, rtx, rtx));
void dump_flow_info                     PARAMS ((FILE *));
void debug_flow_info                    PARAMS ((void));
static void dump_edge_info              PARAMS ((FILE *, edge, int));

static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
                                                  rtx));
static void remove_fake_successors      PARAMS ((basic_block));
static void flow_nodes_print            PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print            PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump         PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p           PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find         PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find         PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add     PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build       PARAMS ((struct loops *));
static int flow_loop_level_compute      PARAMS ((struct loop *, int));
static int flow_loops_level_compute     PARAMS ((struct loops *));
\f
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
         tag for reuse during create_basic_block, just in case some pass
         copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
        BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
          || (GET_RTX_CLASS (code) == 'i'
              && (prev_code == JUMP_INSN
                  || prev_code == BARRIER
                  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
        count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
        {
          rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
          int region = (note ? INTVAL (XEXP (note, 0)) : 1);

          call_had_abnormal_edge = 0;

          /* If there is an EH region or rethrow, we have an edge.  */
          if ((eh_region && region > 0)
              || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
            call_had_abnormal_edge = 1;
          else if (nonlocal_goto_handler_labels && region >= 0)
            /* If there is a nonlocal goto label and the specified
               region number isn't -1, we have an edge.  (0 means
               no throw, but might have a nonlocal goto).  */
            call_had_abnormal_edge = 1;
        }

      if (code != NOTE)
        prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        ++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
        --eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
        {
        case NOTE:
          {
            int kind = NOTE_LINE_NUMBER (insn);

            /* Keep a LIFO list of the currently active exception notes.  */
            if (kind == NOTE_INSN_EH_REGION_BEG)
              eh_list = alloc_INSN_LIST (insn, eh_list);
            else if (kind == NOTE_INSN_EH_REGION_END)
              {
                rtx t = eh_list;

                eh_list = XEXP (eh_list, 1);
                free_INSN_LIST_node (t);
              }

            /* Look for basic block notes with which to keep the
               basic_block_info pointers stable.  Unthread the note now;
               we'll put it back at the right place in create_basic_block.
               Or not at all if we've already found a note in this block.  */
            else if (kind == NOTE_INSN_BASIC_BLOCK)
              {
                if (bb_note == NULL_RTX)
                  bb_note = insn;

                next = flow_delete_insn (insn);
              }
            break;
          }

        case CODE_LABEL:
          /* A basic block starts at a label.  If we've closed one off due
             to a barrier or some such, no need to do it again.  */
          if (head != NULL_RTX)
            {
              /* While we now have edge lists with which other portions of
                 the compiler might determine a call ending a basic block
                 does not imply an abnormal edge, it will be a bit before
                 everything can be updated.  So continue to emit a noop at
                 the end of such a block.  */
              if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
                {
                  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
                  end = emit_insn_after (nop, end);
                }

              create_basic_block (i++, head, end, bb_note);
              bb_note = NULL_RTX;
            }

          head = end = insn;
          break;

        case JUMP_INSN:
          /* A basic block ends at a jump.  */
          if (head == NULL_RTX)
            head = insn;
          else
            {
              /* ??? Make a special check for table jumps.  The way this
                 happens is truly and amazingly gross.  We are about to
                 create a basic block that contains just a code label and
                 an addr*vec jump insn.  Worse, an addr_diff_vec creates
                 its own natural loop.

                 Prevent this bit of brain damage, pasting things together
                 correctly in make_edges.

                 The correct solution involves emitting the table directly
                 on the tablejump instruction as a note, or JUMP_LABEL.  */

              if (GET_CODE (PATTERN (insn)) == ADDR_VEC
                  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
                {
                  head = end = NULL;
                  n_basic_blocks--;
                  break;
                }
            }
          end = insn;
          goto new_bb_inclusive;

        case BARRIER:
          /* A basic block ends at a barrier.  It may be that an unconditional
             jump already closed the basic block -- no need to do it again.  */
          if (head == NULL_RTX)
            break;

          /* While we now have edge lists with which other portions of the
             compiler might determine a call ending a basic block does not
             imply an abnormal edge, it will be a bit before everything can
             be updated.  So continue to emit a noop at the end of such a
             block.  */
          if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
            {
              rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
              end = emit_insn_after (nop, end);
            }
          goto new_bb_exclusive;

        case CALL_INSN:
          {
            /* Record whether this call created an edge.  */
            rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
            int region = (note ? INTVAL (XEXP (note, 0)) : 1);
            int call_has_abnormal_edge = 0;

            /* If there is an EH region or rethrow, we have an edge.  */
            if ((eh_list && region > 0)
                || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
              call_has_abnormal_edge = 1;
            else if (nonlocal_goto_handler_labels && region >= 0)
              /* If there is a nonlocal goto label and the specified
                 region number isn't -1, we have an edge.  (0 means
                 no throw, but might have a nonlocal goto).  */
              call_has_abnormal_edge = 1;

            /* A basic block ends at a call that can either throw or
               do a non-local goto.  */
            if (call_has_abnormal_edge)
              {
              new_bb_inclusive:
                if (head == NULL_RTX)
                  head = insn;
                end = insn;

              new_bb_exclusive:
                create_basic_block (i++, head, end, bb_note);
                head = end = NULL_RTX;
                bb_note = NULL_RTX;
                break;
              }
          }
          /* FALLTHRU */

        default:
          if (GET_RTX_CLASS (code) == 'i')
            {
              if (head == NULL_RTX)
                head = insn;
              end = insn;
            }
          break;
        }

      if (GET_RTX_CLASS (code) == 'i')
        {
          rtx note;

          /* Make a list of all labels referred to other than by jumps
             (which just don't have the REG_LABEL notes).

             Make a special exception for labels followed by an ADDR*VEC,
             as this would be a part of the tablejump setup code.

             Make a special exception for the eh_return_stub_label, which
             we know isn't part of any otherwise visible control flow.  */

          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (REG_NOTE_KIND (note) == REG_LABEL)
              {
                rtx lab = XEXP (note, 0), next;

                if (lab == eh_return_stub_label)
                  ;
                else if ((next = next_nonnote_insn (lab)) != NULL
                         && GET_CODE (next) == JUMP_INSN
                         && (GET_CODE (PATTERN (next)) == ADDR_VEC
                             || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
                  ;
                else
                  label_value_list
                    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
              }
        }
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}

/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}

/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
        add_insn_after (bb_note, head);
      else
        {
          add_insn_before (bb_note, head);
          head = bb_note;
        }
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
         Since we allow basic block structs in rtl, give the struct
         the same lifetime by allocating it off the function obstack
         rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
        bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
        {
          bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
          head = bb_note;
        }
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
\f
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
        {
          int uid = INSN_UID (insn);
          if (uid < max)
            VARRAY_BB (basic_block_for_insn, uid) = bb;
          if (insn == end)
            break;
          insn = NEXT_INSN (insn);
        }
    }
}
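
/* Once this mapping exists, the block containing an insn is available
   via the BLOCK_FOR_INSN accessor (and may be updated when an insn
   moves with set_block_for_insn), e.g.

        basic_block bb = BLOCK_FOR_INSN (insn);  */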

/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
        {
          n = e->succ_next;
          free (e);
        }

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}

/* Identify the edges between basic blocks.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken; blocks
   that are otherwise unreachable may be reachable via a computed jump or
   a non-local goto to one of these labels.

   The exception regions active at each end of a basic block are read
   from bb->eh_beg and bb->eh_end, as filled in by
   record_active_eh_regions.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* ??? Recognize a tablejump and do the right thing.  */
          if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
              && (tmp = NEXT_INSN (tmp)) != NULL_RTX
              && GET_CODE (tmp) == JUMP_INSN
              && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
                  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (eg, ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
              /* Silly VAXen.  The ADDR_VEC is going to be in the way of
                 us naturally detecting fallthru into the next block.  */
              force_fallthru = 1;
#endif
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the label_value_list and forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              current_function_has_computed_jump = 1;

              for (x = label_value_list; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* A return instruction creates an edge to the exit block.  */
          else if (returnjump_p (insn))
            make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              if (! JUMP_LABEL (insn))
                abort ();
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a
         combined call and return, and so we need an edge to the
         exit block.  No need to worry about EH edges, since we
         wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
      else

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling asynchronous
         exceptions then any insn can reach any of the active handlers.

         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
        {
          /* Add any appropriate EH edges.  We do this unconditionally
             since there may be a REG_EH_REGION or REG_EH_RETHROW note
             on the call, and this needn't be within an EH region.  */
          make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

          /* If we have asynchronous exceptions, do the same for *all*
             exception regions active in the block.  */
          if (asynchronous_exceptions
              && bb->eh_beg != bb->eh_end)
            {
              if (bb->eh_beg >= 0)
                make_eh_edge (edge_cache, eh_nest_info, bb,
                              NULL_RTX, bb->eh_beg);

              for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
                if (GET_CODE (x) == NOTE
                    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
                        || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
                  {
                    int region = NOTE_EH_HANDLER (x);
                    make_eh_edge (edge_cache, eh_nest_info, bb,
                                  NULL_RTX, region);
                  }
            }

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's possible
                 to tell that certain calls will not do a nonlocal goto.

                 For example, if the nested functions that do the nonlocal
                 gotos do not have their addresses taken, then only calls to
                 those functions or to other nested functions that use them
                 could possibly do nonlocal gotos.  */
              /* We do know that a REG_EH_REGION note with a value less
                 than 0 is guaranteed not to perform a non-local goto.  */
              rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
              if (!note || INTVAL (XEXP (note, 0)) >= 0)
                for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* We know something about the structure of the function __throw in
         libgcc2.c.  It is the only function that ever contains eh_stub
         labels.  It modifies its return address so that the last block
         returns to one of the eh_stub labels within it.  So we have to
         make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
        make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
        make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
        {
          rtx tmp = BLOCK_HEAD (i + 1);
          if (GET_CODE (tmp) == NOTE)
            tmp = next_nonnote_insn (tmp);
          if (force_fallthru || insn == tmp)
            make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
        }
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
                    && src != ENTRY_BLOCK_PTR
                    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
        {
          e->flags |= flags;
          return;
        }

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
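
/* For illustration: a caller that has not allocated the duplicate
   suppression cache may pass a null EDGE_CACHE, e.g.

        make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);

   in which case the successor list is searched for duplicates instead.  */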

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
                       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}

/* An EH_REGION note appearing between basic blocks is ambiguous, and
   even dangerous if we intend to move basic blocks around.  Move such
   notes into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
        {
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) == NOTE
              && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
            {
              /* Unlink from the insn chain.  */
              NEXT_INSN (PREV_INSN (insn)) = next;
              PREV_INSN (next) = PREV_INSN (insn);

              /* Queue it.  */
              NEXT_INSN (insn) = list;
              list = insn;
            }
        }

      if (list == NULL_RTX)
        continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
        insn = NEXT_INSN (insn);

      while (list)
        {
          next = NEXT_INSN (list);
          add_insn_after (list, insn);
          list = next;
        }
    }
}

/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
        bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
        {
          int kind = NOTE_LINE_NUMBER (insn);
          if (kind == NOTE_INSN_EH_REGION_BEG)
            eh_list = alloc_INSN_LIST (insn, eh_list);
          else if (kind == NOTE_INSN_EH_REGION_END)
            {
              rtx t = XEXP (eh_list, 1);
              free_INSN_LIST_node (eh_list);
              eh_list = t;
            }
        }

      if (bb->end == insn)
        {
          bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
          i += 1;
          if (i == n_basic_blocks)
            break;
          bb = BASIC_BLOCK (i);
        }
    }
}

/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
        {
          for (e = bb->succ; e ; e = e->succ_next)
            {
              /* (2) Critical edges must have a destination with multiple
                 predecessors.  Note that we know there is at least one
                 predecessor -- the edge we followed to get here.  */
              if (e->dest->pred->pred_next)
                e->flags |= EDGE_CRITICAL;
              else
                e->flags &= ~EDGE_CRITICAL;
            }
        }
      else
        {
          for (e = bb->succ; e ; e = e->succ_next)
            e->flags &= ~EDGE_CRITICAL;
        }

      if (++i >= n)
        break;
      bb = BASIC_BLOCK (i);
    }
}
\f
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e)
        {
          basic_block jump_block;
          rtx pos;

          if ((e->flags & EDGE_CRITICAL) == 0
              && e->src != ENTRY_BLOCK_PTR)
            {
              /* Non critical -- we can simply add a jump to the end
                 of the existing predecessor.  */
              jump_block = e->src;
            }
          else
            {
              /* We need a new block to hold the jump.  The simplest
                 way to do the bulk of the work here is to recursively
                 call ourselves.  */
              jump_block = split_edge (e);
              e = jump_block->succ;
            }

          /* Now add the jump insn ...  */
          pos = emit_jump_insn_after (gen_jump (old_succ->head),
                                      jump_block->end);
          jump_block->end = pos;
          if (basic_block_for_insn)
            set_block_for_insn (pos, jump_block);
          emit_barrier_after (pos);

          /* ... let jump know that label is in use, ...  */
          JUMP_LABEL (pos) = old_succ->head;
          ++LABEL_NUSES (old_succ->head);

          /* ... and clear fallthru on the outgoing edge.  */
          e->flags &= ~EDGE_FALLTHRU;

          /* Continue splitting the interesting edge.  */
        }
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

                E
                |
                0
               / \
           +->1-->2--->E
           |  |
           +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
                                PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
        abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
          && (tmp = NEXT_INSN (tmp)) != NULL_RTX
          && GET_CODE (tmp) == JUMP_INSN
          && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
              || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
        {
          rtvec vec;
          int j;

          if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
            vec = XVEC (PATTERN (tmp), 0);
          else
            vec = XVEC (PATTERN (tmp), 1);

          for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
            if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
              {
                RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
                --LABEL_NUSES (old_label);
                ++LABEL_NUSES (new_label);
              }

          /* Handle casesi dispatch insns */
          if ((tmp = single_set (insn)) != NULL
              && SET_DEST (tmp) == pc_rtx
              && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
              && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
              && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
            {
              XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
                                                           new_label);
              --LABEL_NUSES (old_label);
              ++LABEL_NUSES (new_label);
            }
        }
      else
        {
          /* This would have indicated an abnormal edge.  */
          if (computed_jump_p (insn))
            abort ();

          /* A return instruction can't be redirected.  */
          if (returnjump_p (insn))
            abort ();

          /* If the insn doesn't go where we think, we're confused.  */
          if (JUMP_LABEL (insn) != old_label)
            abort ();

          redirect_jump (insn, new_label);
        }

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}

/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
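
/* For illustration, the typical usage pattern queues insns on one or
   more edges and then commits them all at once; DEST and SRC here are
   hypothetical registers:

        insert_insn_on_edge (gen_move_insn (dest, src), e);
        ...
        commit_edge_insertions ();  */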

/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
         a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
        tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
          && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
        tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
        before = tmp;
      else
        after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
           && e->src->succ->succ_next == NULL
           && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
         where some forms of unconditional jumps clobber a register.  This
         happens on the fr30 for example.

         We know this block has a single successor, so we can just emit
         the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
        {
          before = bb->end;
        }
      else
        {
          /* We'd better be fallthru, or we've lost track of what's what.  */
          if ((e->flags & EDGE_FALLTHRU) == 0)
            abort ();

          after = bb->end;
        }
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
        set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
        bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
        {
          bb->end = last;

          if (GET_CODE (last) == JUMP_INSN)
            {
              if (returnjump_p (last))
                {
                  /* ??? Remove all outgoing edges from BB and add one
                     for EXIT.  This is not currently a problem because
                     this only happens for the (single) epilogue, which
                     already has a fallthru edge to EXIT.  */

                  e = bb->succ;
                  if (e->dest != EXIT_BLOCK_PTR
                      || e->succ_next != NULL
                      || (e->flags & EDGE_FALLTHRU) == 0)
                    abort ();
                  e->flags &= ~EDGE_FALLTHRU;

                  emit_barrier_after (last);
                }
              else
                abort ();
            }
        }
    }
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
        {
          next = e->succ_next;
          if (e->insns)
            commit_one_edge_insertion (e);
        }

      if (++i >= n_basic_blocks)
        break;
      bb = BASIC_BLOCK (i);
    }
}
\f
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
        if (!e->dest->aux)
          {
            *tos++ = e->dest;
            e->dest->aux = e;
          }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in flow_delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
        /* This block was found.  Tidy up the mark.  */
        b->aux = NULL;
      else
        deleted_handler |= flow_delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}

/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
            (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
          {
            int num = NOTE_EH_HANDLER (insn);
            /* A NULL handler indicates a region is no longer needed,
               as long as its rethrow label isn't used.  */
            if (get_first_handler (num) == NULL && ! rethrow_used (num))
              {
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
          }
      }
}

/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
1843 /* Unchain the insns one by one. It would be quicker to delete all
1844 of these with a single unchaining, rather than one at a time, but
1845 we need to keep the NOTEs. */
1846
1847 rtx next;
1848
1849 while (1)
1850 {
1851 next = NEXT_INSN (start);
1852 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1853 ;
1854 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1855 ;
1856 else
1857 next = flow_delete_insn (start);
1858
1859 if (start == finish)
1860 break;
1861 start = next;
1862 }
1863 }
1864
1865 /* Delete the insns in a (non-live) block. We physically delete every
1866 non-deleted-note insn, and update the flow graph appropriately.
1867
1868 Return nonzero if we deleted an exception handler. */
1869
1870 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1871 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1872
1873 int
1874 flow_delete_block (b)
1875 basic_block b;
1876 {
1877 int deleted_handler = 0;
1878 rtx insn, end, tmp;
1879
1880 /* If the head of this block is a CODE_LABEL, then it might be the
1881 label for an exception handler which can't be reached.
1882
1883 We need to remove the label from the exception_handler_label list
1884 and remove the associated NOTE_INSN_EH_REGION_BEG and
1885 NOTE_INSN_EH_REGION_END notes. */
1886
1887 insn = b->head;
1888
1889 never_reached_warning (insn);
1890
1891 if (GET_CODE (insn) == CODE_LABEL)
1892 {
1893 rtx x, *prev = &exception_handler_labels;
1894
1895 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1896 {
1897 if (XEXP (x, 0) == insn)
1898 {
1899 /* Found a match, splice this label out of the EH label list. */
1900 *prev = XEXP (x, 1);
1901 XEXP (x, 1) = NULL_RTX;
1902 XEXP (x, 0) = NULL_RTX;
1903
1904 /* Remove the handler from all regions. */
1905 remove_handler (insn);
1906 deleted_handler = 1;
1907 break;
1908 }
1909 prev = &XEXP (x, 1);
1910 }
1911
1912 /* This label may be referenced by code solely for its value, or
1913 referenced by static data, or something. We have determined
1914 that it is not reachable, but cannot delete the label itself.
1915 Save code space and continue to delete the balance of the block,
1916 along with properly updating the cfg. */
1917 if (!can_delete_label_p (insn))
1918 {
1919 /* If we've only got one of these, skip the whole deleting
1920 insns thing. */
1921 if (insn == b->end)
1922 goto no_delete_insns;
1923 insn = NEXT_INSN (insn);
1924 }
1925 }
1926
1927 /* Include any jump table following the basic block. */
1928 end = b->end;
1929 if (GET_CODE (end) == JUMP_INSN
1930 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1931 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1932 && GET_CODE (tmp) == JUMP_INSN
1933 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1934 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1935 end = tmp;
1936
1937 /* Include any barrier that may follow the basic block. */
1938 tmp = next_nonnote_insn (end);
1939 if (tmp && GET_CODE (tmp) == BARRIER)
1940 end = tmp;
1941
1942 /* Selectively delete the entire chain. */
1943 flow_delete_insn_chain (insn, end);
1944
1945 no_delete_insns:
1946
1947 /* Remove the edges into and out of this block. Note that there may
1948 indeed be edges in, if we are removing an unreachable loop. */
1949 {
1950 edge e, next, *q;
1951
1952 for (e = b->pred; e ; e = next)
1953 {
1954 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1955 continue;
1956 *q = e->succ_next;
1957 next = e->pred_next;
1958 n_edges--;
1959 free (e);
1960 }
1961 for (e = b->succ; e ; e = next)
1962 {
1963 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1964 continue;
1965 *q = e->pred_next;
1966 next = e->succ_next;
1967 n_edges--;
1968 free (e);
1969 }
1970
1971 b->pred = NULL;
1972 b->succ = NULL;
1973 }
1974
1975 /* Remove the basic block from the array, and compact behind it. */
1976 expunge_block (b);
1977
1978 return deleted_handler;
1979 }
1980
1981 /* Remove block B from the basic block array and compact behind it. */
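/* Illustration (an editorial sketch, not from the original sources):
   with blocks numbered 0 1 2 3, expunging block 1 shifts blocks 2 and
   3 down to indices 1 and 2, and n_basic_blocks drops from 4 to 3. */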
1982
1983 static void
1984 expunge_block (b)
1985 basic_block b;
1986 {
1987 int i, n = n_basic_blocks;
1988
1989 for (i = b->index; i + 1 < n; ++i)
1990 {
1991 basic_block x = BASIC_BLOCK (i + 1);
1992 BASIC_BLOCK (i) = x;
1993 x->index = i;
1994 }
1995
1996 basic_block_info->num_elements--;
1997 n_basic_blocks--;
1998 }
1999
2000 /* Delete INSN by patching it out. Return the next insn. */
2001
2002 rtx
2003 flow_delete_insn (insn)
2004 rtx insn;
2005 {
2006 rtx prev = PREV_INSN (insn);
2007 rtx next = NEXT_INSN (insn);
2008 rtx note;
2009
2010 PREV_INSN (insn) = NULL_RTX;
2011 NEXT_INSN (insn) = NULL_RTX;
2012
2013 if (prev)
2014 NEXT_INSN (prev) = next;
2015 if (next)
2016 PREV_INSN (next) = prev;
2017 else
2018 set_last_insn (prev);
2019
2020 if (GET_CODE (insn) == CODE_LABEL)
2021 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2022
2023 /* If deleting a jump, decrement the use count of the label. Deleting
2024 the label itself should happen in the normal course of block merging. */
2025 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2026 LABEL_NUSES (JUMP_LABEL (insn))--;
2027
2028 /* Also if deleting an insn that references a label. */
2029 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2030 LABEL_NUSES (XEXP (note, 0))--;
2031
2032 return next;
2033 }
2034
2035 /* True if a given label can be deleted. */
2036
2037 static int
2038 can_delete_label_p (label)
2039 rtx label;
2040 {
2041 rtx x;
2042
2043 if (LABEL_PRESERVE_P (label))
2044 return 0;
2045
2046 for (x = forced_labels; x ; x = XEXP (x, 1))
2047 if (label == XEXP (x, 0))
2048 return 0;
2049 for (x = label_value_list; x ; x = XEXP (x, 1))
2050 if (label == XEXP (x, 0))
2051 return 0;
2052 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2053 if (label == XEXP (x, 0))
2054 return 0;
2055
2056 /* User declared labels must be preserved. */
2057 if (LABEL_NAME (label) != 0)
2058 return 0;
2059
2060 return 1;
2061 }
2062
2063 /* Blocks A and B are to be merged into a single block A. The insns
2064 are already contiguous, hence `nomove'. */
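/* Sketch of the transformation (editorial, not from the original
   sources):

   A: insns-a ... (jump L)      A: insns-a ... insns-b
   B: L: (bb note) insns-b  ==>

   A's trailing jump and B's label and basic block note are deleted,
   and B's remaining insns are reassociated with A. */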
2065
2066 void
2067 merge_blocks_nomove (a, b)
2068 basic_block a, b;
2069 {
2070 edge e;
2071 rtx b_head, b_end, a_end;
2072 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2073 int b_empty = 0;
2074
2075 /* If there was a CODE_LABEL beginning B, delete it. */
2076 b_head = b->head;
2077 b_end = b->end;
2078 if (GET_CODE (b_head) == CODE_LABEL)
2079 {
2080 /* Detect basic blocks with nothing but a label. This can happen
2081 in particular at the end of a function. */
2082 if (b_head == b_end)
2083 b_empty = 1;
2084 del_first = del_last = b_head;
2085 b_head = NEXT_INSN (b_head);
2086 }
2087
2088 /* Delete the basic block note. */
2089 if (GET_CODE (b_head) == NOTE
2090 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2091 {
2092 if (b_head == b_end)
2093 b_empty = 1;
2094 if (! del_last)
2095 del_first = b_head;
2096 del_last = b_head;
2097 b_head = NEXT_INSN (b_head);
2098 }
2099
2100 /* If there was a jump out of A, delete it. */
2101 a_end = a->end;
2102 if (GET_CODE (a_end) == JUMP_INSN)
2103 {
2104 rtx prev;
2105
2106 prev = prev_nonnote_insn (a_end);
2107 if (!prev)
2108 prev = a->head;
2109
2110 del_first = a_end;
2111
2112 #ifdef HAVE_cc0
2113 /* If this was a conditional jump, we need to also delete
2114 the insn that set cc0. */
2115 if (prev && sets_cc0_p (prev))
2116 {
2117 rtx tmp = prev;
2118 prev = prev_nonnote_insn (prev);
2119 if (!prev)
2120 prev = a->head;
2121 del_first = tmp;
2122 }
2123 #endif
2124
2125 a_end = prev;
2126 }
2127
2128 /* Delete everything marked above as well as crap that might be
2129 hanging out between the two blocks. */
2130 flow_delete_insn_chain (del_first, del_last);
2131
2132 /* Normally there should only be one successor of A and that is B, but
2133 partway through the merge of blocks for conditional_execution we'll
2134 be merging a TEST block with THEN and ELSE successors. Free the
2135 whole lot of them and hope the caller knows what they're doing. */
2136 while (a->succ)
2137 remove_edge (a->succ);
2138
2139 /* Adjust the edges out of B for the new owner. */
2140 for (e = b->succ; e ; e = e->succ_next)
2141 e->src = a;
2142 a->succ = b->succ;
2143
2144 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2145 b->pred = b->succ = NULL;
2146
2147 /* Reassociate the insns of B with A. */
2148 if (!b_empty)
2149 {
2150 if (basic_block_for_insn)
2151 {
2152 BLOCK_FOR_INSN (b_head) = a;
2153 while (b_head != b_end)
2154 {
2155 b_head = NEXT_INSN (b_head);
2156 BLOCK_FOR_INSN (b_head) = a;
2157 }
2158 }
2159 a_end = b_end;
2160 }
2161 a->end = a_end;
2162
2163 expunge_block (b);
2164 }
2165
2166 /* Blocks A and B are to be merged into a single block. A has no incoming
2167 fallthru edge, so it can be moved before B without adding or modifying
2168 any jumps (aside from the jump from A to B). */
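/* Sketch (editorial, not from the original sources): given the insn
   stream ... A (ending in a jump to B) BARRIER ... B ..., block A is
   lifted out of the stream and placed immediately before B, after
   which merge_blocks_nomove joins the two. */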
2169
2170 static int
2171 merge_blocks_move_predecessor_nojumps (a, b)
2172 basic_block a, b;
2173 {
2174 rtx start, end, barrier;
2175 int index;
2176
2177 start = a->head;
2178 end = a->end;
2179
2180 /* We want to delete the BARRIER after the end of the insns we are
2181 going to move. If we don't find a BARRIER, then do nothing. This
2182 can happen in some cases if we have labels we cannot delete.
2183
2184 Similarly, do nothing if we cannot delete the label at the start
2185 of the target block. */
2186 barrier = next_nonnote_insn (end);
2187 if (GET_CODE (barrier) != BARRIER
2188 || (GET_CODE (b->head) == CODE_LABEL
2189 && ! can_delete_label_p (b->head)))
2190 return 0;
2191 else
2192 flow_delete_insn (barrier);
2193
2194 /* Move block and loop notes out of the chain so that we do not
2195 disturb their order.
2196
2197 ??? A better solution would be to squeeze out all the non-nested notes
2198 and adjust the block trees appropriately. Even better would be to have
2199 a tighter connection between block trees and rtl so that this is not
2200 necessary. */
2201 start = squeeze_notes (start, end);
2202
2203 /* Scramble the insn chain. */
2204 if (end != PREV_INSN (b->head))
2205 reorder_insns (start, end, PREV_INSN (b->head));
2206
2207 if (rtl_dump_file)
2208 {
2209 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2210 a->index, b->index);
2211 }
2212
2213 /* Swap the records for the two blocks around. Although we are deleting B,
2214 A is now where B was and we want to compact the BB array from where
2215 A used to be. */
2216 BASIC_BLOCK (a->index) = b;
2217 BASIC_BLOCK (b->index) = a;
2218 index = a->index;
2219 a->index = b->index;
2220 b->index = index;
2221
2222 /* Now blocks A and B are contiguous. Merge them. */
2223 merge_blocks_nomove (a, b);
2224
2225 return 1;
2226 }
2227
2228 /* Blocks A and B are to be merged into a single block. B has no outgoing
2229 fallthru edge, so it can be moved after A without adding or modifying
2230 any jumps (aside from the jump from A to B). */
2231
2232 static int
2233 merge_blocks_move_successor_nojumps (a, b)
2234 basic_block a, b;
2235 {
2236 rtx start, end, barrier;
2237
2238 start = b->head;
2239 end = b->end;
2240
2241 /* We want to delete the BARRIER after the end of the insns we are
2242 going to move. If we don't find a BARRIER, then do nothing. This
2243 can happen in some cases if we have labels we cannot delete.
2244
2245 Similarly, do nothing if we cannot delete the label at the start
2246 of the target block. */
2247 barrier = next_nonnote_insn (end);
2248 if (GET_CODE (barrier) != BARRIER
2249 || (GET_CODE (b->head) == CODE_LABEL
2250 && ! can_delete_label_p (b->head)))
2251 return 0;
2252 else
2253 flow_delete_insn (barrier);
2254
2255 /* Move block and loop notes out of the chain so that we do not
2256 disturb their order.
2257
2258 ??? A better solution would be to squeeze out all the non-nested notes
2259 and adjust the block trees appropriately. Even better would be to have
2260 a tighter connection between block trees and rtl so that this is not
2261 necessary. */
2262 start = squeeze_notes (start, end);
2263
2264 /* Scramble the insn chain. */
2265 reorder_insns (start, end, a->end);
2266
2267 /* Now blocks A and B are contiguous. Merge them. */
2268 merge_blocks_nomove (a, b);
2269
2270 if (rtl_dump_file)
2271 {
2272 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2273 b->index, a->index);
2274 }
2275
2276 return 1;
2277 }
2278
2279 /* Attempt to merge basic blocks that are potentially non-adjacent.
2280 Return true iff the attempt succeeded. */
2281
2282 static int
2283 merge_blocks (e, b, c)
2284 edge e;
2285 basic_block b, c;
2286 {
2287 /* If B has a fallthru edge to C, no need to move anything. */
2288 if (e->flags & EDGE_FALLTHRU)
2289 {
2290 /* If a label still appears somewhere and we cannot delete the label,
2291 then we cannot merge the blocks. The edge was tidied already. */
2292
2293 rtx insn, stop = NEXT_INSN (c->head);
2294 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2295 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2296 return 0;
2297
2298 merge_blocks_nomove (b, c);
2299
2300 if (rtl_dump_file)
2301 {
2302 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2303 b->index, c->index);
2304 }
2305
2306 return 1;
2307 }
2308 else
2309 {
2310 edge tmp_edge;
2311 basic_block d;
2312 int c_has_outgoing_fallthru;
2313 int b_has_incoming_fallthru;
2314
2315 /* We must make sure to not munge nesting of exception regions,
2316 lexical blocks, and loop notes.
2317
2318 The first is taken care of by requiring that the active eh
2319 region at the end of one block always matches the active eh
2320 region at the beginning of the next block.
2321
2322 The latter two are taken care of by squeezing out all the notes. */
2323
2324 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2325 executed and we may want to treat blocks which have two out
2326 edges, one normal, one abnormal as only having one edge for
2327 block merging purposes. */
2328
2329 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2330 if (tmp_edge->flags & EDGE_FALLTHRU)
2331 break;
2332 c_has_outgoing_fallthru = (tmp_edge != NULL);
2333
2334 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2335 if (tmp_edge->flags & EDGE_FALLTHRU)
2336 break;
2337 b_has_incoming_fallthru = (tmp_edge != NULL);
2338
2339 /* If B does not have an incoming fallthru, and the exception regions
2340 match, then it can be moved immediately before C without introducing
2341 or modifying jumps.
2342
2343 C cannot be the first block, so we do not have to worry about
2344 accessing a non-existent block. */
2345 d = BASIC_BLOCK (c->index - 1);
2346 if (! b_has_incoming_fallthru
2347 && d->eh_end == b->eh_beg
2348 && b->eh_end == c->eh_beg)
2349 return merge_blocks_move_predecessor_nojumps (b, c);
2350
2351 /* Otherwise, we're going to try to move C after B. Make sure the
2352 exception regions match.
2353
2354 If B is the last basic block, then we must not try to access the
2355 block structure for block B + 1. Luckily in that case we do not
2356 need to worry about matching exception regions. */
2357 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2358 if (b->eh_end == c->eh_beg
2359 && (d == NULL || c->eh_end == d->eh_beg))
2360 {
2361 /* If C does not have an outgoing fallthru, then it can be moved
2362 immediately after B without introducing or modifying jumps. */
2363 if (! c_has_outgoing_fallthru)
2364 return merge_blocks_move_successor_nojumps (b, c);
2365
2366 /* Otherwise, we'll need to insert an extra jump, and possibly
2367 a new block to contain it. */
2368 /* ??? Not implemented yet. */
2369 }
2370
2371 return 0;
2372 }
2373 }
2374
2375 /* Top level driver for merge_blocks. */
2376
2377 static void
2378 try_merge_blocks ()
2379 {
2380 int i;
2381
2382 /* Attempt to merge blocks as made possible by edge removal. If a block
2383 has only one successor, and the successor has only one predecessor,
2384 they may be combined. */
2385
2386 for (i = 0; i < n_basic_blocks; )
2387 {
2388 basic_block c, b = BASIC_BLOCK (i);
2389 edge s;
2390
2391 /* A loop because chains of blocks might be combinable. */
2392 while ((s = b->succ) != NULL
2393 && s->succ_next == NULL
2394 && (s->flags & EDGE_EH) == 0
2395 && (c = s->dest) != EXIT_BLOCK_PTR
2396 && c->pred->pred_next == NULL
2397 /* If the jump insn has side effects, we can't kill the edge. */
2398 && (GET_CODE (b->end) != JUMP_INSN
2399 || onlyjump_p (b->end))
2400 && merge_blocks (s, b, c))
2401 continue;
2402
2403 /* Don't get confused by the index shift caused by deleting blocks. */
2404 i = b->index + 1;
2405 }
2406 }
2407
2408 /* The given edge should potentially be a fallthru edge. If that is in
2409 fact true, delete the jump and barriers that are in the way. */
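/* Example (editorial, not from the original sources): if block B ends
   in (jump L) and block C begins with the label L just past a BARRIER,
   the jump and barrier are removed and the edge from B to C is marked
   EDGE_FALLTHRU. */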
2410
2411 void
2412 tidy_fallthru_edge (e, b, c)
2413 edge e;
2414 basic_block b, c;
2415 {
2416 rtx q;
2417
2418 /* ??? In a late-running flow pass, other folks may have deleted basic
2419 blocks by nopping out blocks, leaving multiple BARRIERs between here
2420 and the target label. They ought to be chastised and fixed.
2421
2422 We can also wind up with a sequence of undeletable labels between
2423 one block and the next.
2424
2425 So search through a sequence of barriers, labels, and notes for
2426 the head of block C and assert that we really do fall through. */
2427
2428 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2429 return;
2430
2431 /* Remove what will soon cease being the jump insn from the source block.
2432 If block B consisted only of this single jump, turn it into a deleted
2433 note. */
2434 q = b->end;
2435 if (GET_CODE (q) == JUMP_INSN
2436 && (simplejump_p (q)
2437 || (b->succ == e && e->succ_next == NULL)))
2438 {
2439 #ifdef HAVE_cc0
2440 /* If this was a conditional jump, we need to also delete
2441 the insn that set cc0. */
2442 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2443 q = PREV_INSN (q);
2444 #endif
2445
2446 if (b->head == q)
2447 {
2448 PUT_CODE (q, NOTE);
2449 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2450 NOTE_SOURCE_FILE (q) = 0;
2451 }
2452 else
2453 b->end = q = PREV_INSN (q);
2454 }
2455
2456 /* Selectively unlink the sequence. */
2457 if (q != PREV_INSN (c->head))
2458 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2459
2460 e->flags |= EDGE_FALLTHRU;
2461 }
2462
2463 /* Fix up edges that now fall through, or rather should now fall through
2464 but previously required a jump around now deleted blocks. Simplify
2465 the search by only examining blocks numerically adjacent, since this
2466 is how find_basic_blocks created them. */
2467
2468 static void
2469 tidy_fallthru_edges ()
2470 {
2471 int i;
2472
2473 for (i = 1; i < n_basic_blocks; ++i)
2474 {
2475 basic_block b = BASIC_BLOCK (i - 1);
2476 basic_block c = BASIC_BLOCK (i);
2477 edge s;
2478
2479 /* We care about simple conditional or unconditional jumps with
2480 a single successor.
2481
2482 If we had a conditional branch to the next instruction when
2483 find_basic_blocks was called, then there will only be one
2484 out edge for the block which ended with the conditional
2485 branch (since we do not create duplicate edges).
2486
2487 Furthermore, the edge will be marked as a fallthru because we
2488 merge the flags for the duplicate edges. So we do not want to
2489 check that the edge is not a FALLTHRU edge. */
2490 if ((s = b->succ) != NULL
2491 && s->succ_next == NULL
2492 && s->dest == c
2493 /* If the jump insn has side effects, we can't tidy the edge. */
2494 && (GET_CODE (b->end) != JUMP_INSN
2495 || onlyjump_p (b->end)))
2496 tidy_fallthru_edge (s, b, c);
2497 }
2498 }
2499 \f
2500 /* Perform data flow analysis.
2501 F is the first insn of the function; FLAGS is a set of PROP_* flags
2502 to be used in accumulating flow info. */
2503
2504 void
2505 life_analysis (f, file, flags)
2506 rtx f;
2507 FILE *file;
2508 int flags;
2509 {
2510 #ifdef ELIMINABLE_REGS
2511 register int i;
2512 static struct { int from, to; } eliminables[] = ELIMINABLE_REGS;
2513 #endif
2514
2515 /* Record which registers will be eliminated. We use this in
2516 mark_used_regs. */
2517
2518 CLEAR_HARD_REG_SET (elim_reg_set);
2519
2520 #ifdef ELIMINABLE_REGS
2521 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2522 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2523 #else
2524 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2525 #endif
2526
2527 if (! optimize)
2528 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2529
2530 /* The post-reload life analysis must have (on a global basis) the
2531 same registers live as were computed by reload itself. Otherwise
2532 elimination offsets and such may be incorrect.
2533
2534 Reload will mark some registers live even though they do not
2535 appear in the rtl. */
2536 if (reload_completed)
2537 flags &= ~PROP_REG_INFO;
2538
2539 /* We want alias analysis information for local dead store elimination. */
2540 if (flags & PROP_SCAN_DEAD_CODE)
2541 init_alias_analysis ();
2542
2543 /* Always remove no-op moves. Do this before other processing so
2544 that we don't have to keep re-scanning them. */
2545 delete_noop_moves (f);
2546
2547 /* Some targets can emit simpler epilogues if they know that sp was
2548 not ever modified during the function. After reload, of course,
2549 we've already emitted the epilogue so there's no sense searching. */
2550 if (! reload_completed)
2551 notice_stack_pointer_modification (f);
2552
2553 /* Allocate and zero out data structures that will record the
2554 data from lifetime analysis. */
2555 allocate_reg_life_data ();
2556 allocate_bb_life_data ();
2557
2558 /* Find the set of registers live on function exit. */
2559 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2560
2561 /* "Update" life info from zero. It'd be nice to begin the
2562 relaxation with just the exit and noreturn blocks, but that set
2563 is not immediately handy. */
2564
2565 if (flags & PROP_REG_INFO)
2566 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2567 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2568
2569 /* Clean up. */
2570 if (flags & PROP_SCAN_DEAD_CODE)
2571 end_alias_analysis ();
2572
2573 if (file)
2574 dump_flow_info (file);
2575
2576 free_basic_block_vars (1);
2577 }
2578
2579 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2580 Search for REGNO. If found, abort if it is not wider than word_mode. */
2581
2582 static int
2583 verify_wide_reg_1 (px, pregno)
2584 rtx *px;
2585 void *pregno;
2586 {
2587 rtx x = *px;
2588 unsigned int regno = *(int *) pregno;
2589
2590 if (GET_CODE (x) == REG && REGNO (x) == regno)
2591 {
2592 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2593 abort ();
2594 return 1;
2595 }
2596 return 0;
2597 }
2598
2599 /* A subroutine of verify_local_live_at_start. Search through insns
2600 between HEAD and END looking for register REGNO. */
2601
2602 static void
2603 verify_wide_reg (regno, head, end)
2604 int regno;
2605 rtx head, end;
2606 {
2607 while (1)
2608 {
2609 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2610 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2611 return;
2612 if (head == end)
2613 break;
2614 head = NEXT_INSN (head);
2615 }
2616
2617 /* We didn't find the register at all. Something's way screwy. */
2618 abort ();
2619 }
2620
2621 /* A subroutine of update_life_info. Verify that there are no untoward
2622 changes in live_at_start during a local update. */
2623
2624 static void
2625 verify_local_live_at_start (new_live_at_start, bb)
2626 regset new_live_at_start;
2627 basic_block bb;
2628 {
2629 if (reload_completed)
2630 {
2631 /* After reload, there are no pseudos, nor subregs of multi-word
2632 registers. The regsets should exactly match. */
2633 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2634 abort ();
2635 }
2636 else
2637 {
2638 int i;
2639
2640 /* Find the set of changed registers. */
2641 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2642
2643 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2644 {
2645 /* No registers should die. */
2646 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2647 abort ();
2648 /* Verify that the now-live register is wider than word_mode. */
2649 verify_wide_reg (i, bb->head, bb->end);
2650 });
2651 }
2652 }
2653
2654 /* Updates life information starting with the basic blocks set in BLOCKS.
2655 If BLOCKS is null, consider it to be the universal set.
2656
2657 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2658 we are only expecting local modifications to basic blocks. If we find
2659 extra registers live at the beginning of a block, then we either killed
2660 useful data, or we have a broken split that wants data not provided.
2661 If we find registers removed from live_at_start, that means we have
2662 a broken peephole that is killing a register it shouldn't.
2663
2664 ??? This is not true in one situation -- when a pre-reload splitter
2665 generates subregs of a multi-word pseudo, current life analysis will
2666 lose the kill. So we _can_ have a pseudo go live. How irritating.
2667
2668 Including PROP_REG_INFO does not properly refresh regs_ever_live
2669 unless the caller resets it to zero. */
2670
2671 void
2672 update_life_info (blocks, extent, prop_flags)
2673 sbitmap blocks;
2674 enum update_life_extent extent;
2675 int prop_flags;
2676 {
2677 regset tmp;
2678 regset_head tmp_head;
2679 int i;
2680
2681 tmp = INITIALIZE_REG_SET (tmp_head);
2682
2683 /* For a global update, we go through the relaxation process again. */
2684 if (extent != UPDATE_LIFE_LOCAL)
2685 {
2686 calculate_global_regs_live (blocks, blocks,
2687 prop_flags & PROP_SCAN_DEAD_CODE);
2688
2689 /* If asked, remove notes from the blocks we'll update. */
2690 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2691 count_or_remove_death_notes (blocks, 1);
2692 }
2693
2694 if (blocks)
2695 {
2696 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2697 {
2698 basic_block bb = BASIC_BLOCK (i);
2699
2700 COPY_REG_SET (tmp, bb->global_live_at_end);
2701 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2702
2703 if (extent == UPDATE_LIFE_LOCAL)
2704 verify_local_live_at_start (tmp, bb);
2705 });
2706 }
2707 else
2708 {
2709 for (i = n_basic_blocks - 1; i >= 0; --i)
2710 {
2711 basic_block bb = BASIC_BLOCK (i);
2712
2713 COPY_REG_SET (tmp, bb->global_live_at_end);
2714 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2715
2716 if (extent == UPDATE_LIFE_LOCAL)
2717 verify_local_live_at_start (tmp, bb);
2718 }
2719 }
2720
2721 FREE_REG_SET (tmp);
2722
2723 if (prop_flags & PROP_REG_INFO)
2724 {
2725 /* The only pseudos that are live at the beginning of the function
2726 are those that were not set anywhere in the function. local-alloc
2727 doesn't know how to handle these correctly, so mark them as not
2728 local to any one basic block. */
2729 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2730 FIRST_PSEUDO_REGISTER, i,
2731 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2732
2733 /* We have a problem with any pseudoreg that lives across the setjmp.
2734 ANSI says that if a user variable does not change in value between
2735 the setjmp and the longjmp, then the longjmp preserves it. This
2736 includes longjmp from a place where the pseudo appears dead.
2737 (In principle, the value still exists if it is in scope.)
2738 If the pseudo goes in a hard reg, some other value may occupy
2739 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2740 Conclusion: such a pseudo must not go in a hard reg. */
2741 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2742 FIRST_PSEUDO_REGISTER, i,
2743 {
2744 if (regno_reg_rtx[i] != 0)
2745 {
2746 REG_LIVE_LENGTH (i) = -1;
2747 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2748 }
2749 });
2750 }
2751 }
2752
2753 /* Free the variables allocated by find_basic_blocks.
2754
2755 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2756
2757 void
2758 free_basic_block_vars (keep_head_end_p)
2759 int keep_head_end_p;
2760 {
2761 if (basic_block_for_insn)
2762 {
2763 VARRAY_FREE (basic_block_for_insn);
2764 basic_block_for_insn = NULL;
2765 }
2766
2767 if (! keep_head_end_p)
2768 {
2769 clear_edges ();
2770 VARRAY_FREE (basic_block_info);
2771 n_basic_blocks = 0;
2772
2773 ENTRY_BLOCK_PTR->aux = NULL;
2774 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2775 EXIT_BLOCK_PTR->aux = NULL;
2776 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2777 }
2778 }
2779
2780 /* Return nonzero if the destination of SET equals the source. */
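/* E.g. (set (reg:SI 42) (reg:SI 42)) is a no-op, as is
   (set (subreg:SI (reg:DI 42) 0) (subreg:SI (reg:DI 42) 0)).
   (Editorial examples, not from the original sources.) */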
2781 static int
2782 set_noop_p (set)
2783 rtx set;
2784 {
2785 rtx src = SET_SRC (set);
2786 rtx dst = SET_DEST (set);
2787
2788 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2789 {
2790 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2791 return 0;
2792 src = SUBREG_REG (src);
2793 dst = SUBREG_REG (dst);
2794 }
2795
2796 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2797 && REGNO (src) == REGNO (dst));
2798 }
2799
2800 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2801 value to itself. */
2802 static int
2803 noop_move_p (insn)
2804 rtx insn;
2805 {
2806 rtx pat = PATTERN (insn);
2807
2808 /* Insns carrying these notes are useful later on. */
2809 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2810 return 0;
2811
2812 if (GET_CODE (pat) == SET && set_noop_p (pat))
2813 return 1;
2814
2815 if (GET_CODE (pat) == PARALLEL)
2816 {
2817 int i;
2818 /* If nothing but SETs of registers to themselves,
2819 this insn can also be deleted. */
2820 for (i = 0; i < XVECLEN (pat, 0); i++)
2821 {
2822 rtx tem = XVECEXP (pat, 0, i);
2823
2824 if (GET_CODE (tem) == USE
2825 || GET_CODE (tem) == CLOBBER)
2826 continue;
2827
2828 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2829 return 0;
2830 }
2831
2832 return 1;
2833 }
2834 return 0;
2835 }
2836
2837 /* Delete any insns that copy a register to itself. */
2838
2839 static void
2840 delete_noop_moves (f)
2841 rtx f;
2842 {
2843 rtx insn;
2844 for (insn = f; insn; insn = NEXT_INSN (insn))
2845 {
2846 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2847 {
2848 PUT_CODE (insn, NOTE);
2849 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2850 NOTE_SOURCE_FILE (insn) = 0;
2851 }
2852 }
2853 }
2854
2855 /* Determine if the stack pointer is constant over the life of the function.
2856 Only useful before prologues have been emitted. */
2857
2858 static void
2859 notice_stack_pointer_modification_1 (x, pat, data)
2860 rtx x;
2861 rtx pat ATTRIBUTE_UNUSED;
2862 void *data ATTRIBUTE_UNUSED;
2863 {
2864 if (x == stack_pointer_rtx
2865 /* The stack pointer is only modified indirectly as the result
2866 of a push until later in flow. See the comments in rtl.texi
2867 regarding Embedded Side-Effects on Addresses. */
2868 || (GET_CODE (x) == MEM
2869 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2870 || GET_CODE (XEXP (x, 0)) == PRE_INC
2871 || GET_CODE (XEXP (x, 0)) == POST_DEC
2872 || GET_CODE (XEXP (x, 0)) == POST_INC)
2873 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2874 current_function_sp_is_unchanging = 0;
2875 }
2876
2877 static void
2878 notice_stack_pointer_modification (f)
2879 rtx f;
2880 {
2881 rtx insn;
2882
2883 /* Assume that the stack pointer is unchanging if alloca hasn't
2884 been used. */
2885 current_function_sp_is_unchanging = !current_function_calls_alloca;
2886 if (! current_function_sp_is_unchanging)
2887 return;
2888
2889 for (insn = f; insn; insn = NEXT_INSN (insn))
2890 {
2891 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2892 {
2893 /* Check if insn modifies the stack pointer. */
2894 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2895 NULL);
2896 if (! current_function_sp_is_unchanging)
2897 return;
2898 }
2899 }
2900 }
2901
2902 /* Mark a register in SET. Hard registers in large modes get all
2903 of their component registers set as well. */
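/* For instance (editorial example, not from the original sources):
   on a target where HARD_REGNO_NREGS (0, DImode) is 2, marking
   (reg:DI 0) sets bits 0 and 1 in the regset. */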
2904 static void
2905 mark_reg (reg, xset)
2906 rtx reg;
2907 void *xset;
2908 {
2909 regset set = (regset) xset;
2910 int regno = REGNO (reg);
2911
2912 if (GET_MODE (reg) == BLKmode)
2913 abort ();
2914
2915 SET_REGNO_REG_SET (set, regno);
2916 if (regno < FIRST_PSEUDO_REGISTER)
2917 {
2918 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2919 while (--n > 0)
2920 SET_REGNO_REG_SET (set, regno + n);
2921 }
2922 }
2923
2924 /* Mark those regs which are needed at the end of the function as live
2925 at the end of the last basic block. */
2926 static void
2927 mark_regs_live_at_end (set)
2928 regset set;
2929 {
2930 int i;
2931
2932 /* If exiting needs the right stack value, consider the stack pointer
2933 live at the end of the function. */
2934 if ((HAVE_epilogue && reload_completed)
2935 || ! EXIT_IGNORE_STACK
2936 || (! FRAME_POINTER_REQUIRED
2937 && ! current_function_calls_alloca
2938 && flag_omit_frame_pointer)
2939 || current_function_sp_is_unchanging)
2940 {
2941 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2942 }
2943
2944 /* Mark the frame pointer if needed at the end of the function. If
2945 we end up eliminating it, it will be removed from the live list
2946 of each basic block by reload. */
2947
2948 if (! reload_completed || frame_pointer_needed)
2949 {
2950 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2951 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2952 /* If they are different, also mark the hard frame pointer as live. */
2953 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2954 #endif
2955 }
2956
2957 #ifdef PIC_OFFSET_TABLE_REGNUM
2958 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2959 /* Many architectures have a GP register even without flag_pic.
2960 Assume the pic register is not in use, or will be handled by
2961 other means, if it is not fixed. */
2962 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2963 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2964 #endif
2965 #endif
2966
2967 /* Mark all global registers, and all registers used by the epilogue
2968 as being live at the end of the function since they may be
2969 referenced by our caller. */
2970 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2971 if (global_regs[i]
2972 #ifdef EPILOGUE_USES
2973 || EPILOGUE_USES (i)
2974 #endif
2975 )
2976 SET_REGNO_REG_SET (set, i);
2977
2978 /* Mark all call-saved registers that we actually used. */
2979 if (HAVE_epilogue && reload_completed)
2980 {
2981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2982 if (! call_used_regs[i] && regs_ever_live[i])
2983 SET_REGNO_REG_SET (set, i);
2984 }
2985
2986 /* Mark function return value. */
2987 diddle_return_value (mark_reg, set);
2988 }
2989
2990 /* Callback function for for_each_successor_phi. DATA is a regset.
2991 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2992 INSN, in the regset. */
2993
2994 static int
2995 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2996 rtx insn ATTRIBUTE_UNUSED;
2997 int dest_regno ATTRIBUTE_UNUSED;
2998 int src_regno;
2999 void *data;
3000 {
3001 regset live = (regset) data;
3002 SET_REGNO_REG_SET (live, src_regno);
3003 return 0;
3004 }
3005
3006 /* Propagate global life info around the graph of basic blocks. Begin
3007 considering blocks with their corresponding bit set in BLOCKS_IN.
3008 If BLOCKS_IN is null, consider it the universal set.
3009
3010 BLOCKS_OUT is set for every block that was changed. */
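/* In textbook terms (editorial note, not from the original sources),
   this is backward liveness dataflow iterated to a fixed point:

   live_at_end (bb) = union of live_at_start over bb's successors
   live_at_start (bb) = use (bb) | (live_at_end (bb) & ~def (bb))

   with propagate_block supplying the per-block transfer function. */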
3011
3012 static void
3013 calculate_global_regs_live (blocks_in, blocks_out, flags)
3014 sbitmap blocks_in, blocks_out;
3015 int flags;
3016 {
3017 basic_block *queue, *qhead, *qtail, *qend;
3018 regset tmp, new_live_at_end;
3019 regset_head tmp_head;
3020 regset_head new_live_at_end_head;
3021 int i;
3022
3023 tmp = INITIALIZE_REG_SET (tmp_head);
3024 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3025
3026 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and
3027 one more because the `head == tail' style test for an empty queue
3028 doesn't work with a full queue. */
3029 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3030 qtail = queue;
3031 qhead = qend = queue + n_basic_blocks + 2;
3032
3033 /* Clear out the garbage that might be hanging out in bb->aux. */
3034 for (i = n_basic_blocks - 1; i >= 0; --i)
3035 BASIC_BLOCK (i)->aux = NULL;
3036
3037 /* Queue the blocks set in the initial mask. Do this in reverse block
3038 number order so that we are more likely for the first round to do
3039 useful work. We use AUX non-null to flag that the block is queued. */
3040 if (blocks_in)
3041 {
3042 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3043 {
3044 basic_block bb = BASIC_BLOCK (i);
3045 *--qhead = bb;
3046 bb->aux = bb;
3047 });
3048 }
3049 else
3050 {
3051 for (i = 0; i < n_basic_blocks; ++i)
3052 {
3053 basic_block bb = BASIC_BLOCK (i);
3054 *--qhead = bb;
3055 bb->aux = bb;
3056 }
3057 }
3058
3059 if (blocks_out)
3060 sbitmap_zero (blocks_out);
3061
3062 while (qhead != qtail)
3063 {
3064 int rescan, changed;
3065 basic_block bb;
3066 edge e;
3067
3068 bb = *qhead++;
3069 if (qhead == qend)
3070 qhead = queue;
3071 bb->aux = NULL;
3072
3073 /* Begin by propagating live_at_start from the successor blocks. */
3074 CLEAR_REG_SET (new_live_at_end);
3075 for (e = bb->succ; e ; e = e->succ_next)
3076 {
3077 basic_block sb = e->dest;
3078 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3079 }
3080
3081 /* Force the stack pointer to be live -- which might not already be
3082 the case for blocks within infinite loops. */
3083 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3084
3085 /* Regs used in phi nodes are not included in
3086 global_live_at_start, since they are live only along a
3087 particular edge. Set those regs that are live because of a
3088 phi node alternative corresponding to this particular block. */
3089 for_each_successor_phi (bb, &set_phi_alternative_reg,
3090 new_live_at_end);
3091
3092 if (bb == ENTRY_BLOCK_PTR)
3093 {
3094 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3095 continue;
3096 }
3097
3098 /* On our first pass through this block, we'll go ahead and continue.
3099 Recognize the first pass by local_set being NULL. On subsequent passes, we
3100 get to skip out early if live_at_end wouldn't have changed. */
3101
3102 if (bb->local_set == NULL)
3103 {
3104 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3105 rescan = 1;
3106 }
3107 else
3108 {
3109 /* If any bits were removed from live_at_end, we'll have to
3110 rescan the block. This wouldn't be necessary if we had
3111 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3112 local_live is really dependent on live_at_end. */
3113 CLEAR_REG_SET (tmp);
3114 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3115 new_live_at_end, BITMAP_AND_COMPL);
3116
3117 if (! rescan)
3118 {
3119 /* Find the set of changed bits. Take this opportunity
3120 to notice that this set is empty and early out. */
3121 CLEAR_REG_SET (tmp);
3122 changed = bitmap_operation (tmp, bb->global_live_at_end,
3123 new_live_at_end, BITMAP_XOR);
3124 if (! changed)
3125 continue;
3126
3127 /* If any of the changed bits overlap with local_set,
3128 we'll have to rescan the block. Detect overlap by
3129 the AND with ~local_set turning off bits. */
3130 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3131 BITMAP_AND_COMPL);
3132 }
3133 }
3134
3135 /* Let our caller know that BB changed enough to require its
3136 death notes updated. */
3137 if (blocks_out)
3138 SET_BIT (blocks_out, bb->index);
3139
3140 if (! rescan)
3141 {
3142 /* Add to live_at_start the set of all registers in
3143 new_live_at_end that aren't in the old live_at_end. */
3144
3145 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3146 BITMAP_AND_COMPL);
3147 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3148
3149 changed = bitmap_operation (bb->global_live_at_start,
3150 bb->global_live_at_start,
3151 tmp, BITMAP_IOR);
3152 if (! changed)
3153 continue;
3154 }
3155 else
3156 {
3157 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3158
3159 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3160 into live_at_start. */
3161 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3162
3163 /* If live_at_start didn't change, no need to go farther. */
3164 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3165 continue;
3166
3167 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3168 }
3169
3170 /* Queue all predecessors of BB so that we may re-examine
3171 their live_at_end. */
3172 for (e = bb->pred; e ; e = e->pred_next)
3173 {
3174 basic_block pb = e->src;
3175 if (pb->aux == NULL)
3176 {
3177 *qtail++ = pb;
3178 if (qtail == qend)
3179 qtail = queue;
3180 pb->aux = pb;
3181 }
3182 }
3183 }
3184
3185 FREE_REG_SET (tmp);
3186 FREE_REG_SET (new_live_at_end);
3187
3188 if (blocks_out)
3189 {
3190 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3191 {
3192 basic_block bb = BASIC_BLOCK (i);
3193 FREE_REG_SET (bb->local_set);
3194 });
3195 }
3196 else
3197 {
3198 for (i = n_basic_blocks - 1; i >= 0; --i)
3199 {
3200 basic_block bb = BASIC_BLOCK (i);
3201 FREE_REG_SET (bb->local_set);
3202 }
3203 }
3204
3205 free (queue);
3206 }
3207 \f
3208 /* Subroutines of life analysis. */
3209
3210 /* Allocate the permanent data structures that represent the results
3211 of life analysis. Not static since used also for stupid life analysis. */
3212
3213 void
3214 allocate_bb_life_data ()
3215 {
3216 register int i;
3217
3218 for (i = 0; i < n_basic_blocks; i++)
3219 {
3220 basic_block bb = BASIC_BLOCK (i);
3221
3222 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3223 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3224 }
3225
3226 ENTRY_BLOCK_PTR->global_live_at_end
3227 = OBSTACK_ALLOC_REG_SET (function_obstack);
3228 EXIT_BLOCK_PTR->global_live_at_start
3229 = OBSTACK_ALLOC_REG_SET (function_obstack);
3230
3231 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3232 }
3233
3234 void
3235 allocate_reg_life_data ()
3236 {
3237 int i;
3238
3239 max_regno = max_reg_num ();
3240
3241 /* Recalculate the register space, in case it has grown. Old style
3242 vector oriented regsets would set regset_{size,bytes} here also. */
3243 allocate_reg_info (max_regno, FALSE, FALSE);
3244
3245 /* Reset all the data we'll collect in propagate_block and its
3246 subroutines. */
3247 for (i = 0; i < max_regno; i++)
3248 {
3249 REG_N_SETS (i) = 0;
3250 REG_N_REFS (i) = 0;
3251 REG_N_DEATHS (i) = 0;
3252 REG_N_CALLS_CROSSED (i) = 0;
3253 REG_LIVE_LENGTH (i) = 0;
3254 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3255 }
3256 }
3257
3258 /* Delete dead instructions for propagate_block. */
3259
3260 static void
3261 propagate_block_delete_insn (bb, insn)
3262 basic_block bb;
3263 rtx insn;
3264 {
3265 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3266
3267 /* If the insn referred to a label, and that label was attached to
3268 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3269 pretty much mandatory to delete it, because the ADDR_VEC may be
3270 referencing labels that no longer exist. */
3271
3272 if (inote)
3273 {
3274 rtx label = XEXP (inote, 0);
3275 rtx next;
3276
3277 if (LABEL_NUSES (label) == 1
3278 && (next = next_nonnote_insn (label)) != NULL
3279 && GET_CODE (next) == JUMP_INSN
3280 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3281 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3282 {
3283 rtx pat = PATTERN (next);
3284 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3285 int len = XVECLEN (pat, diff_vec_p);
3286 int i;
3287
3288 for (i = 0; i < len; i++)
3289 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3290
3291 flow_delete_insn (next);
3292 }
3293 }
3294
3295 if (bb->end == insn)
3296 bb->end = PREV_INSN (insn);
3297 flow_delete_insn (insn);
3298 }
3299
3300 /* Delete dead libcalls for propagate_block. Return the insn
3301 before the libcall. */
3302
3303 static rtx
3304 propagate_block_delete_libcall (bb, insn, note)
3305 basic_block bb;
3306 rtx insn, note;
3307 {
3308 rtx first = XEXP (note, 0);
3309 rtx before = PREV_INSN (first);
3310
3311 if (insn == bb->end)
3312 bb->end = before;
3313
3314 flow_delete_insn_chain (first, insn);
3315 return before;
3316 }
3317
3318 /* Update the life-status of regs for one insn. Return the previous insn. */
3319
3320 rtx
3321 propagate_one_insn (pbi, insn)
3322 struct propagate_block_info *pbi;
3323 rtx insn;
3324 {
3325 rtx prev = PREV_INSN (insn);
3326 int flags = pbi->flags;
3327 int insn_is_dead = 0;
3328 int libcall_is_dead = 0;
3329 rtx note;
3330 int i;
3331
3332 if (! INSN_P (insn))
3333 return prev;
3334
3335 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3336 if (flags & PROP_SCAN_DEAD_CODE)
3337 {
3338 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3339 REG_NOTES (insn));
3340 libcall_is_dead = (insn_is_dead && note != 0
3341 && libcall_dead_p (pbi, PATTERN (insn),
3342 note, insn));
3343 }
3344
3345 /* We almost certainly don't want to delete prologue or epilogue
3346 instructions. Warn about probable compiler lossage. */
3347 if (insn_is_dead
3348 && reload_completed
3349 && (((HAVE_epilogue || HAVE_prologue)
3350 && prologue_epilogue_contains (insn))
3351 || (HAVE_sibcall_epilogue
3352 && sibcall_epilogue_contains (insn))))
3353 {
3354 if (flags & PROP_KILL_DEAD_CODE)
3355 {
3356 warning ("ICE: would have deleted prologue/epilogue insn");
3357 if (!inhibit_warnings)
3358 debug_rtx (insn);
3359 }
3360 libcall_is_dead = insn_is_dead = 0;
3361 }
3362
3363 /* If an instruction consists of just dead store(s) on final pass,
3364 delete it. */
3365 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3366 {
3367 if (libcall_is_dead)
3368 {
3369 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3370 insn = NEXT_INSN (prev);
3371 }
3372 else
3373 propagate_block_delete_insn (pbi->bb, insn);
3374
3375 /* CC0 is now known to be dead. Either this insn used it,
3376 in which case it doesn't anymore, or clobbered it,
3377 so the next insn can't use it. */
3378 pbi->cc0_live = 0;
3379
3380 return prev;
3381 }
3382
3383 /* See if this is an increment or decrement that can be merged into
3384 a following memory address. */
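/* E.g. (editorial illustration, not from the original sources), on a
   32-bit target:

   (set (reg:SI r) (plus:SI (reg:SI r) (const_int 4)))
   ...
   (mem:SI (reg:SI r))

   can become (mem:SI (pre_inc:SI (reg:SI r))), deleting the add, when
   the target supports pre-increment addressing. */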
3385 #ifdef AUTO_INC_DEC
3386 {
3387 register rtx x = single_set (insn);
3388
3389 /* Does this instruction increment or decrement a register? */
3390 if (!reload_completed
3391 && (flags & PROP_AUTOINC)
3392 && x != 0
3393 && GET_CODE (SET_DEST (x)) == REG
3394 && (GET_CODE (SET_SRC (x)) == PLUS
3395 || GET_CODE (SET_SRC (x)) == MINUS)
3396 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3397 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3398 /* Ok, look for a following memory ref we can combine with.
3399 If one is found, change the memory ref to a PRE_INC
3400 or PRE_DEC, cancel this insn, and return 1.
3401 Return 0 if nothing has been done. */
3402 && try_pre_increment_1 (pbi, insn))
3403 return prev;
3404 }
3405 #endif /* AUTO_INC_DEC */
3406
3407 CLEAR_REG_SET (pbi->new_set);
3408
3409 /* If this is not the final pass, and this insn is copying the value of
3410 a library call and it's dead, don't scan the insns that perform the
3411 library call, so that the call's arguments are not marked live. */
3412 if (libcall_is_dead)
3413 {
3414 /* Record the death of the dest reg. */
3415 mark_set_regs (pbi, PATTERN (insn), insn);
3416
3417 insn = XEXP (note, 0);
3418 return PREV_INSN (insn);
3419 }
3420 else if (GET_CODE (PATTERN (insn)) == SET
3421 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3422 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3423 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3424 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3425 /* We have an insn to pop a constant amount off the stack.
3426 (Such insns use PLUS regardless of the direction of the stack,
3427 and any insn to adjust the stack by a constant is always a pop.)
3428 These insns, if not dead stores, have no effect on life. */
3429 ;
3430 else
3431 {
3432 /* Any regs live at the time of a call instruction must not go
3433 in a register clobbered by calls. Find all regs now live and
3434 record this for them. */
3435
3436 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3437 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3438 { REG_N_CALLS_CROSSED (i)++; });
3439
3440 /* Record sets. Do this even for dead instructions, since they
3441 would have killed the values if they hadn't been deleted. */
3442 mark_set_regs (pbi, PATTERN (insn), insn);
3443
3444 if (GET_CODE (insn) == CALL_INSN)
3445 {
3446 register int i;
3447 rtx note, cond;
3448
3449 cond = NULL_RTX;
3450 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3451 cond = COND_EXEC_TEST (PATTERN (insn));
3452
3453 /* Non-constant calls clobber memory. */
3454 if (! CONST_CALL_P (insn))
3455 free_EXPR_LIST_list (&pbi->mem_set_list);
3456
3457 /* There may be extra registers to be clobbered. */
3458 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3459 note;
3460 note = XEXP (note, 1))
3461 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3462 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3463 cond, insn, pbi->flags);
3464
3465 /* Calls change all call-used and global registers. */
3466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3467 if (call_used_regs[i] && ! global_regs[i]
3468 && ! fixed_regs[i])
3469 {
3470 /* We do not want REG_UNUSED notes for these registers. */
3471 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3472 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3473 }
3474 }
3475
3476 /* If an insn doesn't use CC0, it becomes dead since we assume
3477 that every insn clobbers it. So show it dead here;
3478 mark_used_regs will set it live if it is referenced. */
3479 pbi->cc0_live = 0;
3480
3481 /* Record uses. */
3482 if (! insn_is_dead)
3483 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3484
3485 /* Sometimes we may have inserted something before INSN (such as a move)
3486 when we make an auto-inc. So ensure we will scan those insns. */
3487 #ifdef AUTO_INC_DEC
3488 prev = PREV_INSN (insn);
3489 #endif
3490
3491 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3492 {
3493 register int i;
3494 rtx note, cond;
3495
3496 cond = NULL_RTX;
3497 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3498 cond = COND_EXEC_TEST (PATTERN (insn));
3499
3500 /* Calls use their arguments. */
3501 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3502 note;
3503 note = XEXP (note, 1))
3504 if (GET_CODE (XEXP (note, 0)) == USE)
3505 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3506 cond, insn);
3507
3508 /* The stack ptr is used (honorarily) by a CALL insn. */
3509 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3510
3511 /* Calls may also reference any of the global registers,
3512 so they are made live. */
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3514 if (global_regs[i])
3515 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3516 cond, insn);
3517 }
3518 }
3519
3520 /* On final pass, update counts of how many insns in which each reg
3521 is live. */
3522 if (flags & PROP_REG_INFO)
3523 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3524 { REG_LIVE_LENGTH (i)++; });
3525
3526 return prev;
3527 }
3528
3529 /* Initialize a propagate_block_info struct for public consumption.
3530 Note that the structure itself is opaque to this file, but that
3531 the user can use the regsets provided here. */
3532
3533 struct propagate_block_info *
3534 init_propagate_block_info (bb, live, local_set, flags)
3535 basic_block bb;
3536 regset live;
3537 regset local_set;
3538 int flags;
3539 {
3540 struct propagate_block_info *pbi = (struct propagate_block_info *) xmalloc (sizeof (*pbi));
3541
3542 pbi->bb = bb;
3543 pbi->reg_live = live;
3544 pbi->mem_set_list = NULL_RTX;
3545 pbi->local_set = local_set;
3546 pbi->cc0_live = 0;
3547 pbi->flags = flags;
3548
3549 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3550 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3551 else
3552 pbi->reg_next_use = NULL;
3553
3554 pbi->new_set = BITMAP_XMALLOC ();
3555
3556 #ifdef HAVE_conditional_execution
3557 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3558 free_reg_cond_life_info);
3559 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3560
3561 /* If this block ends in a conditional branch, for each register live
3562 from one side of the branch and not the other, record the register
3563 as conditionally dead. */
3564 if (GET_CODE (bb->end) == JUMP_INSN
3565 && condjump_p (bb->end)
3566 && ! simplejump_p (bb->end))
3567 {
3568 regset_head diff_head;
3569 regset diff = INITIALIZE_REG_SET (diff_head);
3570 basic_block bb_true, bb_false;
3571 rtx cond_true, cond_false;
3572 int i;
3573
3574 /* Identify the successor blocks. */
3575 bb_false = bb->succ->succ_next->dest;
3576 bb_true = bb->succ->dest;
3577 if (bb->succ->flags & EDGE_FALLTHRU)
3578 {
3579 basic_block t = bb_false;
3580 bb_false = bb_true;
3581 bb_true = t;
3582 }
3583 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3584 abort ();
3585
3586 /* Extract the condition from the branch. */
3587 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3588 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3589 GET_MODE (cond_true), XEXP (cond_true, 0),
3590 XEXP (cond_true, 1));
3591 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3592 {
3593 rtx t = cond_false;
3594 cond_false = cond_true;
3595 cond_true = t;
3596 }
3597
3598 /* Compute which registers lead different lives in the successors. */
3599 if (bitmap_operation (diff, bb_true->global_live_at_start,
3600 bb_false->global_live_at_start, BITMAP_XOR))
3601 {
3602 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3603 abort ();
3604 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3605
3606 /* For each such register, mark it conditionally dead. */
3607 EXECUTE_IF_SET_IN_REG_SET
3608 (diff, 0, i,
3609 {
3610 struct reg_cond_life_info *rcli;
3611 rtx cond;
3612
3613 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3614
3615 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3616 cond = cond_false;
3617 else
3618 cond = cond_true;
3619 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3620
3621 splay_tree_insert (pbi->reg_cond_dead, i,
3622 (splay_tree_value) rcli);
3623 });
3624 }
3625
3626 FREE_REG_SET (diff);
3627 }
3628 #endif
3629
3630 return pbi;
3631 }
3632
3633 /* Release a propagate_block_info struct. */
3634
3635 void
3636 free_propagate_block_info (pbi)
3637 struct propagate_block_info *pbi;
3638 {
3639 free_EXPR_LIST_list (&pbi->mem_set_list);
3640
3641 BITMAP_XFREE (pbi->new_set);
3642
3643 #ifdef HAVE_conditional_execution
3644 splay_tree_delete (pbi->reg_cond_dead);
3645 BITMAP_XFREE (pbi->reg_cond_reg);
3646 #endif
3647
3648 if (pbi->reg_next_use)
3649 free (pbi->reg_next_use);
3650
3651 free (pbi);
3652 }
3653
3654 /* Compute the registers live at the beginning of a basic block BB from
3655 those live at the end.
3656
3657 When called, REG_LIVE contains those live at the end. On return, it
3658 contains those live at the beginning.
3659
3660 LOCAL_SET, if non-null, will be set with all registers killed by
3661 this basic block. */
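/* Schematically (editorial note, not from the original sources), each
   insn is processed from last to first as

   live = (live - def (insn)) | use (insn)

   so that LIVE holds the live-at-start set once bb->head is reached. */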
3662
3663 void
3664 propagate_block (bb, live, local_set, flags)
3665 basic_block bb;
3666 regset live;
3667 regset local_set;
3668 int flags;
3669 {
3670 struct propagate_block_info *pbi;
3671 rtx insn, prev;
3672
3673 pbi = init_propagate_block_info (bb, live, local_set, flags);
3674
3675 if (flags & PROP_REG_INFO)
3676 {
3677 register int i;
3678
3679 /* Process the regs live at the end of the block.
3680 Mark them as not local to any one basic block. */
3681 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3682 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3683 }
3684
3685 /* Scan the block an insn at a time from end to beginning. */
3686
3687 for (insn = bb->end; ; insn = prev)
3688 {
3689 /* If this is a call to `setjmp' et al, warn if any
3690 non-volatile datum is live. */
3691 if ((flags & PROP_REG_INFO)
3692 && GET_CODE (insn) == NOTE
3693 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3694 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3695
3696 prev = propagate_one_insn (pbi, insn);
3697
3698 if (insn == bb->head)
3699 break;
3700 }
3701
3702 free_propagate_block_info (pbi);
3703 }
3704 \f
3705 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3706 (SET expressions whose destinations are registers dead after the insn).
3707 NEEDED is the regset that says which regs are alive after the insn.
3708
3709 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3710
3711 If X is the entire body of an insn, NOTES contains the reg notes
3712 pertaining to the insn. */
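/* For example (editorial illustration, not from the original sources):
   a (set (reg 100) ...) is dead when reg 100 is absent from
   PBI->reg_live, i.e. not needed after the insn, provided none of the
   exceptions checked below (global regs, stack or frame pointer,
   volatile source, etc.) apply. */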
3713
3714 static int
3715 insn_dead_p (pbi, x, call_ok, notes)
3716 struct propagate_block_info *pbi;
3717 rtx x;
3718 int call_ok;
3719 rtx notes ATTRIBUTE_UNUSED;
3720 {
3721 enum rtx_code code = GET_CODE (x);
3722
3723 #ifdef AUTO_INC_DEC
3724 /* If flow is invoked after reload, we must take existing AUTO_INC
3725 expressions into account. */
3726 if (reload_completed)
3727 {
3728 for ( ; notes; notes = XEXP (notes, 1))
3729 {
3730 if (REG_NOTE_KIND (notes) == REG_INC)
3731 {
3732 int regno = REGNO (XEXP (notes, 0));
3733
3734 /* Don't delete insns to set global regs. */
3735 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3736 || REGNO_REG_SET_P (pbi->reg_live, regno))
3737 return 0;
3738 }
3739 }
3740 }
3741 #endif
3742
3743 /* If setting something that's a reg or part of one,
3744 see if that register's altered value will be live. */
3745
3746 if (code == SET)
3747 {
3748 rtx r = SET_DEST (x);
3749
3750 #ifdef HAVE_cc0
3751 if (GET_CODE (r) == CC0)
3752 return ! pbi->cc0_live;
3753 #endif
3754
3755 /* A SET that is a subroutine call cannot be dead. */
3756 if (GET_CODE (SET_SRC (x)) == CALL)
3757 {
3758 if (! call_ok)
3759 return 0;
3760 }
3761
3762 /* Don't eliminate loads from volatile memory or volatile asms. */
3763 else if (volatile_refs_p (SET_SRC (x)))
3764 return 0;
3765
3766 if (GET_CODE (r) == MEM)
3767 {
3768 rtx temp;
3769
3770 if (MEM_VOLATILE_P (r))
3771 return 0;
3772
3773 /* Walk the set of memory locations we are currently tracking
3774 and see if one is an identical match to this memory location.
3775 If so, this memory write is dead (remember, we're walking
3776 backwards from the end of the block to the start). */
3777 temp = pbi->mem_set_list;
3778 while (temp)
3779 {
3780 if (rtx_equal_p (XEXP (temp, 0), r))
3781 return 1;
3782 temp = XEXP (temp, 1);
3783 }
3784 }
3785 else
3786 {
3787 while (GET_CODE (r) == SUBREG
3788 || GET_CODE (r) == STRICT_LOW_PART
3789 || GET_CODE (r) == ZERO_EXTRACT)
3790 r = XEXP (r, 0);
3791
3792 if (GET_CODE (r) == REG)
3793 {
3794 int regno = REGNO (r);
3795
3796 /* Obvious: if the register is live, the set is not dead. */
3797 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3798 return 0;
3799
3800 /* If this is a hard register, verify that subsequent
3801 words are not needed. */
3802 if (regno < FIRST_PSEUDO_REGISTER)
3803 {
3804 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3805
3806 while (--n > 0)
3807 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3808 return 0;
3809 }
3810
3811 /* Don't delete insns to set global regs. */
3812 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3813 return 0;
3814
3815 /* Make sure insns to set the stack pointer aren't deleted. */
3816 if (regno == STACK_POINTER_REGNUM)
3817 return 0;
3818
3819 /* Make sure insns to set the frame pointer aren't deleted. */
3820 if (regno == FRAME_POINTER_REGNUM
3821 && (! reload_completed || frame_pointer_needed))
3822 return 0;
3823 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3824 if (regno == HARD_FRAME_POINTER_REGNUM
3825 && (! reload_completed || frame_pointer_needed))
3826 return 0;
3827 #endif
3828
3829 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3830 /* Make sure insns to set arg pointer are never deleted
3831 (if the arg pointer isn't fixed, there will be a USE
3832 for it, so we can treat it normally). */
3833 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3834 return 0;
3835 #endif
3836
3837 /* Otherwise, the set is dead. */
3838 return 1;
3839 }
3840 }
3841 }
3842
3843 /* If performing several activities, insn is dead if each activity
3844 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3845 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3846 worth keeping. */
3847 else if (code == PARALLEL)
3848 {
3849 int i = XVECLEN (x, 0);
3850
3851 for (i--; i >= 0; i--)
3852 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3853 && GET_CODE (XVECEXP (x, 0, i)) != USE
3854 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3855 return 0;
3856
3857 return 1;
3858 }
3859
3860 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3861 is not necessarily true for hard registers. */
3862 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3863 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3864 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3865 return 1;
3866
3867 /* We do not check other CLOBBER or USE here. An insn consisting of just
3868 a CLOBBER or just a USE should not be deleted. */
3869 return 0;
3870 }
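
/* A sketch of how this predicate is consumed; this is roughly what
   propagate_one_insn does on a PROP_KILL_DEAD_CODE pass, with the
   surrounding guards abbreviated (illustration only, not compiled).  */
#if 0
  if ((pbi->flags & PROP_KILL_DEAD_CODE)
      && insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn)))
    {
      /* Flush the dead insn by turning it into a deleted note.  */
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (insn) = 0;
    }
#endif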
3871
3872 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3873 return 1 if the entire library call is dead.
3874 This is true if X copies a register (hard or pseudo)
3875 and if the hard return reg of the call insn is dead.
3876 (The caller should have tested the destination of X already for death.)
3877
3878 If this insn doesn't just copy a register, then we don't
3879 have an ordinary libcall. In that case, cse could not have
3880 managed to substitute the source for the dest later on,
3881 so we can assume the libcall is dead.
3882
3883 PBI->REG_LIVE is the bit vector of pseudoregs live around this insn.
3884 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3885
3886 static int
3887 libcall_dead_p (pbi, x, note, insn)
3888 struct propagate_block_info *pbi;
3889 rtx x;
3890 rtx note;
3891 rtx insn;
3892 {
3893 register RTX_CODE code = GET_CODE (x);
3894
3895 if (code == SET)
3896 {
3897 register rtx r = SET_SRC (x);
3898 if (GET_CODE (r) == REG)
3899 {
3900 rtx call = XEXP (note, 0);
3901 rtx call_pat;
3902 register int i;
3903
3904 /* Find the call insn. */
3905 while (call != insn && GET_CODE (call) != CALL_INSN)
3906 call = NEXT_INSN (call);
3907
3908 /* If there is none, do nothing special,
3909 since ordinary death handling can understand these insns. */
3910 if (call == insn)
3911 return 0;
3912
3913 /* See if the hard reg holding the value is dead.
3914 If this is a PARALLEL, find the call within it. */
3915 call_pat = PATTERN (call);
3916 if (GET_CODE (call_pat) == PARALLEL)
3917 {
3918 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3919 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3920 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3921 break;
3922
3923 /* This may be a library call that is returning a value
3924 via invisible pointer. Do nothing special, since
3925 ordinary death handling can understand these insns. */
3926 if (i < 0)
3927 return 0;
3928
3929 call_pat = XVECEXP (call_pat, 0, i);
3930 }
3931
3932 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3933 }
3934 }
3935 return 1;
3936 }
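
/* For reference, the libcall blocks handled above look roughly like
   this (schematic RTL; insn uids, registers and modes invented for the
   example):

	(insn 10 ...)				; REG_LIBCALL -> insn 14
	(call_insn 12 (set (reg:SI 0)
			   (call ...)))
	(insn 14 (set (reg:SI 101)
		      (reg:SI 0)))		; REG_RETVAL -> insn 10

   Given insn 14 and its REG_RETVAL note, we walk forward from insn 10
   to find the call, then ask whether the hard return register it sets
   is itself dead.  */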
3937
3938 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3939 live at function entry. Don't count global register variables, variables
3940 in registers that can be used for function arg passing, or variables in
3941 fixed hard registers. */
3942
3943 int
3944 regno_uninitialized (regno)
3945 int regno;
3946 {
3947 if (n_basic_blocks == 0
3948 || (regno < FIRST_PSEUDO_REGISTER
3949 && (global_regs[regno]
3950 || fixed_regs[regno]
3951 || FUNCTION_ARG_REGNO_P (regno))))
3952 return 0;
3953
3954 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3955 }
3956
3957 /* 1 if register REGNO was alive at a place where `setjmp' was called
3958 and was set more than once or is an argument.
3959 Such regs may be clobbered by `longjmp'. */
3960
3961 int
3962 regno_clobbered_at_setjmp (regno)
3963 int regno;
3964 {
3965 if (n_basic_blocks == 0)
3966 return 0;
3967
3968 return ((REG_N_SETS (regno) > 1
3969 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3970 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3971 }
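
/* A sketch of the intended consumer, modeled on the setjmp warning
   emitted from function.c; DECL stands for a user variable whose
   DECL_RTL is a pseudo register (illustration only, not compiled).  */
#if 0
  if (TREE_CODE (decl) == VAR_DECL
      && DECL_RTL (decl) != 0
      && GET_CODE (DECL_RTL (decl)) == REG
      && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
    warning_with_decl (decl,
		       "variable `%s' might be clobbered by `longjmp' or `vfork'");
#endif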
3972 \f
3973 /* INSN references memory, possibly using autoincrement addressing modes.
3974 Find any entries on the mem_set_list that need to be invalidated due
3975 to an address change. */
3976 static void
3977 invalidate_mems_from_autoinc (pbi, insn)
3978 struct propagate_block_info *pbi;
3979 rtx insn;
3980 {
3981 rtx note;
3982 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3983 {
3984 if (REG_NOTE_KIND (note) == REG_INC)
3985 {
3986 rtx temp = pbi->mem_set_list;
3987 rtx prev = NULL_RTX;
3988 rtx next;
3989
3990 while (temp)
3991 {
3992 next = XEXP (temp, 1);
3993 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3994 {
3995 /* Splice temp out of list. */
3996 if (prev)
3997 XEXP (prev, 1) = next;
3998 else
3999 pbi->mem_set_list = next;
4000 free_EXPR_LIST_node (temp);
4001 }
4002 else
4003 prev = temp;
4004 temp = next;
4005 }
4006 }
4007 }
4008 }
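
/* The prev/next walk above is the idiom used throughout this file for
   splicing entries out of a singly-linked EXPR_LIST in place.  Distilled
   into a stand-alone form it looks like this; DOOMED_P is a hypothetical
   predicate supplied by the caller (sketch only, not compiled).  */
#if 0
static void
example_splice_list (listp, doomed_p)
     rtx *listp;
     int (*doomed_p) PARAMS ((rtx));
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;
  rtx next;

  while (temp)
    {
      next = XEXP (temp, 1);
      if ((*doomed_p) (XEXP (temp, 0)))
	{
	  /* Splice temp out of the list.  */
	  if (prev)
	    XEXP (prev, 1) = next;
	  else
	    *listp = next;
	  free_EXPR_LIST_node (temp);
	}
      else
	prev = temp;
      temp = next;
    }
}
#endif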
4009
4010 /* Process the registers that are set within X; each such register is
4011 recorded by mark_set_1 as dead prior to this insn.
4012
4013 If INSN is nonzero, it is the insn being processed.
4014
4015 PBI->FLAGS is the set of operations to perform. */
4016
4017 static void
4018 mark_set_regs (pbi, x, insn)
4019 struct propagate_block_info *pbi;
4020 rtx x, insn;
4021 {
4022 rtx cond = NULL_RTX;
4023 enum rtx_code code;
4024
4025 retry:
4026 switch (code = GET_CODE (x))
4027 {
4028 case SET:
4029 case CLOBBER:
4030 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4031 return;
4032
4033 case COND_EXEC:
4034 cond = COND_EXEC_TEST (x);
4035 x = COND_EXEC_CODE (x);
4036 goto retry;
4037
4038 case PARALLEL:
4039 {
4040 register int i;
4041 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4042 {
4043 rtx sub = XVECEXP (x, 0, i);
4044 switch (code = GET_CODE (sub))
4045 {
4046 case COND_EXEC:
4047 if (cond != NULL_RTX)
4048 abort ();
4049
4050 cond = COND_EXEC_TEST (sub);
4051 sub = COND_EXEC_CODE (sub);
4052 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4053 break;
4054 /* FALLTHRU */
4055
4056 case SET:
4057 case CLOBBER:
4058 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4059 break;
4060
4061 default:
4062 break;
4063 }
4064 }
4065 break;
4066 }
4067
4068 default:
4069 break;
4070 }
4071 }
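
/* As an example of the shapes dispatched above, a predicated move on an
   IA-64-like target might look like this (schematic; register numbers
   and modes invented for the example):

	(cond_exec (ne (reg:BI p6) (const_int 0))
	  (set (reg:DI r1) (reg:DI r2)))

   The test is peeled off and the inner SET reaches mark_set_1 with the
   (ne p6 0) condition attached, so r1 is seen as only conditionally
   set.  */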
4072
4073 /* Process a single SET rtx, X. */
4074
4075 static void
4076 mark_set_1 (pbi, code, reg, cond, insn, flags)
4077 struct propagate_block_info *pbi;
4078 enum rtx_code code;
4079 rtx reg, cond, insn;
4080 int flags;
4081 {
4082 int regno_first = -1, regno_last = -1;
4083 int not_dead = 0;
4084 int i;
4085
4086 /* Some targets place small structures in registers for
4087 return values of functions. We have to detect this
4088 case specially here to get correct flow information. */
4089 if (GET_CODE (reg) == PARALLEL
4090 && GET_MODE (reg) == BLKmode)
4091 {
4092 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4093 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4094 return;
4095 }
4096
4097 /* Modifying just one hardware register of a multi-reg value or just a
4098 byte field of a register does not mean the value from before this insn
4099 is now dead. Of course, if it was dead after the insn, it is unused now. */
4100
4101 switch (GET_CODE (reg))
4102 {
4103 case ZERO_EXTRACT:
4104 case SIGN_EXTRACT:
4105 case STRICT_LOW_PART:
4106 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4107 do
4108 reg = XEXP (reg, 0);
4109 while (GET_CODE (reg) == SUBREG
4110 || GET_CODE (reg) == ZERO_EXTRACT
4111 || GET_CODE (reg) == SIGN_EXTRACT
4112 || GET_CODE (reg) == STRICT_LOW_PART);
4113 if (GET_CODE (reg) == MEM)
4114 break;
4115 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4116 /* FALLTHRU */
4117
4118 case REG:
4119 regno_last = regno_first = REGNO (reg);
4120 if (regno_first < FIRST_PSEUDO_REGISTER)
4121 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4122 break;
4123
4124 case SUBREG:
4125 if (GET_CODE (SUBREG_REG (reg)) == REG)
4126 {
4127 enum machine_mode outer_mode = GET_MODE (reg);
4128 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4129
4130 /* Identify the range of registers affected. This is moderately
4131 tricky for hard registers. See alter_subreg. */
4132
4133 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4134 if (regno_first < FIRST_PSEUDO_REGISTER)
4135 {
4136 #ifdef ALTER_HARD_SUBREG
4137 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4138 inner_mode, regno_first);
4139 #else
4140 regno_first += SUBREG_WORD (reg);
4141 #endif
4142 regno_last = (regno_first
4143 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4144
4145 /* Since we've just adjusted the register number ranges, make
4146 sure REG matches. Otherwise some_was_live will be clear
4147 when it shouldn't have been, and we'll create incorrect
4148 REG_UNUSED notes. */
4149 reg = gen_rtx_REG (outer_mode, regno_first);
4150 }
4151 else
4152 {
4153 /* If the number of words in the subreg is less than the number
4154 of words in the full register, we have a well-defined partial
4155 set. Otherwise the high bits are undefined.
4156
4157 This is only really applicable to pseudos, since we just took
4158 care of multi-word hard registers. */
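/* Concrete example, assuming UNITS_PER_WORD == 4: a
   (subreg:SI (reg:DI 100) 0) covers (4+3)/4 == 1 of the pseudo's
   (8+3)/4 == 2 words, so the store is a well-defined partial set and
   the old value of reg 100 may live on.  A paradoxical
   (subreg:DI (reg:SI 101) 0) covers at least as many words as the
   pseudo has, so the old value is entirely replaced.  */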
4159 if (((GET_MODE_SIZE (outer_mode)
4160 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4161 < ((GET_MODE_SIZE (inner_mode)
4162 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4163 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4164
4165 reg = SUBREG_REG (reg);
4166 }
4167 }
4168 else
4169 reg = SUBREG_REG (reg);
4170 break;
4171
4172 default:
4173 break;
4174 }
4175
4176 /* If this set is a MEM, then it kills any aliased writes.
4177 If this set is a REG, then it kills any MEMs which use the reg. */
4178 if (flags & PROP_SCAN_DEAD_CODE)
4179 {
4180 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4181 {
4182 rtx temp = pbi->mem_set_list;
4183 rtx prev = NULL_RTX;
4184 rtx next;
4185
4186 while (temp)
4187 {
4188 next = XEXP (temp, 1);
4189 if ((GET_CODE (reg) == MEM
4190 && output_dependence (XEXP (temp, 0), reg))
4191 || (GET_CODE (reg) == REG
4192 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4193 {
4194 /* Splice this entry out of the list. */
4195 if (prev)
4196 XEXP (prev, 1) = next;
4197 else
4198 pbi->mem_set_list = next;
4199 free_EXPR_LIST_node (temp);
4200 }
4201 else
4202 prev = temp;
4203 temp = next;
4204 }
4205 }
4206
4207 /* If the memory reference had embedded side effects (autoincrement
4208 address modes), then we may need to kill some entries on the
4209 memory set list. */
4210 if (insn && GET_CODE (reg) == MEM)
4211 invalidate_mems_from_autoinc (pbi, insn);
4212
4213 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4214 /* We do not know the size of a BLKmode store, so we do not track
4215 them for redundant store elimination. */
4216 && GET_MODE (reg) != BLKmode
4217 /* There are no REG_INC notes for SP, so we can't assume we'll see
4218 everything that invalidates it. To be safe, don't eliminate any
4219 stores through SP; none of them should be redundant anyway. */
4220 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4221 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4222 }
4223
4224 if (GET_CODE (reg) == REG
4225 && ! (regno_first == FRAME_POINTER_REGNUM
4226 && (! reload_completed || frame_pointer_needed))
4227 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4228 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4229 && (! reload_completed || frame_pointer_needed))
4230 #endif
4231 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4232 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4233 #endif
4234 )
4235 {
4236 int some_was_live = 0, some_was_dead = 0;
4237
4238 for (i = regno_first; i <= regno_last; ++i)
4239 {
4240 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4241 if (pbi->local_set)
4242 SET_REGNO_REG_SET (pbi->local_set, i);
4243 if (code != CLOBBER)
4244 SET_REGNO_REG_SET (pbi->new_set, i);
4245
4246 some_was_live |= needed_regno;
4247 some_was_dead |= ! needed_regno;
4248 }
4249
4250 #ifdef HAVE_conditional_execution
4251 /* Consider conditional death in deciding that the register needs
4252 a death note. */
4253 if (some_was_live
4254 /* The stack pointer is never dead. Well, not strictly true,
4255 but it's very difficult to tell from here. Hopefully
4256 combine_stack_adjustments will fix up the most egregious
4257 errors. */
4258 && regno_first != STACK_POINTER_REGNUM)
4259 {
4260 for (i = regno_first; i <= regno_last; ++i)
4261 if (! mark_regno_cond_dead (pbi, i, cond))
4262 not_dead = 1;
4263 }
4264 #endif
4265
4266 /* Additional data to record if this is the final pass. */
4267 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4268 | PROP_DEATH_NOTES | PROP_AUTOINC))
4269 {
4270 register rtx y;
4271 register int blocknum = pbi->bb->index;
4272
4273 y = NULL_RTX;
4274 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4275 {
4276 y = pbi->reg_next_use[regno_first];
4277
4278 /* The next use is no longer next, since a store intervenes. */
4279 for (i = regno_first; i <= regno_last; ++i)
4280 pbi->reg_next_use[i] = 0;
4281 }
4282
4283 if (flags & PROP_REG_INFO)
4284 {
4285 for (i = regno_first; i <= regno_last; ++i)
4286 {
4287 /* Count (weighted) references, stores, etc. This counts a
4288 register twice if it is modified, but that is correct. */
4289 REG_N_SETS (i) += 1;
4290 REG_N_REFS (i) += (optimize_size ? 1
4291 : pbi->bb->loop_depth + 1);
4292
4293 /* The insns where a reg is live are normally counted
4294 elsewhere, but we want the count to include the insn
4295 where the reg is set, and the normal counting mechanism
4296 would not count it. */
4297 REG_LIVE_LENGTH (i) += 1;
4298 }
4299
4300 /* If this is a hard reg, record this function uses the reg. */
4301 if (regno_first < FIRST_PSEUDO_REGISTER)
4302 {
4303 for (i = regno_first; i <= regno_last; i++)
4304 regs_ever_live[i] = 1;
4305 }
4306 else
4307 {
4308 /* Keep track of which basic blocks each reg appears in. */
4309 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4310 REG_BASIC_BLOCK (regno_first) = blocknum;
4311 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4312 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4313 }
4314 }
4315
4316 if (! some_was_dead)
4317 {
4318 if (flags & PROP_LOG_LINKS)
4319 {
4320 /* Make a logical link from the next following insn
4321 that uses this register, back to this insn.
4322 The following insns have already been processed.
4323
4324 We don't build a LOG_LINK for hard registers contained
4325 in ASM_OPERANDs. If these registers get replaced,
4326 we might wind up changing the semantics of the insn,
4327 even if reload can make what appear to be valid
4328 assignments later. */
4329 if (y && (BLOCK_NUM (y) == blocknum)
4330 && (regno_first >= FIRST_PSEUDO_REGISTER
4331 || asm_noperands (PATTERN (y)) < 0))
4332 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4333 }
4334 }
4335 else if (not_dead)
4336 ;
4337 else if (! some_was_live)
4338 {
4339 if (flags & PROP_REG_INFO)
4340 REG_N_DEATHS (regno_first) += 1;
4341
4342 if (flags & PROP_DEATH_NOTES)
4343 {
4344 /* Note that dead stores have already been deleted
4345 when possible. If we get here, we have found a
4346 dead store that cannot be eliminated (because the
4347 same insn does something useful). Indicate this
4348 by marking the reg being set as dying here. */
4349 REG_NOTES (insn)
4350 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4351 }
4352 }
4353 else
4354 {
4355 if (flags & PROP_DEATH_NOTES)
4356 {
4357 /* This is a case where we have a multi-word hard register
4358 and some, but not all, of the words of the register are
4359 needed in subsequent insns. Write REG_UNUSED notes
4360 for those parts that were not needed. This case should
4361 be rare. */
4362
4363 for (i = regno_first; i <= regno_last; ++i)
4364 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4365 REG_NOTES (insn)
4366 = alloc_EXPR_LIST (REG_UNUSED,
4367 gen_rtx_REG (reg_raw_mode[i], i),
4368 REG_NOTES (insn));
4369 }
4370 }
4371 }
4372
4373 /* Mark the register as being dead. */
4374 if (some_was_live
4375 /* The stack pointer is never dead. Well, not strictly true,
4376 but it's very difficult to tell from here. Hopefully
4377 combine_stack_adjustments will fix up the most egregious
4378 errors. */
4379 && regno_first != STACK_POINTER_REGNUM)
4380 {
4381 for (i = regno_first; i <= regno_last; ++i)
4382 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4383 }
4384 }
4385 else if (GET_CODE (reg) == REG)
4386 {
4387 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4388 pbi->reg_next_use[regno_first] = 0;
4389 }
4390
4391 /* If this is the last pass and this is a SCRATCH, show it will be dying
4392 here and count it. */
4393 else if (GET_CODE (reg) == SCRATCH)
4394 {
4395 if (flags & PROP_DEATH_NOTES)
4396 REG_NOTES (insn)
4397 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4398 }
4399 }
4400 \f
4401 #ifdef HAVE_conditional_execution
4402 /* Mark REGNO conditionally dead. Return true if the register is
4403 now unconditionally dead. */
4404
4405 static int
4406 mark_regno_cond_dead (pbi, regno, cond)
4407 struct propagate_block_info *pbi;
4408 int regno;
4409 rtx cond;
4410 {
4411 /* If this is a store to a predicate register, the value of the
4412 predicate is changing; we don't know that the predicate as seen
4413 before is the same as that seen after. Flush all dependent
4414 conditions from reg_cond_dead. This will make all such
4415 conditionally live registers unconditionally live. */
4416 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4417 flush_reg_cond_reg (pbi, regno);
4418
4419 /* If this is an unconditional store, remove any conditional
4420 life that may have existed. */
4421 if (cond == NULL_RTX)
4422 splay_tree_remove (pbi->reg_cond_dead, regno);
4423 else
4424 {
4425 splay_tree_node node;
4426 struct reg_cond_life_info *rcli;
4427 rtx ncond;
4428
4429 /* Otherwise this is a conditional set. Record that fact.
4430 It may have been conditionally used, or there may be a
4431 subsequent set with a complementary condition. */
4432
4433 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4434 if (node == NULL)
4435 {
4436 /* The register was unconditionally live previously.
4437 Record the current condition as the condition under
4438 which it is dead. */
4439 rcli = (struct reg_cond_life_info *)
4440 xmalloc (sizeof (*rcli));
4441 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4442 splay_tree_insert (pbi->reg_cond_dead, regno,
4443 (splay_tree_value) rcli);
4444
4445 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4446 REGNO (XEXP (cond, 0)));
4447
4448 /* Not unconditionally dead. */
4449 return 0;
4450 }
4451 else
4452 {
4453 /* The register was conditionally live previously.
4454 Add the new condition to the old. */
4455 rcli = (struct reg_cond_life_info *) node->value;
4456 ncond = rcli->condition;
4457 ncond = ior_reg_cond (ncond, cond);
4458
4459 /* If the register is now unconditionally dead,
4460 remove the entry in the splay_tree. */
4461 if (ncond == const1_rtx)
4462 splay_tree_remove (pbi->reg_cond_dead, regno);
4463 else
4464 {
4465 rcli->condition = ncond;
4466
4467 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4468 REGNO (XEXP (cond, 0)));
4469
4470 /* Not unconditionally dead. */
4471 return 0;
4472 }
4473 }
4474 }
4475
4476 return 1;
4477 }
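
/* Worked example (schematic): scanning backward across the pair

	(cond_exec (ne (reg:BI p6) (const_int 0)) (set (reg:DI r4) ...))
	(cond_exec (eq (reg:BI p6) (const_int 0)) (set (reg:DI r4) ...))

   the second insn is seen first and records r4 as dead under
   (eq p6 0).  When the first insn is processed, ior_reg_cond finds the
   complementary condition, yields const1_rtx, and r4 is removed from
   reg_cond_dead as unconditionally dead.  */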
4478
4479 /* Called from splay_tree_delete for pbi->reg_cond_dead. */
4480
4481 static void
4482 free_reg_cond_life_info (value)
4483 splay_tree_value value;
4484 {
4485 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4486 free_EXPR_LIST_list (&rcli->condition);
4487 free (rcli);
4488 }
4489
4490 /* Helper function for flush_reg_cond_reg. */
4491
4492 static int
4493 flush_reg_cond_reg_1 (node, data)
4494 splay_tree_node node;
4495 void *data;
4496 {
4497 struct reg_cond_life_info *rcli;
4498 int *xdata = (int *) data;
4499 unsigned int regno = xdata[0];
4500 rtx c, *prev;
4501
4502 /* Don't need to search if last flushed value was farther on in
4503 the in-order traversal. */
4504 if (xdata[1] >= (int) node->key)
4505 return 0;
4506
4507 /* Splice out portions of the expression that refer to regno. */
4508 rcli = (struct reg_cond_life_info *) node->value;
4509 c = *(prev = &rcli->condition);
4510 while (c)
4511 {
4512 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4513 {
4514 rtx next = XEXP (c, 1);
4515 free_EXPR_LIST_node (c);
4516 c = *prev = next;
4517 }
4518 else
4519 c = *(prev = &XEXP (c, 1));
4520 }
4521
4522 /* If the entire condition is now NULL, signal the node to be removed. */
4523 if (! rcli->condition)
4524 {
4525 xdata[1] = node->key;
4526 return -1;
4527 }
4528 else
4529 return 0;
4530 }
4531
4532 /* Flush all (sub) expressions referring to REGNO from PBI->REG_COND_DEAD. */
4533
4534 static void
4535 flush_reg_cond_reg (pbi, regno)
4536 struct propagate_block_info *pbi;
4537 int regno;
4538 {
4539 int pair[2];
4540
4541 pair[0] = regno;
4542 pair[1] = -1;
4543 while (splay_tree_foreach (pbi->reg_cond_dead,
4544 flush_reg_cond_reg_1, pair) == -1)
4545 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4546
4547 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4548 }
4549
4550 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4551 We actually use EXPR_LIST to chain the sub-expressions together
4552 instead of IOR because it's easier to manipulate and we have
4553 the lists.c functions to reuse nodes.
4554
4555 Return a new rtl expression as appropriate. */
4556
4557 static rtx
4558 ior_reg_cond (old, x)
4559 rtx old, x;
4560 {
4561 enum rtx_code x_code;
4562 rtx x_reg;
4563 rtx c;
4564
4565 /* We expect these conditions to be of the form (eq reg 0). */
4566 x_code = GET_CODE (x);
4567 if (GET_RTX_CLASS (x_code) != '<'
4568 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4569 || XEXP (x, 1) != const0_rtx)
4570 abort ();
4571
4572 /* Search the expression for an existing sub-expression of X_REG. */
4573 for (c = old; c ; c = XEXP (c, 1))
4574 {
4575 rtx y = XEXP (c, 0);
4576 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4577 {
4578 /* If we find X already present in OLD, we need do nothing. */
4579 if (GET_CODE (y) == x_code)
4580 return old;
4581
4582 /* If we find X being a complement of a condition in OLD,
4583 then the entire condition is true. */
4584 if (GET_CODE (y) == reverse_condition (x_code))
4585 return const1_rtx;
4586 }
4587 }
4588
4589 /* Otherwise just add to the chain. */
4590 return alloc_EXPR_LIST (0, x, old);
4591 }
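
/* Thus the disjunction (ne p6 0) | (eq p7 0) is represented by the
   chain (register numbers invented for the example):

	(expr_list (ne (reg:BI p6) (const_int 0))
	   (expr_list (eq (reg:BI p7) (const_int 0))
	      (nil)))

   and ior_reg_cond either returns OLD unchanged, collapses it to
   const1_rtx, or prepends one more node to such a chain.  */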
4592
4593 static rtx
4594 not_reg_cond (x)
4595 rtx x;
4596 {
4597 enum rtx_code x_code;
4598 rtx x_reg;
4599
4600 /* We expect these conditions to be of the form (eq reg 0). */
4601 x_code = GET_CODE (x);
4602 if (GET_RTX_CLASS (x_code) != '<'
4603 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4604 || XEXP (x, 1) != const0_rtx)
4605 abort ();
4606
4607 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4608 VOIDmode, x_reg, const0_rtx),
4609 NULL_RTX);
4610 }
4611
4612 static rtx
4613 nand_reg_cond (old, x)
4614 rtx old, x;
4615 {
4616 enum rtx_code x_code;
4617 rtx x_reg;
4618 rtx c, *prev;
4619
4620 /* We expect these conditions to be of the form (eq reg 0). */
4621 x_code = GET_CODE (x);
4622 if (GET_RTX_CLASS (x_code) != '<'
4623 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4624 || XEXP (x, 1) != const0_rtx)
4625 abort ();
4626
4627 /* Search the expression for an existing sub-expression of X_REG. */
4628
4629 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4630 {
4631 rtx y = XEXP (c, 0);
4632 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4633 {
4634 /* If we find X already present in OLD, then we need to
4635 splice it out. */
4636 if (GET_CODE (y) == x_code)
4637 {
4638 *prev = XEXP (c, 1);
4639 free_EXPR_LIST_node (c);
4640 return old ? old : const0_rtx;
4641 }
4642
4643 /* If we find X being a complement of a condition in OLD,
4644 then we need do nothing. */
4645 if (GET_CODE (y) == reverse_condition (x_code))
4646 return old;
4647 }
4648 }
4649
4650 /* Otherwise, by implication, the register in question is now live for
4651 the inverse of the condition X. */
4652 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4653 VOIDmode, x_reg, const0_rtx),
4654 old);
4655 }
4656 #endif /* HAVE_conditional_execution */
4657 \f
4658 #ifdef AUTO_INC_DEC
4659
4660 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4661 reference. */
4662
4663 static void
4664 find_auto_inc (pbi, x, insn)
4665 struct propagate_block_info *pbi;
4666 rtx x;
4667 rtx insn;
4668 {
4669 rtx addr = XEXP (x, 0);
4670 HOST_WIDE_INT offset = 0;
4671 rtx set;
4672
4673 /* Here we detect use of an index register which might be good for
4674 postincrement, postdecrement, preincrement, or predecrement. */
4675
4676 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4677 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4678
4679 if (GET_CODE (addr) == REG)
4680 {
4681 register rtx y;
4682 register int size = GET_MODE_SIZE (GET_MODE (x));
4683 rtx use;
4684 rtx incr;
4685 int regno = REGNO (addr);
4686
4687 /* Is the next use an increment that might make auto-increment? */
4688 if ((incr = pbi->reg_next_use[regno]) != 0
4689 && (set = single_set (incr)) != 0
4690 && GET_CODE (set) == SET
4691 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4692 /* Can't add side effects to jumps; if reg is spilled and
4693 reloaded, there's no way to store back the altered value. */
4694 && GET_CODE (insn) != JUMP_INSN
4695 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4696 && XEXP (y, 0) == addr
4697 && GET_CODE (XEXP (y, 1)) == CONST_INT
4698 && ((HAVE_POST_INCREMENT
4699 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4700 || (HAVE_POST_DECREMENT
4701 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4702 || (HAVE_PRE_INCREMENT
4703 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4704 || (HAVE_PRE_DECREMENT
4705 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4706 /* Make sure this reg appears only once in this insn. */
4707 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4708 use != 0 && use != (rtx) 1))
4709 {
4710 rtx q = SET_DEST (set);
4711 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4712 ? (offset ? PRE_INC : POST_INC)
4713 : (offset ? PRE_DEC : POST_DEC));
4714
4715 if (dead_or_set_p (incr, addr)
4716 /* Mustn't autoinc an eliminable register. */
4717 && (regno >= FIRST_PSEUDO_REGISTER
4718 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4719 {
4720 /* This is the simple case. Try to make the auto-inc. If
4721 we can't, we are done. Otherwise, we will do any
4722 needed updates below. */
4723 if (! validate_change (insn, &XEXP (x, 0),
4724 gen_rtx_fmt_e (inc_code, Pmode, addr),
4725 0))
4726 return;
4727 }
4728 else if (GET_CODE (q) == REG
4729 /* PREV_INSN used here to check the semi-open interval
4730 [insn,incr). */
4731 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4732 /* We must also check for sets of q as q may be
4733 a call clobbered hard register and there may
4734 be a call between PREV_INSN (insn) and incr. */
4735 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4736 {
4737 /* We have *p followed sometime later by q = p+size.
4738 Both p and q must be live afterward,
4739 and q is not used between INSN and its assignment.
4740 Change it to q = p, ...*q..., q = q+size.
4741 Then fall into the usual case. */
4742 rtx insns, temp;
4743
4744 start_sequence ();
4745 emit_move_insn (q, addr);
4746 insns = get_insns ();
4747 end_sequence ();
4748
4749 if (basic_block_for_insn)
4750 for (temp = insns; temp; temp = NEXT_INSN (temp))
4751 set_block_for_insn (temp, pbi->bb);
4752
4753 /* If we can't make the auto-inc, or can't make the
4754 replacement into Y, exit. There's no point in making
4755 the change below if we can't do the auto-inc and doing
4756 so is not correct in the pre-inc case. */
4757
4758 validate_change (insn, &XEXP (x, 0),
4759 gen_rtx_fmt_e (inc_code, Pmode, q),
4760 1);
4761 validate_change (incr, &XEXP (y, 0), q, 1);
4762 if (! apply_change_group ())
4763 return;
4764
4765 /* We now know we'll be doing this change, so emit the
4766 new insn(s) and do the updates. */
4767 emit_insns_before (insns, insn);
4768
4769 if (pbi->bb->head == insn)
4770 pbi->bb->head = insns;
4771
4772 /* INCR will become a NOTE and INSN won't contain a
4773 use of ADDR. If a use of ADDR was just placed in
4774 the insn before INSN, make that the next use.
4775 Otherwise, invalidate it. */
4776 if (GET_CODE (PREV_INSN (insn)) == INSN
4777 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4778 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4779 pbi->reg_next_use[regno] = PREV_INSN (insn);
4780 else
4781 pbi->reg_next_use[regno] = 0;
4782
4783 addr = q;
4784 regno = REGNO (q);
4785
4786 /* REGNO is now used in INCR which is below INSN, but it
4787 previously wasn't live here. If we don't mark it as
4788 live, we'll put a REG_DEAD note for it on this insn,
4789 which is incorrect. */
4790 SET_REGNO_REG_SET (pbi->reg_live, regno);
4791
4792 /* If there are any calls between INSN and INCR, show
4793 that REGNO now crosses them. */
4794 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4795 if (GET_CODE (temp) == CALL_INSN)
4796 REG_N_CALLS_CROSSED (regno)++;
4797 }
4798 else
4799 return;
4800
4801 /* If we haven't returned, it means we were able to make the
4802 auto-inc, so update the status. First, record that this insn
4803 has an implicit side effect. */
4804
4805 REG_NOTES (insn)
4806 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4807
4808 /* Modify the old increment-insn to simply copy
4809 the already-incremented value of our register. */
4810 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4811 abort ();
4812
4813 /* If that makes it a no-op (copying the register into itself) delete
4814 it so it won't appear to be a "use" and a "set" of this
4815 register. */
4816 if (SET_DEST (set) == addr)
4817 {
4818 /* If the original source was dead, it's dead now. */
4819 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4820 if (note && XEXP (note, 0) != addr)
4821 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4822
4823 PUT_CODE (incr, NOTE);
4824 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4825 NOTE_SOURCE_FILE (incr) = 0;
4826 }
4827
4828 if (regno >= FIRST_PSEUDO_REGISTER)
4829 {
4830 /* Count an extra reference to the reg. When a reg is
4831 incremented, spilling it is worse, so we want to make
4832 that less likely. */
4833 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4834
4835 /* Count the increment as a setting of the register,
4836 even though it isn't a SET in rtl. */
4837 REG_N_SETS (regno)++;
4838 }
4839 }
4840 }
4841 }
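
/* To make the transformation concrete: in the simple case above, with
   a 4-byte access and HAVE_POST_INCREMENT (schematic RTL),

	... (mem:SI (reg:SI r1)) ...
	(set (reg:SI r1) (plus:SI (reg:SI r1) (const_int 4)))

   becomes

	... (mem:SI (post_inc:SI (reg:SI r1))) ...	; plus a REG_INC note

   and the increment insn, reduced to a copy of r1 into itself, is
   flushed as a deleted note.  */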
4842 #endif /* AUTO_INC_DEC */
4843 \f
4844 static void
4845 mark_used_reg (pbi, reg, cond, insn)
4846 struct propagate_block_info *pbi;
4847 rtx reg;
4848 rtx cond ATTRIBUTE_UNUSED;
4849 rtx insn;
4850 {
4851 int regno = REGNO (reg);
4852 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4853 int some_was_dead = ! some_was_live;
4854 int some_not_set;
4855 int n;
4856
4857 /* A hard reg in a wide mode may really be multiple registers.
4858 If so, mark all of them just like the first. */
4859 if (regno < FIRST_PSEUDO_REGISTER)
4860 {
4861 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4862 while (--n > 0)
4863 {
4864 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4865 some_was_live |= needed_regno;
4866 some_was_dead |= ! needed_regno;
4867 }
4868 }
4869
4870 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4871 {
4872 /* Record where each reg is used, so when the reg is set we know
4873 the next insn that uses it. */
4874 pbi->reg_next_use[regno] = insn;
4875 }
4876
4877 if (pbi->flags & PROP_REG_INFO)
4878 {
4879 if (regno < FIRST_PSEUDO_REGISTER)
4880 {
4881 /* If this is a register we are going to try to eliminate,
4882 don't mark it live here. If we are successful in
4883 eliminating it, it need not be live unless it is used for
4884 pseudos, in which case it will have been set live when it
4885 was allocated to the pseudos. If the register will not
4886 be eliminated, reload will set it live at that point.
4887
4888 Otherwise, record that this function uses this register. */
4889 /* ??? The PPC backend tries to "eliminate" the pic
4890 register to itself. This should be fixed. In the
4891 meantime, hack around it. */
4892
4893 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4894 && (regno == FRAME_POINTER_REGNUM
4895 || regno == ARG_POINTER_REGNUM)))
4896 {
4897 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4898 do
4899 regs_ever_live[regno + --n] = 1;
4900 while (n > 0);
4901 }
4902 }
4903 else
4904 {
4905 /* Keep track of which basic block each reg appears in. */
4906
4907 register int blocknum = pbi->bb->index;
4908 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4909 REG_BASIC_BLOCK (regno) = blocknum;
4910 else if (REG_BASIC_BLOCK (regno) != blocknum)
4911 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4912
4913 /* Count (weighted) number of uses of each reg. */
4914 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4915 }
4916 }
4917
4918 /* Find out if any part of the register was set in this insn. */
4919 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
4920 if (regno < FIRST_PSEUDO_REGISTER)
4921 {
4922 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4923 while (--n > 0)
4924 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
4925 }
4926
4927 /* Record and count the insns in which a reg dies. If it is used in
4928 this insn and was dead below the insn then it dies in this insn.
4929 If it was set in this insn, we do not make a REG_DEAD note;
4930 likewise if we already made such a note. */
4931 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
4932 && some_was_dead
4933 && some_not_set)
4934 {
4935 /* Check for the case where the register dying partially
4936 overlaps the register set by this insn. */
4937 if (regno < FIRST_PSEUDO_REGISTER
4938 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4939 {
4940 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4941 while (--n >= 0)
4942 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
4943 }
4944
4945 /* If none of the words in the register is needed, make a REG_DEAD note.
4946 Otherwise, we must make partial REG_DEAD notes. */
4947 if (! some_was_live)
4948 {
4949 if ((pbi->flags & PROP_DEATH_NOTES)
4950 && ! find_regno_note (insn, REG_DEAD, regno))
4951 REG_NOTES (insn)
4952 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4953
4954 if (pbi->flags & PROP_REG_INFO)
4955 REG_N_DEATHS (regno)++;
4956 }
4957 else
4958 {
4959 /* Don't make a REG_DEAD note for a part of a register
4960 that is set in the insn. */
4961
4962 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4963 for (; n >= regno; n--)
4964 if (! REGNO_REG_SET_P (pbi->reg_live, n)
4965 && ! dead_or_set_regno_p (insn, n))
4966 REG_NOTES (insn)
4967 = alloc_EXPR_LIST (REG_DEAD,
4968 gen_rtx_REG (reg_raw_mode[n], n),
4969 REG_NOTES (insn));
4970 }
4971 }
4972
4973 SET_REGNO_REG_SET (pbi->reg_live, regno);
4974 if (regno < FIRST_PSEUDO_REGISTER)
4975 {
4976 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4977 while (--n > 0)
4978 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
4979 }
4980
4981 #ifdef HAVE_conditional_execution
4982 /* If this is a conditional use, record that fact. If it is later
4983 conditionally set, we'll know to kill the register. */
4984 if (cond != NULL_RTX)
4985 {
4986 splay_tree_node node;
4987 struct reg_cond_life_info *rcli;
4988 rtx ncond;
4989
4990 if (some_was_live)
4991 {
4992 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4993 if (node == NULL)
4994 {
4995 /* The register was unconditionally live previously.
4996 No need to do anything. */
4997 }
4998 else
4999 {
5000 /* The register was conditionally live previously.
5001 Subtract the new life cond from the old death cond. */
5002 rcli = (struct reg_cond_life_info *) node->value;
5003 ncond = rcli->condition;
5004 ncond = nand_reg_cond (ncond, cond);
5005
5006 /* If the register is now unconditionally live, remove the
5007 entry in the splay_tree. */
5008 if (ncond == const0_rtx)
5009 {
5010 rcli->condition = NULL_RTX;
5011 splay_tree_remove (pbi->reg_cond_dead, regno);
5012 }
5013 else
5014 rcli->condition = ncond;
5015 }
5016 }
5017 else
5018 {
5019 /* The register was not previously live at all. Record
5020 the condition under which it is still dead. */
5021 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5022 rcli->condition = not_reg_cond (cond);
5023 splay_tree_insert (pbi->reg_cond_dead, regno,
5024 (splay_tree_value) rcli);
5025 }
5026 }
5027 #endif
5028 }
5029
5030 /* Scan expression X and store a 1-bit in PBI->REG_LIVE for each reg
5031 it uses, thereby recording the registers needed by X as live at
5032 this point in the backward scan.
5033
5034 INSN is the containing instruction. If INSN is dead, this function
5035 is not called. */
5036
5037 static void
5038 mark_used_regs (pbi, x, cond, insn)
5039 struct propagate_block_info *pbi;
5040 rtx x, cond, insn;
5041 {
5042 register RTX_CODE code;
5043 register int regno;
5044 int flags = pbi->flags;
5045
5046 retry:
5047 code = GET_CODE (x);
5048 switch (code)
5049 {
5050 case LABEL_REF:
5051 case SYMBOL_REF:
5052 case CONST_INT:
5053 case CONST:
5054 case CONST_DOUBLE:
5055 case PC:
5056 case ADDR_VEC:
5057 case ADDR_DIFF_VEC:
5058 return;
5059
5060 #ifdef HAVE_cc0
5061 case CC0:
5062 pbi->cc0_live = 1;
5063 return;
5064 #endif
5065
5066 case CLOBBER:
5067 /* If we are clobbering a MEM, mark any registers inside the address
5068 as being used. */
5069 if (GET_CODE (XEXP (x, 0)) == MEM)
5070 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5071 return;
5072
5073 case MEM:
5074 /* Don't bother watching stores to mems if this is not the
5075 final pass. We'll not be deleting dead stores this round. */
5076 if (flags & PROP_SCAN_DEAD_CODE)
5077 {
5078 /* Invalidate the data for the last MEM stored, but only if MEM is
5079 something that can be stored into. */
5080 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5081 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5082 ; /* needn't clear the memory set list */
5083 else
5084 {
5085 rtx temp = pbi->mem_set_list;
5086 rtx prev = NULL_RTX;
5087 rtx next;
5088
5089 while (temp)
5090 {
5091 next = XEXP (temp, 1);
5092 if (anti_dependence (XEXP (temp, 0), x))
5093 {
5094 /* Splice temp out of the list. */
5095 if (prev)
5096 XEXP (prev, 1) = next;
5097 else
5098 pbi->mem_set_list = next;
5099 free_EXPR_LIST_node (temp);
5100 }
5101 else
5102 prev = temp;
5103 temp = next;
5104 }
5105 }
5106
5107 /* If the memory reference had embedded side effects (autoincrement
5108 address modes), then we may need to kill some entries on the
5109 memory set list. */
5110 if (insn)
5111 invalidate_mems_from_autoinc (pbi, insn);
5112 }
5113
5114 #ifdef AUTO_INC_DEC
5115 if (flags & PROP_AUTOINC)
5116 find_auto_inc (pbi, x, insn);
5117 #endif
5118 break;
5119
5120 case SUBREG:
5121 if (GET_CODE (SUBREG_REG (x)) == REG
5122 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5123 && (GET_MODE_SIZE (GET_MODE (x))
5124 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
5125 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
5126
5127 /* While we're here, optimize this case. */
5128 x = SUBREG_REG (x);
5129 if (GET_CODE (x) != REG)
5130 goto retry;
5131 /* FALLTHRU */
5132
5133 case REG:
5134 /* Seeing a register used (other than being set) => mark it as needed. */
5135 mark_used_reg (pbi, x, cond, insn);
5136 return;
5137
5138 case SET:
5139 {
5140 register rtx testreg = SET_DEST (x);
5141 int mark_dest = 0;
5142
5143 /* If storing into MEM, don't show it as being used. But do
5144 show the address as being used. */
5145 if (GET_CODE (testreg) == MEM)
5146 {
5147 #ifdef AUTO_INC_DEC
5148 if (flags & PROP_AUTOINC)
5149 find_auto_inc (pbi, testreg, insn);
5150 #endif
5151 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5152 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5153 return;
5154 }
5155
5156 /* Storing in STRICT_LOW_PART is like storing in a reg
5157 in that this SET might be dead, so ignore it in TESTREG.
5158 But in some other ways it is like using the reg.
5159
5160 Storing in a SUBREG or a bit field is like storing the entire
5161 register in that if the register's value is not used
5162 then this SET is not needed. */
5163 while (GET_CODE (testreg) == STRICT_LOW_PART
5164 || GET_CODE (testreg) == ZERO_EXTRACT
5165 || GET_CODE (testreg) == SIGN_EXTRACT
5166 || GET_CODE (testreg) == SUBREG)
5167 {
5168 if (GET_CODE (testreg) == SUBREG
5169 && GET_CODE (SUBREG_REG (testreg)) == REG
5170 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5171 && (GET_MODE_SIZE (GET_MODE (testreg))
5172 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
5173 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
5174
5175 /* Modifying a single register in an alternate mode
5176 does not use any of the old value. But these other
5177 ways of storing in a register do use the old value. */
5178 if (GET_CODE (testreg) == SUBREG
5179 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5180 ;
5181 else
5182 mark_dest = 1;
5183
5184 testreg = XEXP (testreg, 0);
5185 }
5186
5187 /* If this is a store into a register, recursively scan the
5188 value being stored. */
5189
5190 if ((GET_CODE (testreg) == PARALLEL
5191 && GET_MODE (testreg) == BLKmode)
5192 || (GET_CODE (testreg) == REG
5193 && (regno = REGNO (testreg),
5194 ! (regno == FRAME_POINTER_REGNUM
5195 && (! reload_completed || frame_pointer_needed)))
5196 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5197 && ! (regno == HARD_FRAME_POINTER_REGNUM
5198 && (! reload_completed || frame_pointer_needed))
5199 #endif
5200 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5201 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5202 #endif
5203 ))
5204 {
5205 if (mark_dest)
5206 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5207 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5208 return;
5209 }
5210 }
5211 break;
5212
5213 case ASM_OPERANDS:
5214 case UNSPEC_VOLATILE:
5215 case TRAP_IF:
5216 case ASM_INPUT:
5217 {
5218 /* Traditional and volatile asm instructions must be considered to use
5219 and clobber all hard registers, all pseudo-registers and all of
5220 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5221
5222 Consider for instance a volatile asm that changes the fpu rounding
5223 mode. An insn should not be moved across this even if it only uses
5224 pseudo-regs because it might give an incorrectly rounded result.
5225
5226 ?!? Unfortunately, marking all hard registers as live causes massive
5227 problems for the register allocator and marking all pseudos as live
5228 creates mountains of uninitialized variable warnings.
5229
5230 So for now, just clear the memory set list and mark any regs
5231 we can find in ASM_OPERANDS as used. */
5232 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5233 free_EXPR_LIST_list (&pbi->mem_set_list);
5234
5235 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5236 We cannot just fall through here since then we would be confused
5237 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
5238 traditional asms unlike their normal usage. */
5239 if (code == ASM_OPERANDS)
5240 {
5241 int j;
5242
5243 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5244 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
5245 }
5246 break;
5247 }
5248
5249 case COND_EXEC:
5250 if (cond != NULL_RTX)
5251 abort ();
5252
5253 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5254
5255 cond = COND_EXEC_TEST (x);
5256 x = COND_EXEC_CODE (x);
5257 goto retry;
5258
5259 case PHI:
5260 /* We _do_not_ want to scan operands of phi nodes. Operands of
5261 a phi function are evaluated only when control reaches this
5262 block along a particular edge. Therefore, regs that appear
5263 as arguments to phi should not be added to the global live at
5264 start. */
5265 return;
5266
5267 default:
5268 break;
5269 }
5270
5271 /* Recursively scan the operands of this expression. */
5272
5273 {
5274 register const char *fmt = GET_RTX_FORMAT (code);
5275 register int i;
5276
5277 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5278 {
5279 if (fmt[i] == 'e')
5280 {
5281 /* Tail recursive case: save a function call level. */
5282 if (i == 0)
5283 {
5284 x = XEXP (x, 0);
5285 goto retry;
5286 }
5287 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5288 }
5289 else if (fmt[i] == 'E')
5290 {
5291 register int j;
5292 for (j = 0; j < XVECLEN (x, i); j++)
5293 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
5294 }
5295 }
5296 }
5297 }
5298 \f
5299 #ifdef AUTO_INC_DEC
5300
5301 static int
5302 try_pre_increment_1 (pbi, insn)
5303 struct propagate_block_info *pbi;
5304 rtx insn;
5305 {
5306 /* Find the next use of this reg. If in same basic block,
5307 make it do pre-increment or pre-decrement if appropriate. */
5308 rtx x = single_set (insn);
5309 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5310 * INTVAL (XEXP (SET_SRC (x), 1)));
5311 int regno = REGNO (SET_DEST (x));
5312 rtx y = pbi->reg_next_use[regno];
5313 if (y != 0
5314 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5315 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5316 mode would be better. */
5317 && ! dead_or_set_p (y, SET_DEST (x))
5318 && try_pre_increment (y, SET_DEST (x), amount))
5319 {
5320 /* We have found a suitable auto-increment
5321 and already changed insn Y to do it.
5322 So flush this increment-instruction. */
5323 PUT_CODE (insn, NOTE);
5324 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5325 NOTE_SOURCE_FILE (insn) = 0;
5326 /* Count a reference to this reg for the increment
5327 insn we are deleting. When a reg is incremented,
5328 spilling it is worse, so we want to make that
5329 less likely. */
5330 if (regno >= FIRST_PSEUDO_REGISTER)
5331 {
5332 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
5333 REG_N_SETS (regno)++;
5334 }
5335 return 1;
5336 }
5337 return 0;
5338 }
5339
5340 /* Try to change INSN so that it does pre-increment or pre-decrement
5341 addressing on register REG in order to add AMOUNT to REG.
5342 AMOUNT is negative for pre-decrement.
5343 Returns 1 if the change could be made.
5344 This checks all about the validity of the result of modifying INSN. */
5345
5346 static int
5347 try_pre_increment (insn, reg, amount)
5348 rtx insn, reg;
5349 HOST_WIDE_INT amount;
5350 {
5351 register rtx use;
5352
5353 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5354 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5355 int pre_ok = 0;
5356 /* Nonzero if we can try to make a post-increment or post-decrement.
5357 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5358 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5359 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5360 int post_ok = 0;
5361
5362 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5363 int do_post = 0;
5364
5365 /* From the sign of increment, see which possibilities are conceivable
5366 on this target machine. */
5367 if (HAVE_PRE_INCREMENT && amount > 0)
5368 pre_ok = 1;
5369 if (HAVE_POST_INCREMENT && amount > 0)
5370 post_ok = 1;
5371
5372 if (HAVE_PRE_DECREMENT && amount < 0)
5373 pre_ok = 1;
5374 if (HAVE_POST_DECREMENT && amount < 0)
5375 post_ok = 1;
5376
5377 if (! (pre_ok || post_ok))
5378 return 0;
5379
5380 /* It is not safe to add a side effect to a jump insn
5381 because if the incremented register is spilled and must be reloaded
5382 there would be no way to store the incremented value back in memory. */
5383
5384 if (GET_CODE (insn) == JUMP_INSN)
5385 return 0;
5386
5387 use = 0;
5388 if (pre_ok)
5389 use = find_use_as_address (PATTERN (insn), reg, 0);
5390 if (post_ok && (use == 0 || use == (rtx) 1))
5391 {
5392 use = find_use_as_address (PATTERN (insn), reg, -amount);
5393 do_post = 1;
5394 }
5395
5396 if (use == 0 || use == (rtx) 1)
5397 return 0;
5398
5399 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5400 return 0;
5401
5402 /* See if this combination of instruction and addressing mode exists. */
5403 if (! validate_change (insn, &XEXP (use, 0),
5404 gen_rtx_fmt_e (amount > 0
5405 ? (do_post ? POST_INC : PRE_INC)
5406 : (do_post ? POST_DEC : PRE_DEC),
5407 Pmode, reg), 0))
5408 return 0;
5409
5410 /* Record that this insn now has an implicit side effect on REG. */
5411 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5412 return 1;
5413 }
5414
5415 #endif /* AUTO_INC_DEC */
5416 \f
5417 /* Find the place in the rtx X where REG is used as a memory address.
5418 Return the MEM rtx that so uses it.
5419 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5420 (plus REG (const_int PLUSCONST)).
5421
5422 If such an address does not appear, return 0.
5423 If REG appears more than once, or is used other than in such an address,
5424 return (rtx)1. */
5425
5426 rtx
5427 find_use_as_address (x, reg, plusconst)
5428 register rtx x;
5429 rtx reg;
5430 HOST_WIDE_INT plusconst;
5431 {
5432 enum rtx_code code = GET_CODE (x);
5433 const char *fmt = GET_RTX_FORMAT (code);
5434 register int i;
5435 register rtx value = 0;
5436 register rtx tem;
5437
5438 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5439 return x;
5440
5441 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5442 && XEXP (XEXP (x, 0), 0) == reg
5443 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5444 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5445 return x;
5446
5447 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5448 {
5449 /* If REG occurs inside a MEM used in a bit-field reference,
5450 that is unacceptable. */
5451 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5452 return (rtx) (HOST_WIDE_INT) 1;
5453 }
5454
5455 if (x == reg)
5456 return (rtx) (HOST_WIDE_INT) 1;
5457
5458 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5459 {
5460 if (fmt[i] == 'e')
5461 {
5462 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5463 if (value == 0)
5464 value = tem;
5465 else if (tem != 0)
5466 return (rtx) (HOST_WIDE_INT) 1;
5467 }
5468 else if (fmt[i] == 'E')
5469 {
5470 register int j;
5471 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5472 {
5473 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5474 if (value == 0)
5475 value = tem;
5476 else if (tem != 0)
5477 return (rtx) (HOST_WIDE_INT) 1;
5478 }
5479 }
5480 }
5481
5482 return value;
5483 }
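
/* For example, given
	PAT = (set (reg:SI r2) (mem:SI (plus:SI (reg:SI r1) (const_int 4))))
   find_use_as_address (PAT, r1, 4) returns the MEM rtx, while
   find_use_as_address (PAT, r1, 0) returns (rtx) 1, since r1 then
   occurs somewhere other than in a matching address.  (Register numbers
   invented for the example.)  */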
5484 \f
5485 /* Write information about registers and basic blocks into FILE.
5486 This is part of making a debugging dump. */
5487
5488 void
5489 dump_regset (r, outf)
5490 regset r;
5491 FILE *outf;
5492 {
5493 int i;
5494 if (r == NULL)
5495 {
5496 fputs (" (nil)", outf);
5497 return;
5498 }
5499
5500 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5501 {
5502 fprintf (outf, " %d", i);
5503 if (i < FIRST_PSEUDO_REGISTER)
5504 fprintf (outf, " [%s]",
5505 reg_names[i]);
5506 });
5507 }
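
/* For instance, a regset containing hard register 0 and pseudo 106
   dumps as something like " 0 [ax] 106" on an ia32 target; the
   bracketed names come from reg_names and are target-dependent.  */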
5508
5509 void
5510 debug_regset (r)
5511 regset r;
5512 {
5513 dump_regset (r, stderr);
5514 putc ('\n', stderr);
5515 }
5516
5517 void
5518 dump_flow_info (file)
5519 FILE *file;
5520 {
5521 register int i;
5522 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5523
5524 fprintf (file, "%d registers.\n", max_regno);
5525 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5526 if (REG_N_REFS (i))
5527 {
5528 enum reg_class class, altclass;
5529 fprintf (file, "\nRegister %d used %d times across %d insns",
5530 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5531 if (REG_BASIC_BLOCK (i) >= 0)
5532 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5533 if (REG_N_SETS (i))
5534 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5535 (REG_N_SETS (i) == 1) ? "" : "s");
5536 if (REG_USERVAR_P (regno_reg_rtx[i]))
5537 fprintf (file, "; user var");
5538 if (REG_N_DEATHS (i) != 1)
5539 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5540 if (REG_N_CALLS_CROSSED (i) == 1)
5541 fprintf (file, "; crosses 1 call");
5542 else if (REG_N_CALLS_CROSSED (i))
5543 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5544 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5545 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5546 class = reg_preferred_class (i);
5547 altclass = reg_alternate_class (i);
5548 if (class != GENERAL_REGS || altclass != ALL_REGS)
5549 {
5550 if (altclass == ALL_REGS || class == ALL_REGS)
5551 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5552 else if (altclass == NO_REGS)
5553 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5554 else
5555 fprintf (file, "; pref %s, else %s",
5556 reg_class_names[(int) class],
5557 reg_class_names[(int) altclass]);
5558 }
5559 if (REGNO_POINTER_FLAG (i))
5560 fprintf (file, "; pointer");
5561 fprintf (file, ".\n");
5562 }
5563
5564 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5565 for (i = 0; i < n_basic_blocks; i++)
5566 {
5567 register basic_block bb = BASIC_BLOCK (i);
5568 register edge e;
5569
5570 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5571 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5572
5573 fprintf (file, "Predecessors: ");
5574 for (e = bb->pred; e ; e = e->pred_next)
5575 dump_edge_info (file, e, 0);
5576
5577 fprintf (file, "\nSuccessors: ");
5578 for (e = bb->succ; e ; e = e->succ_next)
5579 dump_edge_info (file, e, 1);
5580
5581 fprintf (file, "\nRegisters live at start:");
5582 dump_regset (bb->global_live_at_start, file);
5583
5584 fprintf (file, "\nRegisters live at end:");
5585 dump_regset (bb->global_live_at_end, file);
5586
5587 putc('\n', file);
5588 }
5589
5590 putc('\n', file);
5591 }
5592
5593 void
5594 debug_flow_info ()
5595 {
5596 dump_flow_info (stderr);
5597 }
5598
5599 static void
5600 dump_edge_info (file, e, do_succ)
5601 FILE *file;
5602 edge e;
5603 int do_succ;
5604 {
5605 basic_block side = (do_succ ? e->dest : e->src);
5606
5607 if (side == ENTRY_BLOCK_PTR)
5608 fputs (" ENTRY", file);
5609 else if (side == EXIT_BLOCK_PTR)
5610 fputs (" EXIT", file);
5611 else
5612 fprintf (file, " %d", side->index);
5613
5614 if (e->flags)
5615 {
5616 static const char * const bitnames[] = {
5617 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5618 };
5619 int comma = 0;
5620 int i, flags = e->flags;
5621
5622 fputc (' ', file);
5623 fputc ('(', file);
5624 for (i = 0; flags; i++)
5625 if (flags & (1 << i))
5626 {
5627 flags &= ~(1 << i);
5628
5629 if (comma)
5630 fputc (',', file);
5631 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5632 fputs (bitnames[i], file);
5633 else
5634 fprintf (file, "%d", i);
5635 comma = 1;
5636 }
5637 fputc (')', file);
5638 }
5639 }
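
/* For reference, an edge printed by dump_edge_info looks like
   " 3 (fallthru,crit)": the destination (or source) block index,
   followed by the decoded flag bits in parentheses, or " ENTRY" /
   " EXIT" for the artificial blocks.  The index 3 and the flag
   combination shown here are only an illustration.  */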
5640
5641 \f
5642 /* Print out one basic block with live information at start and end. */
5643 void
5644 dump_bb (bb, outf)
5645 basic_block bb;
5646 FILE *outf;
5647 {
5648 rtx insn;
5649 rtx last;
5650 edge e;
5651
5652 fprintf (outf, ";; Basic block %d, loop depth %d",
5653 bb->index, bb->loop_depth);
5654 if (bb->eh_beg != -1 || bb->eh_end != -1)
5655 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5656 putc ('\n', outf);
5657
5658 fputs (";; Predecessors: ", outf);
5659 for (e = bb->pred; e ; e = e->pred_next)
5660 dump_edge_info (outf, e, 0);
5661 putc ('\n', outf);
5662
5663 fputs (";; Registers live at start:", outf);
5664 dump_regset (bb->global_live_at_start, outf);
5665 putc ('\n', outf);
5666
5667 for (insn = bb->head, last = NEXT_INSN (bb->end);
5668 insn != last;
5669 insn = NEXT_INSN (insn))
5670 print_rtl_single (outf, insn);
5671
5672 fputs (";; Registers live at end:", outf);
5673 dump_regset (bb->global_live_at_end, outf);
5674 putc ('\n', outf);
5675
5676 fputs (";; Successors: ", outf);
5677 for (e = bb->succ; e; e = e->succ_next)
5678 dump_edge_info (outf, e, 1);
5679 putc ('\n', outf);
5680 }
5681
5682 void
5683 debug_bb (bb)
5684 basic_block bb;
5685 {
5686 dump_bb (bb, stderr);
5687 }
5688
5689 void
5690 debug_bb_n (n)
5691 int n;
5692 {
5693 dump_bb (BASIC_BLOCK(n), stderr);
5694 }
5695
5696 /* Like print_rtl, but also print out live information for the start of each
5697 basic block. */
5698
5699 void
5700 print_rtl_with_bb (outf, rtx_first)
5701 FILE *outf;
5702 rtx rtx_first;
5703 {
5704 register rtx tmp_rtx;
5705
5706 if (rtx_first == 0)
5707 fprintf (outf, "(nil)\n");
5708 else
5709 {
5710 int i;
5711 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5712 int max_uid = get_max_uid ();
5713 basic_block *start = (basic_block *)
5714 xcalloc (max_uid, sizeof (basic_block));
5715 basic_block *end = (basic_block *)
5716 xcalloc (max_uid, sizeof (basic_block));
5717 enum bb_state *in_bb_p = (enum bb_state *)
5718 xcalloc (max_uid, sizeof (enum bb_state));
5719
5720 for (i = n_basic_blocks - 1; i >= 0; i--)
5721 {
5722 basic_block bb = BASIC_BLOCK (i);
5723 rtx x;
5724
5725 start[INSN_UID (bb->head)] = bb;
5726 end[INSN_UID (bb->end)] = bb;
5727 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5728 {
5729 enum bb_state state = IN_MULTIPLE_BB;
5730 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5731 state = IN_ONE_BB;
5732 in_bb_p[INSN_UID(x)] = state;
5733
5734 if (x == bb->end)
5735 break;
5736 }
5737 }
5738
5739 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5740 {
5741 int did_output;
5742 basic_block bb;
5743
5744 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5745 {
5746 fprintf (outf, ";; Start of basic block %d, registers live:",
5747 bb->index);
5748 dump_regset (bb->global_live_at_start, outf);
5749 putc ('\n', outf);
5750 }
5751
5752 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5753 && GET_CODE (tmp_rtx) != NOTE
5754 && GET_CODE (tmp_rtx) != BARRIER)
5755 fprintf (outf, ";; Insn is not within a basic block\n");
5756 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5757 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5758
5759 did_output = print_rtl_single (outf, tmp_rtx);
5760
5761 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5762 {
5763 fprintf (outf, ";; End of basic block %d, registers live:\n",
5764 bb->index);
5765 dump_regset (bb->global_live_at_end, outf);
5766 putc ('\n', outf);
5767 }
5768
5769 if (did_output)
5770 putc ('\n', outf);
5771 }
5772
5773 free (start);
5774 free (end);
5775 free (in_bb_p);
5776 }
5777
5778 if (current_function_epilogue_delay_list != 0)
5779 {
5780 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5781 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5782 tmp_rtx = XEXP (tmp_rtx, 1))
5783 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5784 }
5785 }
5786
5787 /* Compute dominator relationships using new flow graph structures. */
5788 void
5789 compute_flow_dominators (dominators, post_dominators)
5790 sbitmap *dominators;
5791 sbitmap *post_dominators;
5792 {
5793 int bb;
5794 sbitmap *temp_bitmap;
5795 edge e;
5796 basic_block *worklist, *workend, *qin, *qout;
5797 int qlen;
5798
5799 /* Allocate a worklist array/queue. Entries are only added to the
5800 list if they were not already on the list. So the size is
5801 bounded by the number of basic blocks. */
5802 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5803 workend = &worklist[n_basic_blocks];
5804
5805 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5806 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5807
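  /* At the fixpoint, the bitmaps computed below satisfy the usual
     forward dataflow equations (noted here for reference):

	 dom (b) = { b } union ( intersect over p in pred (b) of dom (p) )

     where the intersection over the predecessors of a successor of
     the ENTRY block is taken to be the empty set.  The worklist
     iteration refines the initial all-ones solution until nothing
     changes.  */
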
5808 if (dominators)
5809 {
5810 /* The optimistic setting of dominators requires us to put every
5811 block on the work list initially. */
5812 qin = qout = worklist;
5813 for (bb = 0; bb < n_basic_blocks; bb++)
5814 {
5815 *qin++ = BASIC_BLOCK (bb);
5816 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5817 }
5818 qlen = n_basic_blocks;
5819 qin = worklist;
5820
5821 /* We want a maximal solution, so initially assume everything dominates
5822 everything else. */
5823 sbitmap_vector_ones (dominators, n_basic_blocks);
5824
5825 /* Mark successors of the entry block so we can identify them below. */
5826 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5827 e->dest->aux = ENTRY_BLOCK_PTR;
5828
5829 /* Iterate until the worklist is empty. */
5830 while (qlen)
5831 {
5832 /* Take the first entry off the worklist. */
5833 basic_block b = *qout++;
5834 if (qout >= workend)
5835 qout = worklist;
5836 qlen--;
5837
5838 bb = b->index;
5839
5840 /* Compute the intersection of the dominators of all the
5841 predecessor blocks.
5842
5843 If one of the predecessor blocks is the ENTRY block, then the
5844 intersection of the dominators of the predecessor blocks is
5845 defined as the null set. We can identify such blocks by the
5846 special value in the AUX field in the block structure. */
5847 if (b->aux == ENTRY_BLOCK_PTR)
5848 {
5849 /* Do not clear the aux field for blocks which are
5850 successors of the ENTRY block. That way we never
5851 add them to the worklist again.
5852
5853 The intersect of dominators of the preds of this block is
5854 defined as the null set. */
5855 sbitmap_zero (temp_bitmap[bb]);
5856 }
5857 else
5858 {
5859 /* Clear the aux field of this block so it can be added to
5860 the worklist again if necessary. */
5861 b->aux = NULL;
5862 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5863 }
5864
5865 /* Make sure each block always dominates itself. */
5866 SET_BIT (temp_bitmap[bb], bb);
5867
5868 /* If the out state of this block changed, then we need to
5869 add the successors of this block to the worklist if they
5870 are not already on the worklist. */
5871 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5872 {
5873 for (e = b->succ; e; e = e->succ_next)
5874 {
5875 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5876 {
5877 *qin++ = e->dest;
5878 if (qin >= workend)
5879 qin = worklist;
5880 qlen++;
5881
5882 e->dest->aux = e;
5883 }
5884 }
5885 }
5886 }
5887 }
5888
5889 if (post_dominators)
5890 {
5891 /* The optimistic setting of post dominators requires us to put every
5892 block on the work list initially. */
5893 qin = qout = worklist;
5894 for (bb = 0; bb < n_basic_blocks; bb++)
5895 {
5896 *qin++ = BASIC_BLOCK (bb);
5897 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5898 }
5899 qlen = n_basic_blocks;
5900 qin = worklist;
5901
5902 /* We want a maximal solution, so initially assume everything post
5903 dominates everything else. */
5904 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5905
5906 /* Mark predecessors of the exit block so we can identify them below. */
5907 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5908 e->src->aux = EXIT_BLOCK_PTR;
5909
5910 /* Iterate until the worklist is empty. */
5911 while (qlen)
5912 {
5913 /* Take the first entry off the worklist. */
5914 basic_block b = *qout++;
5915 if (qout >= workend)
5916 qout = worklist;
5917 qlen--;
5918
5919 bb = b->index;
5920
5921 /* Compute the intersection of the post dominators of all the
5922 successor blocks.
5923
5924 If one of the successor blocks is the EXIT block, then the
5925 intersection of the dominators of the successor blocks is
5926 defined as the null set. We can identify such blocks by the
5927 special value in the AUX field in the block structure. */
5928 if (b->aux == EXIT_BLOCK_PTR)
5929 {
5930 /* Do not clear the aux field for blocks which are
5931 predecessors of the EXIT block. That way we never
5932 add them to the worklist again.
5933
5934 The intersect of dominators of the succs of this block is
5935 defined as the null set. */
5936 sbitmap_zero (temp_bitmap[bb]);
5937 }
5938 else
5939 {
5940 /* Clear the aux field of this block so it can be added to
5941 the worklist again if necessary. */
5942 b->aux = NULL;
5943 sbitmap_intersection_of_succs (temp_bitmap[bb],
5944 post_dominators, bb);
5945 }
5946
5947 /* Make sure each block always post dominates itself. */
5948 SET_BIT (temp_bitmap[bb], bb);
5949
5950 /* If the out state of this block changed, then we need to
5951 add the predecessors of this block to the worklist if they
5952 are not already on the worklist. */
5953 if (sbitmap_a_and_b (post_dominators[bb],
5954 post_dominators[bb],
5955 temp_bitmap[bb]))
5956 {
5957 for (e = b->pred; e; e = e->pred_next)
5958 {
5959 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5960 {
5961 *qin++ = e->src;
5962 if (qin >= workend)
5963 qin = worklist;
5964 qlen++;
5965
5966 e->src->aux = e;
5967 }
5968 }
5969 }
5970 }
5971 }
5972
5973 free (worklist);
5974 free (temp_bitmap);
5975 }
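
/* A minimal calling sketch for the routine above (illustrative only):

       sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       compute_flow_dominators (dom, NULL);
       ...
       sbitmap_vector_free (dom);

   Passing NULL for DOMINATORS or POST_DOMINATORS skips that half of
   the computation; flow_loops_find below uses exactly this pattern.  */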
5976
5977 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5978
5979 void
5980 compute_immediate_dominators (idom, dominators)
5981 int *idom;
5982 sbitmap *dominators;
5983 {
5984 sbitmap *tmp;
5985 int b;
5986
5987 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5988
5989 /* Begin with tmp(n) = dom(n) - { n }. */
5990 for (b = n_basic_blocks; --b >= 0; )
5991 {
5992 sbitmap_copy (tmp[b], dominators[b]);
5993 RESET_BIT (tmp[b], b);
5994 }
5995
5996 /* Subtract out all of our dominator's dominators. */
5997 for (b = n_basic_blocks; --b >= 0; )
5998 {
5999 sbitmap tmp_b = tmp[b];
6000 int s;
6001
6002 for (s = n_basic_blocks; --s >= 0; )
6003 if (TEST_BIT (tmp_b, s))
6004 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6005 }
6006
6007 /* Find the one bit set in the bitmap and put it in the output array. */
6008 for (b = n_basic_blocks; --b >= 0; )
6009 {
6010 int t;
6011 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6012 }
6013
6014 sbitmap_vector_free (tmp);
6015 }
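
/* Sketch of how the two routines above combine (the dominator sets
   and block indices here are hypothetical):

       sbitmap *dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
       int *idom = (int *) xmalloc (n_basic_blocks * sizeof (int));
       compute_flow_dominators (dom, NULL);
       compute_immediate_dominators (idom, dom);

   If dom (3) = {0, 1, 3}, then tmp[3] starts as {0, 1}; subtracting
   tmp[1] = {0} removes block 0, leaving the single bit 1, so
   idom[3] == 1.  */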
6016
6017 /* Recompute register set/reference counts immediately prior to register
6018 allocation.
6019
6020 This avoids problems with set/reference counts changing to/from values
6021 which have special meanings to the register allocators.
6022
6023 Additionally, the reference counts are the primary component used by the
6024 register allocators to prioritize pseudos for allocation to hard regs.
6025 More accurate reference counts generally lead to better register allocation.
6026
6027 F is the first insn to be scanned.
6028
6029 LOOP_STEP denotes how much loop_depth should be incremented per
6030 loop nesting level in order to increase the ref count more for
6031 references in a loop.
6032
6033 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6034 possibly other information which is used by the register allocators. */
6035
6036 void
6037 recompute_reg_usage (f, loop_step)
6038 rtx f ATTRIBUTE_UNUSED;
6039 int loop_step ATTRIBUTE_UNUSED;
6040 {
6041 allocate_reg_life_data ();
6042 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6043 }
6044
6045 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6046 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6047 of the number of registers that died. */
6048
6049 int
6050 count_or_remove_death_notes (blocks, kill)
6051 sbitmap blocks;
6052 int kill;
6053 {
6054 int i, count = 0;
6055
6056 for (i = n_basic_blocks - 1; i >= 0; --i)
6057 {
6058 basic_block bb;
6059 rtx insn;
6060
6061 if (blocks && ! TEST_BIT (blocks, i))
6062 continue;
6063
6064 bb = BASIC_BLOCK (i);
6065
6066 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6067 {
6068 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6069 {
6070 rtx *pprev = &REG_NOTES (insn);
6071 rtx link = *pprev;
6072
6073 while (link)
6074 {
6075 switch (REG_NOTE_KIND (link))
6076 {
6077 case REG_DEAD:
6078 if (GET_CODE (XEXP (link, 0)) == REG)
6079 {
6080 rtx reg = XEXP (link, 0);
6081 int n;
6082
6083 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6084 n = 1;
6085 else
6086 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6087 count += n;
6088 }
6089 /* FALLTHRU */
6090
6091 case REG_UNUSED:
6092 if (kill)
6093 {
6094 rtx next = XEXP (link, 1);
6095 free_EXPR_LIST_node (link);
6096 *pprev = link = next;
6097 break;
6098 }
6099 /* FALLTHRU */
6100
6101 default:
6102 pprev = &XEXP (link, 1);
6103 link = *pprev;
6104 break;
6105 }
6106 }
6107 }
6108
6109 if (insn == bb->end)
6110 break;
6111 }
6112 }
6113
6114 return count;
6115 }
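
/* For instance, a pass that only needs the count can pass zero for
   KILL to leave the notes in place (a usage sketch, not taken from a
   specific caller):

       int dead = count_or_remove_death_notes (NULL, 0);

   while a nonzero KILL deletes the REG_DEAD and REG_UNUSED notes from
   the selected blocks as they are counted.  */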
6116
6117 /* Record INSN's block as BB. */
6118
6119 void
6120 set_block_for_insn (insn, bb)
6121 rtx insn;
6122 basic_block bb;
6123 {
6124 size_t uid = INSN_UID (insn);
6125 if (uid >= basic_block_for_insn->num_elements)
6126 {
6127 int new_size;
6128
6129 /* Add one-eighth the size so we don't keep calling xrealloc. */
6130 new_size = uid + (uid + 7) / 8;
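      /* For example, uid == 800 gives new_size == 800 + 100 == 900.  */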
6131
6132 VARRAY_GROW (basic_block_for_insn, new_size);
6133 }
6134 VARRAY_BB (basic_block_for_insn, uid) = bb;
6135 }
6136
6137 /* Record INSN's block number as BB. */
6138 /* ??? This has got to go. */
6139
6140 void
6141 set_block_num (insn, bb)
6142 rtx insn;
6143 int bb;
6144 {
6145 set_block_for_insn (insn, BASIC_BLOCK (bb));
6146 }
6147 \f
6148 /* Verify CFG consistency. This function checks some CFG invariants and
6149 aborts when something is wrong. The hope is that it will help convert
6150 many optimization passes to preserve a consistent CFG.
6151
6152 Currently it performs the following checks:
6153
6154 - head/end pointers of each basic block
6155 - overlapping of basic blocks
6156 - edge list correctness
6157 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6158 - tails of basic blocks (ensure that the boundary is necessary)
6159 - scans the body of each basic block for JUMP_INSN, CODE_LABEL
6160 and NOTE_INSN_BASIC_BLOCK
6161 - checks that all insns are in basic blocks
6162 (except the switch handling code, barriers and notes)
6163 - checks that all returns are followed by barriers
6164
6165 In the future it could be extended to check a lot of other things
6166 (reachability of basic blocks, life information, etc.). */
6167
6168 void
6169 verify_flow_info ()
6170 {
6171 const int max_uid = get_max_uid ();
6172 const rtx rtx_first = get_insns ();
6173 basic_block *bb_info;
6174 rtx x;
6175 int i, err = 0;
6176
6177 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6178
6179 /* First pass check head/end pointers and set bb_info array used by
6180 later passes. */
6181 for (i = n_basic_blocks - 1; i >= 0; i--)
6182 {
6183 basic_block bb = BASIC_BLOCK (i);
6184
6185 /* Check the head pointer and make sure that it is pointing into
6186 insn list. */
6187 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6188 if (x == bb->head)
6189 break;
6190 if (!x)
6191 {
6192 error ("Head insn %d for block %d not found in the insn stream.",
6193 INSN_UID (bb->head), bb->index);
6194 err = 1;
6195 }
6196
6197 /* Check the end pointer and make sure that it is pointing into
6198 insn list. */
6199 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6200 {
6201 if (bb_info[INSN_UID (x)] != NULL)
6202 {
6203 error ("Insn %d is in multiple basic blocks (%d and %d)",
6204 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6205 err = 1;
6206 }
6207 bb_info[INSN_UID (x)] = bb;
6208
6209 if (x == bb->end)
6210 break;
6211 }
6212 if (!x)
6213 {
6214 error ("End insn %d for block %d not found in the insn stream.",
6215 INSN_UID (bb->end), bb->index);
6216 err = 1;
6217 }
6218 }
6219
6220 /* Now check the basic blocks (boundaries etc.) */
6221 for (i = n_basic_blocks - 1; i >= 0; i--)
6222 {
6223 basic_block bb = BASIC_BLOCK (i);
6224 /* Check correctness of edge lists. */
6225 edge e;
6226
6227 e = bb->succ;
6228 while (e)
6229 {
6230 if (e->src != bb)
6231 {
6232 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6233 bb->index);
6234 fprintf (stderr, "Predecessor: ");
6235 dump_edge_info (stderr, e, 0);
6236 fprintf (stderr, "\nSuccessor: ");
6237 dump_edge_info (stderr, e, 1);
6238 fflush (stderr);
6239 err = 1;
6240 }
6241 if (e->dest != EXIT_BLOCK_PTR)
6242 {
6243 edge e2 = e->dest->pred;
6244 while (e2 && e2 != e)
6245 e2 = e2->pred_next;
6246 if (!e2)
6247 {
6248 error ("Basic block %i edge lists are corrupted", bb->index);
6249 err = 1;
6250 }
6251 }
6252 e = e->succ_next;
6253 }
6254
6255 e = bb->pred;
6256 while (e)
6257 {
6258 if (e->dest != bb)
6259 {
6260 error ("Basic block %d pred edge is corrupted", bb->index);
6261 fputs ("Predecessor: ", stderr);
6262 dump_edge_info (stderr, e, 0);
6263 fputs ("\nSuccessor: ", stderr);
6264 dump_edge_info (stderr, e, 1);
6265 fputc ('\n', stderr);
6266 err = 1;
6267 }
6268 if (e->src != ENTRY_BLOCK_PTR)
6269 {
6270 edge e2 = e->src->succ;
6271 while (e2 && e2 != e)
6272 e2 = e2->succ_next;
6273 if (!e2)
6274 {
6275 error ("Basic block %i edge lists are corrupted", bb->index);
6276 err = 1;
6277 }
6278 }
6279 e = e->pred_next;
6280 }
6281
6282 /* OK, the pointers are correct. Now check the header of the basic
6283 block. It ought to contain an optional CODE_LABEL followed
6284 by the NOTE_INSN_BASIC_BLOCK note. */
6285 x = bb->head;
6286 if (GET_CODE (x) == CODE_LABEL)
6287 {
6288 if (bb->end == x)
6289 {
6290 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6291 bb->index);
6292 err = 1;
6293 }
6294 x = NEXT_INSN (x);
6295 }
6296 if (GET_CODE (x) != NOTE
6297 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6298 || NOTE_BASIC_BLOCK (x) != bb)
6299 {
6300 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6301 bb->index);
6302 err = 1;
6303 }
6304
6305 if (bb->end == x)
6306 {
6307 /* Do checks for empty blocks here */
6308 }
6309 else
6310 {
6311 x = NEXT_INSN (x);
6312 while (x)
6313 {
6314 if (GET_CODE (x) == NOTE
6315 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6316 {
6317 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6318 INSN_UID (x), bb->index);
6319 err = 1;
6320 }
6321
6322 if (x == bb->end)
6323 break;
6324
6325 if (GET_CODE (x) == JUMP_INSN
6326 || GET_CODE (x) == CODE_LABEL
6327 || GET_CODE (x) == BARRIER)
6328 {
6329 error ("In basic block %d:", bb->index);
6330 fatal_insn ("Flow control insn inside a basic block", x);
6331 }
6332
6333 x = NEXT_INSN (x);
6334 }
6335 }
6336 }
6337
6338 x = rtx_first;
6339 while (x)
6340 {
6341 if (!bb_info[INSN_UID (x)])
6342 {
6343 switch (GET_CODE (x))
6344 {
6345 case BARRIER:
6346 case NOTE:
6347 break;
6348
6349 case CODE_LABEL:
6350 /* An addr_vec is placed outside any basic block. */
6351 if (NEXT_INSN (x)
6352 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6353 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6354 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6355 {
6356 x = NEXT_INSN (x);
6357 }
6358
6359 /* But in any case, non-deletable labels can appear anywhere. */
6360 break;
6361
6362 default:
6363 fatal_insn ("Insn outside basic block", x);
6364 }
6365 }
6366
6367 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6368 && GET_CODE (x) == JUMP_INSN
6369 && returnjump_p (x) && ! condjump_p (x)
6370 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6371 fatal_insn ("Return not followed by barrier", x);
6372
6373 x = NEXT_INSN (x);
6374 }
6375
6376 if (err)
6377 abort ();
6378
6379 /* Clean up. */
6380 free (bb_info);
6381 }
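
/* A convenient discipline (sketched here, not mandated anywhere in
   this file) is to run the verifier after each pass that edits the
   insn chain or the edge lists:

       some_cfg_transforming_pass ();
       verify_flow_info ();

   All detected problems are reported via error () first; the abort
   only happens once the whole CFG has been checked.  */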
6382 \f
6383 /* Functions to access an edge list with a vector representation.
6384 Enough data is kept such that given an index number, the
6385 pred and succ that edge represents can be determined, or
6386 given a pred and a succ, its index number can be returned.
6387 This allows algorithms which consume a lot of memory to
6388 represent the normally full matrix of edge (pred,succ) with a
6389 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6390 wasted space in the client code due to sparse flow graphs. */
6391
6392 /* This function initializes the edge list. Basically the entire
6393 flowgraph is processed, and all edges are assigned a number,
6394 and the data structure is filled in. */
6395 struct edge_list *
6396 create_edge_list ()
6397 {
6398 struct edge_list *elist;
6399 edge e;
6400 int num_edges;
6401 int x;
6402 int block_count;
6403
6404 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6405
6406 num_edges = 0;
6407
6408 /* Determine the number of edges in the flow graph by counting successor
6409 edges on each basic block. */
6410 for (x = 0; x < n_basic_blocks; x++)
6411 {
6412 basic_block bb = BASIC_BLOCK (x);
6413
6414 for (e = bb->succ; e; e = e->succ_next)
6415 num_edges++;
6416 }
6417 /* Don't forget successors of the entry block. */
6418 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6419 num_edges++;
6420
6421 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6422 elist->num_blocks = block_count;
6423 elist->num_edges = num_edges;
6424 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6425
6426 num_edges = 0;
6427
6428 /* Follow successors of the entry block, and register these edges. */
6429 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6430 {
6431 elist->index_to_edge[num_edges] = e;
6432 num_edges++;
6433 }
6434
6435 for (x = 0; x < n_basic_blocks; x++)
6436 {
6437 basic_block bb = BASIC_BLOCK (x);
6438
6439 /* Follow all successors of blocks, and register these edges. */
6440 for (e = bb->succ; e; e = e->succ_next)
6441 {
6442 elist->index_to_edge[num_edges] = e;
6443 num_edges++;
6444 }
6445 }
6446 return elist;
6447 }
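
/* The typical life cycle of an edge list, using the helpers defined
   below (an illustrative sketch; FILE is any open dump stream):

       struct edge_list *elist = create_edge_list ();
       print_edge_list (file, elist);
       verify_edge_list (file, elist);
       ...
       free_edge_list (elist);
*/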
6448
6449 /* This function frees the memory associated with an edge list. */
6450 void
6451 free_edge_list (elist)
6452 struct edge_list *elist;
6453 {
6454 if (elist)
6455 {
6456 free (elist->index_to_edge);
6457 free (elist);
6458 }
6459 }
6460
6461 /* This function provides debug output showing an edge list. */
6462 void
6463 print_edge_list (f, elist)
6464 FILE *f;
6465 struct edge_list *elist;
6466 {
6467 int x;
6468 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6469 elist->num_blocks - 2, elist->num_edges);
6470
6471 for (x = 0; x < elist->num_edges; x++)
6472 {
6473 fprintf (f, " %-4d - edge(", x);
6474 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6475 fprintf (f,"entry,");
6476 else
6477 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6478
6479 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6480 fprintf (f,"exit)\n");
6481 else
6482 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6483 }
6484 }
6485
6486 /* This function provides an internal consistency check of an edge list,
6487 verifying that all edges are present, and that there are no
6488 extra edges. */
6489 void
6490 verify_edge_list (f, elist)
6491 FILE *f;
6492 struct edge_list *elist;
6493 {
6494 int x, pred, succ, index;
6495 edge e;
6496
6497 for (x = 0; x < n_basic_blocks; x++)
6498 {
6499 basic_block bb = BASIC_BLOCK (x);
6500
6501 for (e = bb->succ; e; e = e->succ_next)
6502 {
6503 pred = e->src->index;
6504 succ = e->dest->index;
6505 index = EDGE_INDEX (elist, e->src, e->dest);
6506 if (index == EDGE_INDEX_NO_EDGE)
6507 {
6508 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6509 continue;
6510 }
6511 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6512 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6513 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6514 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6515 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6516 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6517 }
6518 }
6519 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6520 {
6521 pred = e->src->index;
6522 succ = e->dest->index;
6523 index = EDGE_INDEX (elist, e->src, e->dest);
6524 if (index == EDGE_INDEX_NO_EDGE)
6525 {
6526 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6527 continue;
6528 }
6529 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6530 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6531 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6532 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6533 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6534 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6535 }
6536 /* We've verified that all the edges are in the list; now let's make
6537 sure there are no spurious edges in the list. */
6538
6539 for (pred = 0 ; pred < n_basic_blocks; pred++)
6540 for (succ = 0 ; succ < n_basic_blocks; succ++)
6541 {
6542 basic_block p = BASIC_BLOCK (pred);
6543 basic_block s = BASIC_BLOCK (succ);
6544
6545 int found_edge = 0;
6546
6547 for (e = p->succ; e; e = e->succ_next)
6548 if (e->dest == s)
6549 {
6550 found_edge = 1;
6551 break;
6552 }
6553 for (e = s->pred; e; e = e->pred_next)
6554 if (e->src == p)
6555 {
6556 found_edge = 1;
6557 break;
6558 }
6559 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6560 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6561 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6562 pred, succ);
6563 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6564 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6565 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6566 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6567 BASIC_BLOCK (succ)));
6568 }
6569 for (succ = 0 ; succ < n_basic_blocks; succ++)
6570 {
6571 basic_block p = ENTRY_BLOCK_PTR;
6572 basic_block s = BASIC_BLOCK (succ);
6573
6574 int found_edge = 0;
6575
6576 for (e = p->succ; e; e = e->succ_next)
6577 if (e->dest == s)
6578 {
6579 found_edge = 1;
6580 break;
6581 }
6582 for (e = s->pred; e; e = e->pred_next)
6583 if (e->src == p)
6584 {
6585 found_edge = 1;
6586 break;
6587 }
6588 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6589 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6590 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6591 succ);
6592 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6593 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6594 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6595 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6596 BASIC_BLOCK (succ)));
6597 }
6598 for (pred = 0 ; pred < n_basic_blocks; pred++)
6599 {
6600 basic_block p = BASIC_BLOCK (pred);
6601 basic_block s = EXIT_BLOCK_PTR;
6602
6603 int found_edge = 0;
6604
6605 for (e = p->succ; e; e = e->succ_next)
6606 if (e->dest == s)
6607 {
6608 found_edge = 1;
6609 break;
6610 }
6611 for (e = s->pred; e; e = e->pred_next)
6612 if (e->src == p)
6613 {
6614 found_edge = 1;
6615 break;
6616 }
6617 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6618 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6619 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6620 pred);
6621 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6622 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6623 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6624 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6625 EXIT_BLOCK_PTR));
6626 }
6627 }
6628
6629 /* This routine will determine what, if any, edge there is between
6630 a specified predecessor and successor. */
6631
6632 int
6633 find_edge_index (edge_list, pred, succ)
6634 struct edge_list *edge_list;
6635 basic_block pred, succ;
6636 {
6637 int x;
6638 for (x = 0; x < NUM_EDGES (edge_list); x++)
6639 {
6640 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6641 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6642 return x;
6643 }
6644 return (EDGE_INDEX_NO_EDGE);
6645 }
6646
6647 /* This function will remove an edge from the flow graph. */
6648 void
6649 remove_edge (e)
6650 edge e;
6651 {
6652 edge last_pred = NULL;
6653 edge last_succ = NULL;
6654 edge tmp;
6655 basic_block src, dest;
6656 src = e->src;
6657 dest = e->dest;
6658 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6659 last_succ = tmp;
6660
6661 if (!tmp)
6662 abort ();
6663 if (last_succ)
6664 last_succ->succ_next = e->succ_next;
6665 else
6666 src->succ = e->succ_next;
6667
6668 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6669 last_pred = tmp;
6670
6671 if (!tmp)
6672 abort ();
6673 if (last_pred)
6674 last_pred->pred_next = e->pred_next;
6675 else
6676 dest->pred = e->pred_next;
6677
6678 n_edges--;
6679 free (e);
6680 }
6681
6682 /* This routine will remove any fake successor edges for a basic block.
6683 When the edge is removed, it is also removed from whatever predecessor
6684 list it is in. */
6685 static void
6686 remove_fake_successors (bb)
6687 basic_block bb;
6688 {
6689 edge e;
6690 for (e = bb->succ; e ; )
6691 {
6692 edge tmp = e;
6693 e = e->succ_next;
6694 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6695 remove_edge (tmp);
6696 }
6697 }
6698
6699 /* This routine will remove all fake edges from the flow graph. If
6700 we remove all fake successors, it will automatically remove all
6701 fake predecessors. */
6702 void
6703 remove_fake_edges ()
6704 {
6705 int x;
6706
6707 for (x = 0; x < n_basic_blocks; x++)
6708 remove_fake_successors (BASIC_BLOCK (x));
6709
6710 /* We've handled all successors except the entry block's. */
6711 remove_fake_successors (ENTRY_BLOCK_PTR);
6712 }
6713
6714 /* This function will add a fake edge between any block which has no
6715 successors, and the exit block. Some data flow equations require these
6716 edges to exist. */
6717 void
6718 add_noreturn_fake_exit_edges ()
6719 {
6720 int x;
6721
6722 for (x = 0; x < n_basic_blocks; x++)
6723 if (BASIC_BLOCK (x)->succ == NULL)
6724 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6725 }
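
/* The fake edges are typically added around a dataflow problem that
   needs every block to reach the exit, and stripped again afterwards
   (a sketch of the pairing):

       add_noreturn_fake_exit_edges ();
       ... solve the dataflow equations ...
       remove_fake_edges ();
*/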
6726 \f
6727 /* Dump the list of basic blocks in the bitmap NODES. */
6728 static void
6729 flow_nodes_print (str, nodes, file)
6730 const char *str;
6731 const sbitmap nodes;
6732 FILE *file;
6733 {
6734 int node;
6735
6736 fprintf (file, "%s { ", str);
6737 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6738 fputs ("}\n", file);
6739 }
6740
6741
6742 /* Dump the list of exiting edges in the array EDGES. */
6743 static void
6744 flow_exits_print (str, edges, num_edges, file)
6745 const char *str;
6746 const edge *edges;
6747 int num_edges;
6748 FILE *file;
6749 {
6750 int i;
6751
6752 fprintf (file, "%s { ", str);
6753 for (i = 0; i < num_edges; i++)
6754 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6755 fputs ("}\n", file);
6756 }
6757
6758
6759 /* Dump loop related CFG information. */
6760 static void
6761 flow_loops_cfg_dump (loops, file)
6762 const struct loops *loops;
6763 FILE *file;
6764 {
6765 int i;
6766
6767 if (! loops->num || ! file || ! loops->cfg.dom)
6768 return;
6769
6770 for (i = 0; i < n_basic_blocks; i++)
6771 {
6772 edge succ;
6773
6774 fprintf (file, ";; %d succs { ", i);
6775 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6776 fprintf (file, "%d ", succ->dest->index);
6777 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6778 }
6779
6780
6781 /* Dump the DFS node order. */
6782 if (loops->cfg.dfs_order)
6783 {
6784 fputs (";; DFS order: ", file);
6785 for (i = 0; i < n_basic_blocks; i++)
6786 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6787 fputs ("\n", file);
6788 }
6789 }
6790
6791
6792 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6793 static int
6794 flow_loop_nested_p (outer, loop)
6795 struct loop *outer;
6796 struct loop *loop;
6797 {
6798 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6799 }
6800
6801
6802 /* Dump the loop information specified by LOOPS to the stream FILE. */
6803 void
6804 flow_loops_dump (loops, file, verbose)
6805 const struct loops *loops;
6806 FILE *file;
6807 int verbose;
6808 {
6809 int i;
6810 int num_loops;
6811
6812 num_loops = loops->num;
6813 if (! num_loops || ! file)
6814 return;
6815
6816 fprintf (file, ";; %d loops found, %d levels\n",
6817 num_loops, loops->levels);
6818
6819 for (i = 0; i < num_loops; i++)
6820 {
6821 struct loop *loop = &loops->array[i];
6822
6823 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6824 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6825 loop->header->index, loop->latch->index,
6826 loop->pre_header ? loop->pre_header->index : -1,
6827 loop->depth, loop->level,
6828 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6829 fprintf (file, ";; %d", loop->num_nodes);
6830 flow_nodes_print (" nodes", loop->nodes, file);
6831 fprintf (file, ";; %d", loop->num_exits);
6832 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6833
6834 if (loop->shared)
6835 {
6836 int j;
6837
6838 for (j = 0; j < i; j++)
6839 {
6840 struct loop *oloop = &loops->array[j];
6841
6842 if (loop->header == oloop->header)
6843 {
6844 int disjoint;
6845 int smaller;
6846
6847 smaller = loop->num_nodes < oloop->num_nodes;
6848
6849 /* If the union of LOOP and OLOOP is different from
6850 the larger of LOOP and OLOOP then LOOP and OLOOP
6851 must be disjoint. */
6852 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6853 smaller ? oloop : loop);
6854 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6855 loop->header->index, i, j,
6856 disjoint ? "disjoint" : "nested");
6857 }
6858 }
6859 }
6860
6861 if (verbose)
6862 {
6863 /* Print diagnostics to compare our concept of a loop with
6864 what the loop notes say. */
6865 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6866 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6867 != NOTE_INSN_LOOP_BEG)
6868 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6869 INSN_UID (PREV_INSN (loop->first->head)));
6870 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6871 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6872 != NOTE_INSN_LOOP_END)
6873 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6874 INSN_UID (NEXT_INSN (loop->last->end)));
6875 }
6876 }
6877
6878 if (verbose)
6879 flow_loops_cfg_dump (loops, file);
6880 }
6881
6882
6883 /* Free all the memory allocated for LOOPS. */
6884 void
6885 flow_loops_free (loops)
6886 struct loops *loops;
6887 {
6888 if (loops->array)
6889 {
6890 int i;
6891
6892 if (! loops->num)
6893 abort ();
6894
6895 /* Free the loop descriptors. */
6896 for (i = 0; i < loops->num; i++)
6897 {
6898 struct loop *loop = &loops->array[i];
6899
6900 if (loop->nodes)
6901 sbitmap_free (loop->nodes);
6902 if (loop->exits)
6903 free (loop->exits);
6904 }
6905 free (loops->array);
6906 loops->array = NULL;
6907
6908 if (loops->cfg.dom)
6909 sbitmap_vector_free (loops->cfg.dom);
6910 if (loops->cfg.dfs_order)
6911 free (loops->cfg.dfs_order);
6912
6913 sbitmap_free (loops->shared_headers);
6914 }
6915 }
6916
6917
6918 /* Find the exits from the loop using the bitmap of loop nodes NODES
6919 and store in EXITS array. Return the number of exits from the
6920 loop. */
6921 static int
6922 flow_loop_exits_find (nodes, exits)
6923 const sbitmap nodes;
6924 edge **exits;
6925 {
6926 edge e;
6927 int node;
6928 int num_exits;
6929
6930 *exits = NULL;
6931
6932 /* Check all nodes within the loop to see if there are any
6933 successors not in the loop. Note that a node may have multiple
6934 exiting edges. */
6935 num_exits = 0;
6936 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6937 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6938 {
6939 basic_block dest = e->dest;
6940
6941 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6942 num_exits++;
6943 }
6944 });
6945
6946 if (! num_exits)
6947 return 0;
6948
6949 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6950
6951 /* Store all exiting edges into an array. */
6952 num_exits = 0;
6953 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6954 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6955 {
6956 basic_block dest = e->dest;
6957
6958 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6959 (*exits)[num_exits++] = e;
6960 }
6961 });
6962
6963 return num_exits;
6964 }
6965
6966
6967 /* Find the nodes contained within the loop with header HEADER and
6968 latch LATCH and store in NODES. Return the number of nodes within
6969 the loop. */
6970 static int
6971 flow_loop_nodes_find (header, latch, nodes)
6972 basic_block header;
6973 basic_block latch;
6974 sbitmap nodes;
6975 {
6976 basic_block *stack;
6977 int sp;
6978 int num_nodes = 0;
6979
6980 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6981 sp = 0;
6982
6983 /* Start with only the loop header in the set of loop nodes. */
6984 sbitmap_zero (nodes);
6985 SET_BIT (nodes, header->index);
6986 num_nodes++;
6987 header->loop_depth++;
6988
6989 /* Push the loop latch on to the stack. */
6990 if (! TEST_BIT (nodes, latch->index))
6991 {
6992 SET_BIT (nodes, latch->index);
6993 latch->loop_depth++;
6994 num_nodes++;
6995 stack[sp++] = latch;
6996 }
6997
6998 while (sp)
6999 {
7000 basic_block node;
7001 edge e;
7002
7003 node = stack[--sp];
7004 for (e = node->pred; e; e = e->pred_next)
7005 {
7006 basic_block ancestor = e->src;
7007
7008 /* If this ancestor is not marked as part of the loop, add it
7009 to the set of loop nodes and push it onto the stack. */
7010 if (ancestor != ENTRY_BLOCK_PTR
7011 && ! TEST_BIT (nodes, ancestor->index))
7012 {
7013 SET_BIT (nodes, ancestor->index);
7014 ancestor->loop_depth++;
7015 num_nodes++;
7016 stack[sp++] = ancestor;
7017 }
7018 }
7019 }
7020 free (stack);
7021 return num_nodes;
7022 }
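
/* As an illustration of the walk above, for a loop with header H and
   latch L reached by H -> A -> L: popping L visits its predecessor A,
   which is marked and pushed; popping A visits H, which is already
   marked, so the walk stops with nodes = {H, A, L} and
   num_nodes == 3.  */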
7023
7024
7025 /* Compute the depth first search order and store it in the array
7026 DFS_ORDER, using a local bitmap to mark the nodes visited.
7027 Returns the number of nodes visited. */
7028 static int
7029 flow_depth_first_order_compute (dfs_order)
7030 int *dfs_order;
7031 {
7032 edge e;
7033 edge *stack;
7034 int sp;
7035 int dfsnum = 0;
7036 sbitmap visited;
7037
7038 /* Allocate stack for back-tracking up CFG. */
7039 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
7040 sp = 0;
7041
7042 /* Allocate bitmap to track nodes that have been visited. */
7043 visited = sbitmap_alloc (n_basic_blocks);
7044
7045 /* None of the nodes in the CFG have been visited yet. */
7046 sbitmap_zero (visited);
7047
7048 /* Start with the first successor edge from the entry block. */
7049 e = ENTRY_BLOCK_PTR->succ;
7050 while (e)
7051 {
7052 basic_block src = e->src;
7053 basic_block dest = e->dest;
7054
7055 /* Mark that we have visited this node. */
7056 if (src != ENTRY_BLOCK_PTR)
7057 SET_BIT (visited, src->index);
7058
7059 /* If this node has not been visited before, push the current
7060 edge on to the stack and proceed with the first successor
7061 edge of this node. */
7062 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7063 && dest->succ)
7064 {
7065 stack[sp++] = e;
7066 e = dest->succ;
7067 }
7068 else
7069 {
7070 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7071 && ! dest->succ)
7072 {
7073 /* DEST has no successors (for example, a non-returning
7074 function is called) so do not push the current edge
7075 but carry on with its next successor. */
7076 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
7077 SET_BIT (visited, dest->index);
7078 }
7079
7080 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
7081 {
7082 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
7083
7084 /* Pop edge off stack. */
7085 e = stack[--sp];
7086 src = e->src;
7087 }
7088 e = e->succ_next;
7089 }
7090 }
7091 free (stack);
7092 sbitmap_free (visited);
7093
7094 /* The number of nodes visited should not be greater than
7095 n_basic_blocks. */
7096 if (dfsnum > n_basic_blocks)
7097 abort ();
7098
7099 /* There are some nodes left in the CFG that are unreachable. */
7100 if (dfsnum < n_basic_blocks)
7101 abort ();
7102 return dfsnum;
7103 }
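
/* The numbering assigned above is a reverse completion order (reverse
   postorder): the k-th block to finish its DFS visit receives index
   n_basic_blocks - k.  Along every edge except a back edge the source
   is therefore numbered before the destination, so a natural loop
   header appears before the rest of its loop; flow_loops_find relies
   on this when it walks blocks in DFS order.  */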
7104
7105
7106 /* Return the block for the pre-header of the loop with header
7107 HEADER where DOM specifies the dominator information. Return NULL if
7108 there is no pre-header. */
7109 static basic_block
7110 flow_loop_pre_header_find (header, dom)
7111 basic_block header;
7112 const sbitmap *dom;
7113 {
7114 basic_block pre_header;
7115 edge e;
7116
7117 /* If block p is the only predecessor of the header that the
7118 header does not dominate, then it is the pre-header. */
7119 pre_header = NULL;
7120 for (e = header->pred; e; e = e->pred_next)
7121 {
7122 basic_block node = e->src;
7123
7124 if (node != ENTRY_BLOCK_PTR
7125 && ! TEST_BIT (dom[node->index], header->index))
7126 {
7127 if (pre_header == NULL)
7128 pre_header = node;
7129 else
7130 {
7131 /* There are multiple edges into the header from outside
7132 the loop so there is no pre-header block. */
7133 pre_header = NULL;
7134 break;
7135 }
7136 }
7137 }
7138 return pre_header;
7139 }
7140
7141
7142 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7143 previously added. The insertion algorithm assumes that the loops
7144 are added in the order found by a depth first search of the CFG. */
7145 static void
7146 flow_loop_tree_node_add (prevloop, loop)
7147 struct loop *prevloop;
7148 struct loop *loop;
7149 {
7150
7151 if (flow_loop_nested_p (prevloop, loop))
7152 {
7153 prevloop->inner = loop;
7154 loop->outer = prevloop;
7155 return;
7156 }
7157
7158 while (prevloop->outer)
7159 {
7160 if (flow_loop_nested_p (prevloop->outer, loop))
7161 {
7162 prevloop->next = loop;
7163 loop->outer = prevloop->outer;
7164 return;
7165 }
7166 prevloop = prevloop->outer;
7167 }
7168
7169 prevloop->next = loop;
7170 loop->outer = NULL;
7171 }
7172
7173
7174 /* Build the loop hierarchy tree for LOOPS. */
7175 static void
7176 flow_loops_tree_build (loops)
7177 struct loops *loops;
7178 {
7179 int i;
7180 int num_loops;
7181
7182 num_loops = loops->num;
7183 if (! num_loops)
7184 return;
7185
7186 /* Root the loop hierarchy tree with the first loop found.
7187 Since we used a depth first search this should be the
7188 outermost loop. */
7189 loops->tree = &loops->array[0];
7190 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7191
7192 /* Add the remaining loops to the tree. */
7193 for (i = 1; i < num_loops; i++)
7194 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7195 }
7196
7197
7198 /* Helper function to compute loop nesting depth and enclosed loop level
7199 for the natural loop specified by LOOP at the loop depth DEPTH.
7200 Returns the loop level. */
7201 static int
7202 flow_loop_level_compute (loop, depth)
7203 struct loop *loop;
7204 int depth;
7205 {
7206 struct loop *inner;
7207 int level = 1;
7208
7209 if (! loop)
7210 return 0;
7211
7212 /* Traverse loop tree assigning depth and computing level as the
7213 maximum level of all the inner loops of this loop. The loop
7214 level is equivalent to the height of the loop in the loop tree
7215 and corresponds to the number of enclosed loop levels (including
7216 itself). */
7217 for (inner = loop->inner; inner; inner = inner->next)
7218 {
7219 int ilevel;
7220
7221 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7222
7223 if (ilevel > level)
7224 level = ilevel;
7225 }
7226 loop->level = level;
7227 loop->depth = depth;
7228 return level;
7229 }
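
/* For example, in a doubly nested loop the outer loop gets depth 1
   and level 2, while the inner loop gets depth 2 and level 1; a loop
   with no inner loops always has level 1.  */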
7230
7231
7232 /* Compute the loop nesting depth and enclosed loop level for the loop
7233 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7234 level. */
7235
7236 static int
7237 flow_loops_level_compute (loops)
7238 struct loops *loops;
7239 {
7240 struct loop *loop;
7241 int level;
7242 int levels = 0;
7243
7244 /* Traverse all the outer level loops. */
7245 for (loop = loops->tree; loop; loop = loop->next)
7246 {
7247 level = flow_loop_level_compute (loop, 1);
7248 if (level > levels)
7249 levels = level;
7250 }
7251 return levels;
7252 }
7253
7254
7255 /* Find all the natural loops in the function and save in LOOPS structure
7256 and recalculate loop_depth information in basic block structures.
7257 Return the number of natural loops found. */
7258
7259 int
7260 flow_loops_find (loops)
7261 struct loops *loops;
7262 {
7263 int i;
7264 int b;
7265 int num_loops;
7266 edge e;
7267 sbitmap headers;
7268 sbitmap *dom;
7269 int *dfs_order;
7270
7271 loops->num = 0;
7272 loops->array = NULL;
7273 loops->tree = NULL;
7274 dfs_order = NULL;
7275
7276 /* Taking care of this degenerate case makes the rest of
7277 this code simpler. */
7278 if (n_basic_blocks == 0)
7279 return 0;
7280
7281 /* Compute the dominators. */
7282 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7283 compute_flow_dominators (dom, NULL);
7284
7285 /* Count the number of loop edges (back edges). This should be the
7286 same as the number of natural loops. Also clear each block's
7287 loop_depth; as we work from inner to outer in a loop nest we call
7288 flow_loop_nodes_find, which increments loop_depth for the nodes
7289 within the current loop, which happens to enclose inner loops. */
7290
7291 num_loops = 0;
7292 for (b = 0; b < n_basic_blocks; b++)
7293 {
7294 BASIC_BLOCK (b)->loop_depth = 0;
7295 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7296 {
7297 basic_block latch = e->src;
7298
7299 /* Look for back edges where a predecessor is dominated
7300 by this block. A natural loop has a single entry
7301 node (header) that dominates all the nodes in the
7302 loop. It also has a single back edge to the header
7303 from a latch node. Note that multiple natural loops
7304 may share the same header. */
7305 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7306 num_loops++;
7307 }
7308 }
7309
7310 if (num_loops)
7311 {
7312 /* Compute depth first search order of the CFG so that outer
7313 natural loops will be found before inner natural loops. */
7314 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7315 flow_depth_first_order_compute (dfs_order);
7316
7317 /* Allocate loop structures. */
7318 loops->array
7319 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7320
7321 headers = sbitmap_alloc (n_basic_blocks);
7322 sbitmap_zero (headers);
7323
7324 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7325 sbitmap_zero (loops->shared_headers);
7326
7327 /* Find and record information about all the natural loops
7328 in the CFG. */
7329 num_loops = 0;
7330 for (b = 0; b < n_basic_blocks; b++)
7331 {
7332 basic_block header;
7333
7334 /* Search the nodes of the CFG in DFS order so that we find
7335 outer loops first. */
7336 header = BASIC_BLOCK (dfs_order[b]);
7337
7338 /* Look for all the possible latch blocks for this header. */
7339 for (e = header->pred; e; e = e->pred_next)
7340 {
7341 basic_block latch = e->src;
7342
7343 /* Look for back edges where a predecessor is dominated
7344 by this block. A natural loop has a single entry
7345 node (header) that dominates all the nodes in the
7346 loop. It also has a single back edge to the header
7347 from a latch node. Note that multiple natural loops
7348 may share the same header. */
7349 if (latch != ENTRY_BLOCK_PTR
7350 && TEST_BIT (dom[latch->index], header->index))
7351 {
7352 struct loop *loop;
7353
7354 loop = loops->array + num_loops;
7355
7356 loop->header = header;
7357 loop->latch = latch;
7358
7359 /* Keep track of blocks that are loop headers so
7360 that we can tell which loops should be merged. */
7361 if (TEST_BIT (headers, header->index))
7362 SET_BIT (loops->shared_headers, header->index);
7363 SET_BIT (headers, header->index);
7364
7365 /* Find nodes contained within the loop. */
7366 loop->nodes = sbitmap_alloc (n_basic_blocks);
7367 loop->num_nodes
7368 = flow_loop_nodes_find (header, latch, loop->nodes);
7369
7370 /* Compute first and last blocks within the loop.
7371 These are often the same as the loop header and
7372 loop latch respectively, but this is not always
7373 the case. */
7374 loop->first
7375 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7376 loop->last
7377 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7378
7379 /* Find edges which exit the loop. Note that a node
7380 may have several exit edges. */
7381 loop->num_exits
7382 = flow_loop_exits_find (loop->nodes, &loop->exits);
7383
7384 /* Look to see if the loop has a pre-header node. */
7385 loop->pre_header
7386 = flow_loop_pre_header_find (header, dom);
7387
7388 num_loops++;
7389 }
7390 }
7391 }
7392
7393 /* Natural loops with shared headers may either be disjoint or
7394 nested. Disjoint loops with shared headers cannot be inner
7395 loops and should be merged. For now just mark loops that share
7396 headers. */
7397 for (i = 0; i < num_loops; i++)
7398 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7399 loops->array[i].shared = 1;
7400
7401 sbitmap_free (headers);
7402 }
7403
7404 loops->num = num_loops;
7405
7406 /* Save CFG derived information to avoid recomputing it. */
7407 loops->cfg.dom = dom;
7408 loops->cfg.dfs_order = dfs_order;
7409
7410 /* Build the loop hierarchy tree. */
7411 flow_loops_tree_build (loops);
7412
7413 /* Assign the loop nesting depth and enclosed loop level for each
7414 loop. */
7415 loops->levels = flow_loops_level_compute (loops);
7416
7417 return num_loops;
7418 }
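
/* A representative use of the loop discovery interface (a sketch;
   FILE is assumed to be an open dump stream):

       struct loops loops;
       if (flow_loops_find (&loops) > 0)
	 flow_loops_dump (&loops, file, 1);
       flow_loops_free (&loops);
*/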
7419
7420
7421 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7422 int
7423 flow_loop_outside_edge_p (loop, e)
7424 const struct loop *loop;
7425 edge e;
7426 {
7427 if (e->dest != loop->header)
7428 abort ();
7429 return (e->src == ENTRY_BLOCK_PTR)
7430 || ! TEST_BIT (loop->nodes, e->src->index);
7431 }
7432
7433
7434 /* Clear LOG_LINKS fields of insns in a chain. */
7435 void
7436 clear_log_links (insns)
7437 rtx insns;
7438 {
7439 rtx i;
7440 for (i = insns; i; i = NEXT_INSN (i))
7441 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7442 LOG_LINKS (i) = 0;
7443 }