mode-switching.c (optimize_mode_switching): Make it static.
[gcc.git] / gcc / basic-block.h
1 /* Define control and data flow tables, and regsets.
2 Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 #ifndef GCC_BASIC_BLOCK_H
23 #define GCC_BASIC_BLOCK_H
24
25 #include "bitmap.h"
26 #include "sbitmap.h"
27 #include "varray.h"
28 #include "partition.h"
29 #include "hard-reg-set.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "function.h"
33
34 /* Head of register set linked list. */
35 typedef bitmap_head regset_head;
36
37 /* A pointer to a regset_head. */
38 typedef bitmap regset;
39
40 /* Allocate a register set with oballoc. */
41 #define ALLOC_REG_SET(OBSTACK) BITMAP_ALLOC (OBSTACK)
42
43 /* Do any cleanup needed on a regset when it is no longer used. */
44 #define FREE_REG_SET(REGSET) BITMAP_FREE (REGSET)
45
46 /* Initialize a new regset. */
47 #define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)
48
49 /* Clear a register set by freeing up the linked list. */
50 #define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)
51
52 /* Copy a register set to another register set. */
53 #define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)
54
55 /* Compare two register sets. */
56 #define REG_SET_EQUAL_P(A, B) bitmap_equal_p (A, B)
57
58 /* `and' a register set with a second register set. */
59 #define AND_REG_SET(TO, FROM) bitmap_and_into (TO, FROM)
60
61 /* `and' the complement of a register set with a register set. */
62 #define AND_COMPL_REG_SET(TO, FROM) bitmap_and_compl_into (TO, FROM)
63
64 /* Inclusive or a register set with a second register set. */
65 #define IOR_REG_SET(TO, FROM) bitmap_ior_into (TO, FROM)
66
67 /* Exclusive or a register set with a second register set. */
68 #define XOR_REG_SET(TO, FROM) bitmap_xor_into (TO, FROM)
69
70 /* Or into TO the register set FROM1 `and'ed with the complement of FROM2. */
71 #define IOR_AND_COMPL_REG_SET(TO, FROM1, FROM2) \
72 bitmap_ior_and_compl_into (TO, FROM1, FROM2)
73
74 /* Clear a single register in a register set. */
75 #define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)
76
77 /* Set a single register in a register set. */
78 #define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)
79
80 /* Return true if a register is set in a register set. */
81 #define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)
82
83 /* Copy the hard registers in a register set to the hard register set. */
84 extern void reg_set_to_hard_reg_set (HARD_REG_SET *, bitmap);
85 #define REG_SET_TO_HARD_REG_SET(TO, FROM) \
86 do { \
87 CLEAR_HARD_REG_SET (TO); \
88 reg_set_to_hard_reg_set (&TO, FROM); \
89 } while (0)
90
91 typedef bitmap_iterator reg_set_iterator;
92
93 /* Loop over all registers in REGSET, starting with MIN, setting REGNUM to the
94 register number and executing CODE for all registers that are set. */
95 #define EXECUTE_IF_SET_IN_REG_SET(REGSET, MIN, REGNUM, RSI) \
96 EXECUTE_IF_SET_IN_BITMAP (REGSET, MIN, REGNUM, RSI)
97
98 /* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
99 REGNUM to the register number and executing CODE for all registers that are
100 set in the first regset and not set in the second. */
101 #define EXECUTE_IF_AND_COMPL_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
102 EXECUTE_IF_AND_COMPL_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
103
/* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
   REGNUM to the register number and executing CODE for all registers that are
   set in both regsets.  */
/* The stray trailing backslash that used to follow this definition has been
   removed: it continued the macro onto the next (blank) line, which was
   harmless only by accident and would break if code were ever added there.  */
#define EXECUTE_IF_AND_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
  EXECUTE_IF_AND_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
109
110 /* Type we use to hold basic block counters. Should be at least
111 64bit. Although a counter cannot be negative, we use a signed
112 type, because erroneous negative counts can be generated when the
113 flow graph is manipulated by various optimizations. A signed type
114 makes those easy to detect. */
115 typedef HOST_WIDEST_INT gcov_type;
116
/* Control flow edge information.  */
struct edge_def GTY(())
{
  /* The two blocks at the ends of the edge.  */
  struct basic_block_def *src;
  struct basic_block_def *dest;

  /* Instructions queued on the edge, to be emitted when the edge is
     committed.  Holds RTL or a tree depending on the current IR; the
     GTY descriptor discriminates on ir_type ().  */
  union edge_def_insns {
    rtx GTY ((tag ("0"))) r;
    tree GTY ((tag ("1"))) t;
  } GTY ((desc ("ir_type ()"))) insns;

  /* Auxiliary info specific to a pass.  Skipped by the garbage
     collector, so a pass must not keep GC-managed data only here.  */
  PTR GTY ((skip (""))) aux;

  /* Location of any goto implicit in the edge, during tree-ssa.  */
  source_locus goto_locus;

  int flags;			/* see EDGE_* below */
  int probability;		/* biased by REG_BR_PROB_BASE */
  gcov_type count;		/* Expected number of executions calculated
				   in profile.c */

  /* The index number corresponding to this edge in the edge vector
     dest->preds.  */
  unsigned int dest_idx;
};
145
146 typedef struct edge_def *edge;
147 DEF_VEC_P(edge);
148 DEF_VEC_ALLOC_P(edge,gc);
149
150 #define EDGE_FALLTHRU 1 /* 'Straight line' flow */
151 #define EDGE_ABNORMAL 2 /* Strange flow, like computed
152 label, or eh */
153 #define EDGE_ABNORMAL_CALL 4 /* Call with abnormal exit
154 like an exception, or sibcall */
155 #define EDGE_EH 8 /* Exception throw */
156 #define EDGE_FAKE 16 /* Not a real edge (profile.c) */
157 #define EDGE_DFS_BACK 32 /* A backwards edge */
158 #define EDGE_CAN_FALLTHRU 64 /* Candidate for straight line
159 flow. */
160 #define EDGE_IRREDUCIBLE_LOOP 128 /* Part of irreducible loop. */
161 #define EDGE_SIBCALL 256 /* Edge from sibcall to exit. */
162 #define EDGE_LOOP_EXIT 512 /* Exit of a loop. */
163 #define EDGE_TRUE_VALUE 1024 /* Edge taken when controlling
164 predicate is nonzero. */
165 #define EDGE_FALSE_VALUE 2048 /* Edge taken when controlling
166 predicate is zero. */
167 #define EDGE_EXECUTABLE 4096 /* Edge is executable. Only
168 valid during SSA-CCP. */
169 #define EDGE_CROSSING 8192 /* Edge crosses between hot
170 and cold sections, when we
171 do partitioning. */
172 #define EDGE_ALL_FLAGS 16383
173
174 #define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
175
176 /* Counter summary from the last set of coverage counts read by
177 profile.c. */
178 extern const struct gcov_ctr_summary *profile_info;
179
180 /* Declared in cfgloop.h. */
181 struct loop;
182 struct loops;
183
184 /* Declared in tree-flow.h. */
185 struct edge_prediction;
186 struct rtl_bb_info;
187
188 /* A basic block is a sequence of instructions with only entry and
189 only one exit. If any one of the instructions are executed, they
190 will all be executed, and in sequence from first to last.
191
192 There may be COND_EXEC instructions in the basic block. The
193 COND_EXEC *instructions* will be executed -- but if the condition
194 is false the conditionally executed *expressions* will of course
195 not be executed. We don't consider the conditionally executed
196 expression (which might have side-effects) to be in a separate
197 basic block because the program counter will always be at the same
198 location after the COND_EXEC instruction, regardless of whether the
199 condition is true or not.
200
201 Basic blocks need not start with a label nor end with a jump insn.
202 For example, a previous basic block may just "conditionally fall"
203 into the succeeding basic block, and the last basic block need not
204 end with a jump insn. Block 0 is a descendant of the entry block.
205
206 A basic block beginning with two labels cannot have notes between
207 the labels.
208
209 Data for jump tables are stored in jump_insns that occur in no
210 basic block even though these insns can follow or precede insns in
211 basic blocks. */
212
/* Basic block information indexed by block number.  The chain_next /
   chain_prev GTY options let the collector walk the doubly linked
   block chain instead of recursing.  */
struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
{
  /* Pointers to the first and last trees of the block.  */
  tree stmt_list;

  /* The edges into and out of the block.  */
  VEC(edge,gc) *preds;
  VEC(edge,gc) *succs;

  /* Auxiliary info specific to a pass.  Skipped by the garbage
     collector.  */
  PTR GTY ((skip (""))) aux;

  /* Innermost loop containing the block.  */
  struct loop * GTY ((skip (""))) loop_father;

  /* The dominance and postdominance information node.  */
  struct et_node * GTY ((skip (""))) dom[2];

  /* Previous and next blocks in the chain.  */
  struct basic_block_def *prev_bb;
  struct basic_block_def *next_bb;

  /* IL-dependent extra data; the rtl member is valid only when the
     BB_RTL flag is set, as encoded in the GTY descriptor below.  */
  union basic_block_il_dependent {
    struct rtl_bb_info * GTY ((tag ("1"))) rtl;
  } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;

  /* Chain of PHI nodes for this block.  */
  tree phi_nodes;

  /* A list of predictions.  */
  struct edge_prediction *predictions;

  /* Expected number of executions: calculated in profile.c.  */
  gcov_type count;

  /* The index of this block.  */
  int index;

  /* The loop depth of this block.  */
  int loop_depth;

  /* Expected frequency.  Normalized to be in range 0 to BB_FREQ_MAX.  */
  int frequency;

  /* Various flags.  See BB_* below.  */
  int flags;
};
261
/* Per-block data that exists only while the function is in RTL form
   (i.e. while the BB_RTL flag is set on the block).  */
struct rtl_bb_info GTY(())
{
  /* The first and last insns of the block.  */
  rtx head_;
  rtx end_;

  /* The registers that are live on entry to this block.  */
  bitmap GTY ((skip (""))) global_live_at_start;

  /* The registers that are live on exit from this block.  */
  bitmap GTY ((skip (""))) global_live_at_end;

  /* In CFGlayout mode points to insn notes/jumptables to be placed just before
     and after the block.  */
  rtx header;
  rtx footer;

  /* This field is used by the bb-reorder and tracer passes.  */
  int visited;
};
282
283 typedef struct basic_block_def *basic_block;
284
285 DEF_VEC_P(basic_block);
286 DEF_VEC_ALLOC_P(basic_block,gc);
287 DEF_VEC_ALLOC_P(basic_block,heap);
288
289 #define BB_FREQ_MAX 10000
290
/* Masks for basic_block.flags.

   BB_HOT_PARTITION and BB_COLD_PARTITION should be preserved throughout
   the compilation, so they are never cleared.

   All other flags may be cleared by clear_bb_flags ().  It is generally
   a bad idea to rely on any flags being up-to-date.  */

enum bb_flags
{

  /* Set if insns in BB are modified.  Used for updating liveness info.  */
  BB_DIRTY = 1,

  /* Only set on blocks that have just been created by create_bb.  */
  BB_NEW = 2,

  /* Set by find_unreachable_blocks.  Do not rely on this being set in any
     pass.  */
  BB_REACHABLE = 4,

  /* Set for blocks in an irreducible loop by loop analysis.  */
  BB_IRREDUCIBLE_LOOP = 8,

  /* Set on blocks that may actually not be single-entry single-exit block.  */
  BB_SUPERBLOCK = 16,

  /* Set on basic blocks that the scheduler should not touch.  This is used
     by SMS to prevent other schedulers from messing with the loop schedule.  */
  BB_DISABLE_SCHEDULE = 32,

  /* Set on blocks that should be put in a hot section.  */
  BB_HOT_PARTITION = 64,

  /* Set on blocks that should be put in a cold section.  */
  BB_COLD_PARTITION = 128,

  /* Set on block that was duplicated.  */
  BB_DUPLICATED = 256,

  /* Set on blocks that are in RTL format.  NOTE(review): the value 512
     is intentionally unused here -- do not reuse it without checking
     other flag consumers.  */
  BB_RTL = 1024,

  /* Set on blocks that are forwarder blocks.
     Only used in cfgcleanup.c.  */
  BB_FORWARDER_BLOCK = 2048,

  /* Set on blocks that cannot be threaded through.
     Only used in cfgcleanup.c.  */
  BB_NONTHREADABLE_BLOCK = 4096
};
342
343 /* Dummy flag for convenience in the hot/cold partitioning code. */
344 #define BB_UNPARTITIONED 0
345
346 /* Partitions, to be used when partitioning hot and cold basic blocks into
347 separate sections. */
348 #define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
349 #define BB_SET_PARTITION(bb, part) do { \
350 basic_block bb_ = (bb); \
351 bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION)) \
352 | (part)); \
353 } while (0)
354
355 #define BB_COPY_PARTITION(dstbb, srcbb) \
356 BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
357
/* A structure to group all the per-function control flow graph data.
   The x_* prefixing is necessary because otherwise references to the
   fields of this struct are interpreted as the defines for backward
   source compatibility following the definition of this struct.  */
struct control_flow_graph GTY(())
{
  /* Block pointers for the exit and entry of a function.
     These are always the head and tail of the basic block list.  */
  basic_block x_entry_block_ptr;
  basic_block x_exit_block_ptr;

  /* Index by basic block number, get basic block struct info.  */
  VEC(basic_block,gc) *x_basic_block_info;

  /* Number of basic blocks in this flow graph.  */
  int x_n_basic_blocks;

  /* Number of edges in this flow graph.  */
  int x_n_edges;

  /* The first free basic block number.  */
  int x_last_basic_block;

  /* Mapping of labels to their associated blocks.  At present
     only used for the tree CFG.  */
  VEC(basic_block,gc) *x_label_to_block_map;

  /* Whether profile counts are available for this function, and if so
     whether they were read from profile data or guessed.  */
  enum profile_status {
    PROFILE_ABSENT,
    PROFILE_GUESSED,
    PROFILE_READ
  } x_profile_status;
};
391
392 /* Defines for accessing the fields of the CFG structure for function FN. */
393 #define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_entry_block_ptr)
394 #define EXIT_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_exit_block_ptr)
395 #define basic_block_info_for_function(FN) ((FN)->cfg->x_basic_block_info)
396 #define n_basic_blocks_for_function(FN) ((FN)->cfg->x_n_basic_blocks)
397 #define n_edges_for_function(FN) ((FN)->cfg->x_n_edges)
398 #define last_basic_block_for_function(FN) ((FN)->cfg->x_last_basic_block)
399 #define label_to_block_map_for_function(FN) ((FN)->cfg->x_label_to_block_map)
400
401 #define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
402 (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))
403
404 /* Defines for textual backward source compatibility. */
405 #define ENTRY_BLOCK_PTR (cfun->cfg->x_entry_block_ptr)
406 #define EXIT_BLOCK_PTR (cfun->cfg->x_exit_block_ptr)
407 #define basic_block_info (cfun->cfg->x_basic_block_info)
408 #define n_basic_blocks (cfun->cfg->x_n_basic_blocks)
409 #define n_edges (cfun->cfg->x_n_edges)
410 #define last_basic_block (cfun->cfg->x_last_basic_block)
411 #define label_to_block_map (cfun->cfg->x_label_to_block_map)
412 #define profile_status (cfun->cfg->x_profile_status)
413
414 #define BASIC_BLOCK(N) (VEC_index (basic_block, basic_block_info, (N)))
415 #define SET_BASIC_BLOCK(N,BB) (VEC_replace (basic_block, basic_block_info, (N), (BB)))
416
417 /* TRUE if we should re-run loop discovery after threading jumps, FALSE
418 otherwise. */
419 extern bool rediscover_loops_after_threading;
420
421 /* For iterating over basic blocks. */
422 #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
423 for (BB = FROM; BB != TO; BB = BB->DIR)
424
425 #define FOR_EACH_BB_FN(BB, FN) \
426 FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)
427
428 #define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)
429
430 #define FOR_EACH_BB_REVERSE_FN(BB, FN) \
431 FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)
432
433 #define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)
434
435 /* For iterating over insns in basic block. */
436 #define FOR_BB_INSNS(BB, INSN) \
437 for ((INSN) = BB_HEAD (BB); \
438 (INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
439 (INSN) = NEXT_INSN (INSN))
440
441 #define FOR_BB_INSNS_REVERSE(BB, INSN) \
442 for ((INSN) = BB_END (BB); \
443 (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
444 (INSN) = PREV_INSN (INSN))
445
446 /* Cycles through _all_ basic blocks, even the fake ones (entry and
447 exit block). */
448
449 #define FOR_ALL_BB(BB) \
450 for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)
451
452 #define FOR_ALL_BB_FN(BB, FN) \
453 for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
454
455 extern bitmap_obstack reg_obstack;
456
457 /* Indexed by n, gives number of basic block that (REG n) is used in.
458 If the value is REG_BLOCK_GLOBAL (-2),
459 it means (REG n) is used in more than one basic block.
460 REG_BLOCK_UNKNOWN (-1) means it hasn't been seen yet so we don't know.
461 This information remains valid for the rest of the compilation
462 of the current function; it is used to control register allocation. */
463
464 #define REG_BLOCK_UNKNOWN -1
465 #define REG_BLOCK_GLOBAL -2
466
467 #define REG_BASIC_BLOCK(N) (VARRAY_REG (reg_n_info, N)->basic_block)
468 \f
469 /* Stuff for recording basic block info. */
470
471 #define BB_HEAD(B) (B)->il.rtl->head_
472 #define BB_END(B) (B)->il.rtl->end_
473
474 /* Special block numbers [markers] for entry and exit. */
475 #define ENTRY_BLOCK (0)
476 #define EXIT_BLOCK (1)
477
478 /* The two blocks that are always in the cfg. */
479 #define NUM_FIXED_BLOCKS (2)
480
481
482 #define BLOCK_NUM(INSN) (BLOCK_FOR_INSN (INSN)->index + 0)
483 #define set_block_for_insn(INSN, BB) (BLOCK_FOR_INSN (INSN) = BB)
484
485 extern void compute_bb_for_insn (void);
486 extern void free_bb_for_insn (void);
487 extern void update_bb_for_insn (basic_block);
488
489 extern void free_basic_block_vars (void);
490
491 extern void insert_insn_on_edge (rtx, edge);
492 bool safe_insert_insn_on_edge (rtx, edge);
493
494 extern void commit_edge_insertions (void);
495 extern void commit_edge_insertions_watch_calls (void);
496
497 extern void remove_fake_edges (void);
498 extern void remove_fake_exit_edges (void);
499 extern void add_noreturn_fake_exit_edges (void);
500 extern void connect_infinite_loops_to_exit (void);
501 extern edge unchecked_make_edge (basic_block, basic_block, int);
502 extern edge cached_make_edge (sbitmap, basic_block, basic_block, int);
503 extern edge make_edge (basic_block, basic_block, int);
504 extern edge make_single_succ_edge (basic_block, basic_block, int);
505 extern void remove_edge (edge);
506 extern void redirect_edge_succ (edge, basic_block);
507 extern edge redirect_edge_succ_nodup (edge, basic_block);
508 extern void redirect_edge_pred (edge, basic_block);
509 extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
510 extern void clear_bb_flags (void);
511 extern int post_order_compute (int *, bool);
512 extern int pre_and_rev_post_order_compute (int *, int *, bool);
513 extern int dfs_enumerate_from (basic_block, int,
514 bool (*)(basic_block, void *),
515 basic_block *, int, void *);
516 extern void compute_dominance_frontiers (bitmap *);
517 extern void dump_edge_info (FILE *, edge, int);
518 extern void brief_dump_cfg (FILE *);
519 extern void clear_edges (void);
520 extern rtx first_insn_after_basic_block_note (basic_block);
521 extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
522 extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
523 gcov_type);
524
/* Structure to group all of the information to process IF-THEN and
   IF-THEN-ELSE blocks for the conditional execution support.  This
   needs to be in a public file in case the IFCVT macros call
   functions passing the ce_if_block data structure.  */

typedef struct ce_if_block
{
  basic_block test_bb;			/* First test block.  */
  basic_block then_bb;			/* THEN block.  */
  basic_block else_bb;			/* ELSE block or NULL.  */
  basic_block join_bb;			/* Join THEN/ELSE blocks.  */
  basic_block last_test_bb;		/* Last bb to hold && or || tests.  */
  int num_multiple_test_blocks;		/* # of && and || basic blocks.  */
  int num_and_and_blocks;		/* # of && blocks.  */
  int num_or_or_blocks;			/* # of || blocks.  */
  int num_multiple_test_insns;		/* # of insns in && and || blocks.  */
  int and_and_p;			/* Complex test is &&.  */
  int num_then_insns;			/* # of insns in THEN block.  */
  int num_else_insns;			/* # of insns in ELSE block.  */
  int pass;				/* Pass number.  */

#ifdef IFCVT_EXTRA_FIELDS
  IFCVT_EXTRA_FIELDS			/* Any machine dependent fields.  */
#endif

} ce_if_block_t;
551
/* This structure maintains an edge list vector.  */
struct edge_list
{
  /* Number of basic blocks at the time the list was built.  */
  int num_blocks;
  /* Number of entries in the index_to_edge array.  */
  int num_edges;
  /* Array mapping an edge index to the edge itself.  */
  edge *index_to_edge;
};
559
560 /* The base value for branch probability notes and edge probabilities. */
561 #define REG_BR_PROB_BASE 10000
562
563 /* This is the value which indicates no edge is present. */
564 #define EDGE_INDEX_NO_EDGE -1
565
566 /* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
567 if there is no edge between the 2 basic blocks. */
568 #define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))
569
570 /* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
571 block which is either the pred or succ end of the indexed edge. */
572 #define INDEX_EDGE_PRED_BB(el, index) ((el)->index_to_edge[(index)]->src)
573 #define INDEX_EDGE_SUCC_BB(el, index) ((el)->index_to_edge[(index)]->dest)
574
575 /* INDEX_EDGE returns a pointer to the edge. */
576 #define INDEX_EDGE(el, index) ((el)->index_to_edge[(index)])
577
578 /* Number of edges in the compressed edge list. */
579 #define NUM_EDGES(el) ((el)->num_edges)
580
581 /* BB is assumed to contain conditional jump. Return the fallthru edge. */
582 #define FALLTHRU_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
583 ? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))
584
585 /* BB is assumed to contain conditional jump. Return the branch edge. */
586 #define BRANCH_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
587 ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))
588
589 /* Return expected execution frequency of the edge E. */
590 #define EDGE_FREQUENCY(e) (((e)->src->frequency \
591 * (e)->probability \
592 + REG_BR_PROB_BASE / 2) \
593 / REG_BR_PROB_BASE)
594
595 /* Return nonzero if edge is critical. */
596 #define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
597 && EDGE_COUNT ((e)->dest->preds) >= 2)
598
599 #define EDGE_COUNT(ev) VEC_length (edge, (ev))
600 #define EDGE_I(ev,i) VEC_index (edge, (ev), (i))
601 #define EDGE_PRED(bb,i) VEC_index (edge, (bb)->preds, (i))
602 #define EDGE_SUCC(bb,i) VEC_index (edge, (bb)->succs, (i))
603
604 /* Returns true if BB has precisely one successor. */
605
606 static inline bool
607 single_succ_p (basic_block bb)
608 {
609 return EDGE_COUNT (bb->succs) == 1;
610 }
611
612 /* Returns true if BB has precisely one predecessor. */
613
614 static inline bool
615 single_pred_p (basic_block bb)
616 {
617 return EDGE_COUNT (bb->preds) == 1;
618 }
619
620 /* Returns the single successor edge of basic block BB. Aborts if
621 BB does not have exactly one successor. */
622
623 static inline edge
624 single_succ_edge (basic_block bb)
625 {
626 gcc_assert (single_succ_p (bb));
627 return EDGE_SUCC (bb, 0);
628 }
629
630 /* Returns the single predecessor edge of basic block BB. Aborts
631 if BB does not have exactly one predecessor. */
632
633 static inline edge
634 single_pred_edge (basic_block bb)
635 {
636 gcc_assert (single_pred_p (bb));
637 return EDGE_PRED (bb, 0);
638 }
639
640 /* Returns the single successor block of basic block BB. Aborts
641 if BB does not have exactly one successor. */
642
643 static inline basic_block
644 single_succ (basic_block bb)
645 {
646 return single_succ_edge (bb)->dest;
647 }
648
649 /* Returns the single predecessor block of basic block BB. Aborts
650 if BB does not have exactly one predecessor.*/
651
652 static inline basic_block
653 single_pred (basic_block bb)
654 {
655 return single_pred_edge (bb)->src;
656 }
657
/* Iterator object for edges.  */

typedef struct {
  /* Current position within the edge vector.  */
  unsigned index;
  /* Address of the edge vector being iterated; held indirectly so the
     iterator stays valid if the vector object is replaced.  */
  VEC(edge,gc) **container;
} edge_iterator;
664
665 static inline VEC(edge,gc) *
666 ei_container (edge_iterator i)
667 {
668 gcc_assert (i.container);
669 return *i.container;
670 }
671
672 #define ei_start(iter) ei_start_1 (&(iter))
673 #define ei_last(iter) ei_last_1 (&(iter))
674
675 /* Return an iterator pointing to the start of an edge vector. */
676 static inline edge_iterator
677 ei_start_1 (VEC(edge,gc) **ev)
678 {
679 edge_iterator i;
680
681 i.index = 0;
682 i.container = ev;
683
684 return i;
685 }
686
687 /* Return an iterator pointing to the last element of an edge
688 vector. */
689 static inline edge_iterator
690 ei_last_1 (VEC(edge,gc) **ev)
691 {
692 edge_iterator i;
693
694 i.index = EDGE_COUNT (*ev) - 1;
695 i.container = ev;
696
697 return i;
698 }
699
700 /* Is the iterator `i' at the end of the sequence? */
701 static inline bool
702 ei_end_p (edge_iterator i)
703 {
704 return (i.index == EDGE_COUNT (ei_container (i)));
705 }
706
707 /* Is the iterator `i' at one position before the end of the
708 sequence? */
709 static inline bool
710 ei_one_before_end_p (edge_iterator i)
711 {
712 return (i.index + 1 == EDGE_COUNT (ei_container (i)));
713 }
714
715 /* Advance the iterator to the next element. */
716 static inline void
717 ei_next (edge_iterator *i)
718 {
719 gcc_assert (i->index < EDGE_COUNT (ei_container (*i)));
720 i->index++;
721 }
722
723 /* Move the iterator to the previous element. */
724 static inline void
725 ei_prev (edge_iterator *i)
726 {
727 gcc_assert (i->index > 0);
728 i->index--;
729 }
730
731 /* Return the edge pointed to by the iterator `i'. */
732 static inline edge
733 ei_edge (edge_iterator i)
734 {
735 return EDGE_I (ei_container (i), i.index);
736 }
737
738 /* Return an edge pointed to by the iterator. Do it safely so that
739 NULL is returned when the iterator is pointing at the end of the
740 sequence. */
741 static inline edge
742 ei_safe_edge (edge_iterator i)
743 {
744 return !ei_end_p (i) ? ei_edge (i) : NULL;
745 }
746
747 /* Return 1 if we should continue to iterate. Return 0 otherwise.
748 *Edge P is set to the next edge if we are to continue to iterate
749 and NULL otherwise. */
750
751 static inline bool
752 ei_cond (edge_iterator ei, edge *p)
753 {
754 if (!ei_end_p (ei))
755 {
756 *p = ei_edge (ei);
757 return 1;
758 }
759 else
760 {
761 *p = NULL;
762 return 0;
763 }
764 }
765
766 /* This macro serves as a convenient way to iterate each edge in a
767 vector of predecessor or successor edges. It must not be used when
768 an element might be removed during the traversal, otherwise
769 elements will be missed. Instead, use a for-loop like that shown
770 in the following pseudo-code:
771
772 FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
773 {
774 IF (e != taken_edge)
775 remove_edge (e);
776 ELSE
777 ei_next (&ei);
778 }
779 */
780
781 #define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC) \
782 for ((ITER) = ei_start ((EDGE_VEC)); \
783 ei_cond ((ITER), &(EDGE)); \
784 ei_next (&(ITER)))
785
786 struct edge_list * create_edge_list (void);
787 void free_edge_list (struct edge_list *);
788 void print_edge_list (FILE *, struct edge_list *);
789 void verify_edge_list (FILE *, struct edge_list *);
790 int find_edge_index (struct edge_list *, basic_block, basic_block);
791 edge find_edge (basic_block, basic_block);
792
793
794 enum update_life_extent
795 {
796 UPDATE_LIFE_LOCAL = 0,
797 UPDATE_LIFE_GLOBAL = 1,
798 UPDATE_LIFE_GLOBAL_RM_NOTES = 2
799 };
800
801 /* Flags for life_analysis and update_life_info. */
802
803 #define PROP_DEATH_NOTES 1 /* Create DEAD and UNUSED notes. */
804 #define PROP_LOG_LINKS 2 /* Create LOG_LINKS. */
805 #define PROP_REG_INFO 4 /* Update regs_ever_live et al. */
806 #define PROP_KILL_DEAD_CODE 8 /* Remove dead code. */
807 #define PROP_SCAN_DEAD_CODE 16 /* Scan for dead code. */
808 #define PROP_ALLOW_CFG_CHANGES 32 /* Allow the CFG to be changed
809 by dead code removal. */
810 #define PROP_AUTOINC 64 /* Create autoinc mem references. */
811 #define PROP_SCAN_DEAD_STORES 128 /* Scan for dead code. */
812 #define PROP_ASM_SCAN 256 /* Internal flag used within flow.c
813 to flag analysis of asms. */
814 #define PROP_DEAD_INSN 1024 /* Internal flag used within flow.c
815 to flag analysis of dead insn. */
816 #define PROP_POST_REGSTACK 2048 /* We run after reg-stack and need
817 to preserve REG_DEAD notes for
818 stack regs. */
819 #define PROP_FINAL (PROP_DEATH_NOTES | PROP_LOG_LINKS \
820 | PROP_REG_INFO | PROP_KILL_DEAD_CODE \
821 | PROP_SCAN_DEAD_CODE | PROP_AUTOINC \
822 | PROP_ALLOW_CFG_CHANGES \
823 | PROP_SCAN_DEAD_STORES)
824 #define PROP_POSTRELOAD (PROP_DEATH_NOTES \
825 | PROP_KILL_DEAD_CODE \
826 | PROP_SCAN_DEAD_CODE \
827 | PROP_SCAN_DEAD_STORES)
828
829 #define CLEANUP_EXPENSIVE 1 /* Do relatively expensive optimizations
830 except for edge forwarding */
831 #define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
832 #define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
833 to care REG_DEAD notes. */
834 #define CLEANUP_PRE_LOOP 8 /* Take care to preserve syntactic loop
835 notes. */
836 #define CLEANUP_UPDATE_LIFE 16 /* Keep life information up to date. */
837 #define CLEANUP_THREADING 32 /* Do jump threading. */
838 #define CLEANUP_NO_INSN_DEL 64 /* Do not try to delete trivially dead
839 insns. */
840 #define CLEANUP_CFGLAYOUT 128 /* Do cleanup in cfglayout mode. */
841 #define CLEANUP_LOG_LINKS 256 /* Update log links. */
842
843 /* The following are ORed in on top of the CLEANUP* flags in calls to
844 struct_equiv_block_eq. */
845 #define STRUCT_EQUIV_START 512 /* Initializes the search range. */
846 #define STRUCT_EQUIV_RERUN 1024 /* Rerun to find register use in
847 found equivalence. */
848 #define STRUCT_EQUIV_FINAL 2048 /* Make any changes necessary to get
849 actual equivalence. */
850 #define STRUCT_EQUIV_NEED_FULL_BLOCK 4096 /* struct_equiv_block_eq is required
851 to match only full blocks */
852 #define STRUCT_EQUIV_MATCH_JUMPS 8192 /* Also include the jumps at the end of the block in the comparison. */
853
854 extern void life_analysis (FILE *, int);
855 extern int update_life_info (sbitmap, enum update_life_extent, int);
856 extern int update_life_info_in_dirty_blocks (enum update_life_extent, int);
857 extern int count_or_remove_death_notes (sbitmap, int);
858 extern int propagate_block (basic_block, regset, regset, regset, int);
859
860 struct propagate_block_info;
861 extern rtx propagate_one_insn (struct propagate_block_info *, rtx);
862 extern struct propagate_block_info *init_propagate_block_info
863 (basic_block, regset, regset, regset, int);
864 extern void free_propagate_block_info (struct propagate_block_info *);
865
866 /* In lcm.c */
867 extern struct edge_list *pre_edge_lcm (FILE *, int, sbitmap *, sbitmap *,
868 sbitmap *, sbitmap *, sbitmap **,
869 sbitmap **);
870 extern struct edge_list *pre_edge_rev_lcm (FILE *, int, sbitmap *,
871 sbitmap *, sbitmap *,
872 sbitmap *, sbitmap **,
873 sbitmap **);
874 extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
875
/* In predict.c */
extern void estimate_probability (struct loops *);
extern void expected_value_to_br_prob (void);

/* Profile-based queries on how likely a block is to be executed.  */
extern bool maybe_hot_bb_p (basic_block);
extern bool probably_cold_bb_p (basic_block);
extern bool probably_never_executed_bb_p (basic_block);

/* Recording and querying branch predictions on blocks and edges,
   for both the tree and RTL representations.  */
extern bool tree_predicted_by_p (basic_block, enum br_predictor);
extern bool rtl_predicted_by_p (basic_block, enum br_predictor);
extern void tree_predict_edge (edge, enum br_predictor, int);
extern void rtl_predict_edge (edge, enum br_predictor, int);
extern void predict_edge_def (edge, enum br_predictor, enum prediction);
extern void guess_outgoing_edge_probabilities (basic_block);
extern void remove_predictions_associated_with_edge (edge);
889
/* In flow.c */
extern void init_flow (void);

/* Debug dumps of blocks and register sets.  */
extern void debug_bb (basic_block);
extern basic_block debug_bb_n (int);
extern void dump_regset (regset, FILE *);
extern void debug_regset (regset);
extern void allocate_reg_life_data (void);

/* Basic-block bookkeeping: linking, removal, renumbering.  */
extern void expunge_block (basic_block);
extern void link_block (basic_block, basic_block);
extern void unlink_block (basic_block);
extern void compact_blocks (void);
extern basic_block alloc_block (void);
extern void find_unreachable_blocks (void);
extern int delete_noop_moves (void);
extern basic_block force_nonfallthru (edge);
extern rtx block_label (basic_block);
extern bool forwarder_block_p (basic_block);
extern bool purge_all_dead_edges (void);
extern bool purge_dead_edges (basic_block);
extern void find_many_sub_basic_blocks (sbitmap);
extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);

/* Allocation and release of the per-block and per-edge `aux' fields.  */
extern void alloc_aux_for_block (basic_block, int);
extern void alloc_aux_for_blocks (int);
extern void clear_aux_for_blocks (void);
extern void free_aux_for_blocks (void);
extern void alloc_aux_for_edge (edge, int);
extern void alloc_aux_for_edges (int);
extern void clear_aux_for_edges (void);
extern void free_aux_for_edges (void);

/* CFG construction and cleanup.  */
extern void find_basic_blocks (rtx);
extern bool cleanup_cfg (int);
extern bool delete_unreachable_blocks (void);
extern bool merge_seq_blocks (void);
927
/* A conflict graph records pairwise conflicts between register numbers.
   The representation is opaque; it is manipulated only through the
   conflict_graph_* functions below.  */
typedef struct conflict_graph_def *conflict_graph;

/* Callback function when enumerating conflicts.  The arguments are
   the smaller and larger regno in the conflict.  Returns zero if
   enumeration is to continue, nonzero to halt enumeration.  */
typedef int (*conflict_graph_enum_fn) (int, int, void *);


/* Prototypes of operations on conflict graphs.  */

extern conflict_graph conflict_graph_new (int);
extern void conflict_graph_delete (conflict_graph);
extern int conflict_graph_add (conflict_graph, int, int);
extern int conflict_graph_conflict_p (conflict_graph, int, int);
extern void conflict_graph_enum (conflict_graph, int, conflict_graph_enum_fn,
				 void *);
extern void conflict_graph_merge_regs (conflict_graph, int, int);
extern void conflict_graph_print (conflict_graph, FILE *);

/* Miscellaneous CFG and edge utilities.  */
extern bool mark_dfs_back_edges (void);
extern void set_edge_can_fallthru_flag (void);
extern void update_br_prob_note (basic_block);
extern void fixup_abnormal_edges (void);
extern bool inside_basic_block_p (rtx);
extern bool control_flow_insn_p (rtx);
953
/* In bb-reorder.c */
/* NOTE(review): the unsigned int argument looks like a flags/mode word —
   confirm its meaning against bb-reorder.c.  */
extern void reorder_basic_blocks (unsigned int);
956
/* In dominance.c */

/* Selects whether a dominance query refers to dominators or to
   post-dominators; every dominance.c entry point takes one of these.  */
enum cdi_direction
{
  CDI_DOMINATORS,
  CDI_POST_DOMINATORS
};
964
/* Validity state of the dominance information.  */
enum dom_state
{
  DOM_NONE,		/* Not computed at all.  */
  DOM_NO_FAST_QUERY,	/* The data is OK, but the fast query data
			   are not usable.  */
  DOM_OK		/* Everything is ok.  */
};

/* Current validity state of the dominance info, one entry per
   direction (indexed by enum cdi_direction).  */
extern enum dom_state dom_computed[2];
973
974 extern bool dom_info_available_p (enum cdi_direction);
975 extern void calculate_dominance_info (enum cdi_direction);
976 extern void free_dominance_info (enum cdi_direction);
977 extern basic_block nearest_common_dominator (enum cdi_direction,
978 basic_block, basic_block);
979 extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
980 bitmap);
981 extern void set_immediate_dominator (enum cdi_direction, basic_block,
982 basic_block);
983 extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
984 extern bool dominated_by_p (enum cdi_direction, basic_block, basic_block);
985 extern int get_dominated_by (enum cdi_direction, basic_block, basic_block **);
986 extern unsigned get_dominated_by_region (enum cdi_direction, basic_block *,
987 unsigned, basic_block *);
988 extern void add_to_dominance_info (enum cdi_direction, basic_block);
989 extern void delete_from_dominance_info (enum cdi_direction, basic_block);
990 basic_block recount_dominator (enum cdi_direction, basic_block);
991 extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
992 basic_block);
993 extern void iterate_fix_dominators (enum cdi_direction, basic_block *, int);
994 extern void verify_dominators (enum cdi_direction);
995 extern basic_block first_dom_son (enum cdi_direction, basic_block);
996 extern basic_block next_dom_son (enum cdi_direction, basic_block);
997 extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
998 extern void break_superblocks (void);
999 extern void check_bb_profile (basic_block, FILE *);
1000 extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
1001 extern void init_rtl_bb_info (basic_block);
1002
1003 extern void initialize_original_copy_tables (void);
1004 extern void free_original_copy_tables (void);
1005 extern void set_bb_original (basic_block, basic_block);
1006 extern basic_block get_bb_original (basic_block);
1007 extern void set_bb_copy (basic_block, basic_block);
1008 extern basic_block get_bb_copy (basic_block);
1009
1010 #include "cfghooks.h"
1011
/* In struct-equiv.c */

/* Constants used to size arrays in struct equiv_info (currently only one).
   When these limits are exceeded, struct_equiv returns zero.
   The maximum number of pseudo registers that are different in the two blocks,
   but appear in equivalent places and are dead at the end (or where one of
   a pair is dead at the end).  */
#define STRUCT_EQUIV_MAX_LOCAL 16
/* The maximum number of references to an input register that struct_equiv
   can handle.  */
/* NOTE(review): the comment just above describes a limit for which no
   macro follows; it appears stale, or its definition was removed —
   confirm against struct-equiv.c before relying on it.  */
/* Structure used to track state during struct_equiv that can be rolled
   back when we find we can't match an insn, or if we want to match part
   of it in a different way.
   This information pertains to the pair of partial blocks that has been
   matched so far.  Since this pair is structurally equivalent, this is
   conceptually just one partial block expressed in two potentially
   different ways.  */
struct struct_equiv_checkpoint
{
  int ninsns;       /* Insns matched so far.  */
  int local_count;  /* Number of block-local registers.  */
  int input_count;  /* Number of inputs to the block.  */

  /* X_START and Y_START are the first insns (in insn stream order)
     of the partial blocks that have been considered for matching so far.
     Since we are scanning backwards, they are also the instructions that
     are currently considered - or the last ones that have been considered -
     for matching (unless we tracked back to these because a preceding
     instruction failed to match).  */
  rtx x_start, y_start;

  /* INPUT_VALID indicates if we have actually set up X_INPUT / Y_INPUT
     during the current pass; we keep X_INPUT / Y_INPUT around between passes
     so that we can match REG_EQUAL / REG_EQUIV notes referring to these.  */
  bool input_valid;

  /* Some information would be expensive to exactly checkpoint, so we
     merely increment VERSION any time information about local
     registers, inputs and/or register liveness changes.  When backtracking,
     it is decremented for changes that can be undone, and if a discrepancy
     remains, NEED_RERUN in the relevant struct equiv_info is set to indicate
     that a new pass should be made over the entire block match to get
     accurate register information.  */
  int version;
};
1058
/* A struct equiv_info is used to pass information to struct_equiv and
   to gather state while two basic blocks are checked for structural
   equivalence.  */

struct equiv_info
{
  /* Fields set up by the caller to struct_equiv_block_eq.  */

  basic_block x_block, y_block;	/* The two blocks being matched.  */

  /* MODE carries the mode bits from cleanup_cfg if we are called from
     try_crossjump_to_edge, and additionally it carries the
     STRUCT_EQUIV_* bits described above.  */
  int mode;

  /* INPUT_COST is the cost that adding an extra input to the matched blocks
     is supposed to have, and is taken into account when considering if the
     matched sequence should be extended backwards.  input_cost < 0 means
     don't accept any inputs at all.  */
  int input_cost;


  /* Fields to track state inside of struct_equiv_block_eq.  Some of these
     are also outputs.  */

  /* X_INPUT and Y_INPUT are used by struct_equiv to record a register that
     is used as an input parameter, i.e. where different registers are used
     as sources.  This is only used for a register that is live at the end
     of the blocks, or in some identical code at the end of the blocks;
     inputs that are dead at the end go into X_LOCAL / Y_LOCAL.  */
  rtx x_input, y_input;
  /* When a previous pass has identified a valid input, INPUT_REG is set
     by struct_equiv_block_eq, and it is henceforth replaced in X_BLOCK
     for the input.  */
  rtx input_reg;

  /* COMMON_LIVE keeps track of the registers which are currently live
     (as we scan backwards from the end) and have the same numbers in both
     blocks.  N.B. a register that is in common_live is unsuitable to become
     a local reg.  */
  regset common_live;
  /* Likewise, X_LOCAL_LIVE / Y_LOCAL_LIVE keep track of registers that are
     local to one of the blocks; these registers must not be accepted as
     identical when encountered in both blocks.  */
  regset x_local_live, y_local_live;

  /* EQUIV_USED indicates for which insns a REG_EQUAL or REG_EQUIV note is
     being used, to avoid having to backtrack in the next pass, so that we
     get accurate life info for this insn then.  For each such insn,
     the bit with the number corresponding to the CUR.NINSNS value at the
     time of scanning is set.  */
  bitmap equiv_used;

  /* Current state that can be saved & restored easily.  */
  struct struct_equiv_checkpoint cur;
  /* BEST_MATCH is used to store the best match so far, weighing the
     cost of matched insns COSTS_N_INSNS (CUR.NINSNS) against the cost
     CUR.INPUT_COUNT * INPUT_COST of setting up the inputs.  */
  struct struct_equiv_checkpoint best_match;
  /* If a checkpoint restore failed, or an input conflict newly arises,
     NEED_RERUN is set.  This has to be tested by the caller to re-run
     the comparison if the match appears otherwise sound.  The state kept in
     x_start, y_start, equiv_used and check_input_conflict ensures that
     we won't loop indefinitely.  */
  bool need_rerun;
  /* If there is indication of an input conflict at the end,
     CHECK_INPUT_CONFLICT is set so that we'll check for input conflicts
     for each insn in the next pass.  This is needed so that we won't discard
     a partial match if there is a longer match that has to be abandoned due
     to an input conflict.  */
  bool check_input_conflict;
  /* HAD_INPUT_CONFLICT is set if CHECK_INPUT_CONFLICT was already set and we
     have passed a point where there were multiple dying inputs.  This helps
     us decide if we should set check_input_conflict for the next pass.  */
  bool had_input_conflict;

  /* LIVE_UPDATE controls if we want to change any life info at all.  We
     set it to false during REG_EQUAL / REG_EQUIV note comparison of the final
     pass so that we don't introduce new registers just for the note; if we
     can't match the notes without the current register information, we drop
     them.  */
  bool live_update;

  /* X_LOCAL and Y_LOCAL are used to gather register numbers of register pairs
     that are local to X_BLOCK and Y_BLOCK, with CUR.LOCAL_COUNT being the index
     to the next free entry.  */
  rtx x_local[STRUCT_EQUIV_MAX_LOCAL], y_local[STRUCT_EQUIV_MAX_LOCAL];
  /* LOCAL_RVALUE is nonzero if the corresponding X_LOCAL / Y_LOCAL entry
     was a source operand (including STRICT_LOW_PART) for the last invocation
     of struct_equiv mentioning it, zero if it was a destination-only operand.
     Since we are scanning backwards, this means the register is input/local
     for the (partial) block scanned so far.  */
  bool local_rvalue[STRUCT_EQUIV_MAX_LOCAL];


  /* Additional fields that are computed for the convenience of the caller.  */

  /* DYING_INPUTS is set to the number of local registers that turn out
     to be inputs to the (possibly partial) block.  */
  int dying_inputs;
  /* X_END and Y_END are the last insns in X_BLOCK and Y_BLOCK, respectively,
     that are being compared.  A final jump insn will not be included.  */
  rtx x_end, y_end;

  /* If we are matching tablejumps, X_LABEL in X_BLOCK corresponds to
     Y_LABEL in Y_BLOCK.  */
  rtx x_label, y_label;

};
1168
/* Entry points of the structural-equivalence checker (struct-equiv.c).  */
extern bool insns_match_p (rtx, rtx, struct equiv_info *);
extern int struct_equiv_block_eq (int, struct equiv_info *);
extern bool struct_equiv_init (int, struct equiv_info *);
extern bool rtx_equiv_p (rtx *, rtx, int, struct equiv_info *);

/* In cfgrtl.c */
extern bool condjump_equiv_p (struct equiv_info *, bool);
1176
1177 #endif /* GCC_BASIC_BLOCK_H */