gcc/basic-block.h
1 /* Define control and data flow tables, and regsets.
2 Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_BASIC_BLOCK_H
22 #define GCC_BASIC_BLOCK_H
23
24 #include "bitmap.h"
25 #include "sbitmap.h"
26 #include "varray.h"
27 #include "partition.h"
28 #include "hard-reg-set.h"
29 #include "predict.h"
30 #include "vec.h"
31 #include "function.h"
32
33 /* Head of register set linked list. */
34 typedef bitmap_head regset_head;
35
36 /* A pointer to a regset_head. */
37 typedef bitmap regset;
38
39 /* Allocate a register set with oballoc. */
40 #define ALLOC_REG_SET(OBSTACK) BITMAP_ALLOC (OBSTACK)
41
42 /* Do any cleanup needed on a regset when it is no longer used. */
43 #define FREE_REG_SET(REGSET) BITMAP_FREE (REGSET)
44
45 /* Initialize a new regset. */
46 #define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)
47
48 /* Clear a register set by freeing up the linked list. */
49 #define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)
50
51 /* Copy a register set to another register set. */
52 #define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)
53
54 /* Compare two register sets. */
55 #define REG_SET_EQUAL_P(A, B) bitmap_equal_p (A, B)
56
57 /* `and' a register set with a second register set. */
58 #define AND_REG_SET(TO, FROM) bitmap_and_into (TO, FROM)
59
60 /* `and' the complement of a register set with a register set. */
61 #define AND_COMPL_REG_SET(TO, FROM) bitmap_and_compl_into (TO, FROM)
62
63 /* Inclusive or a register set with a second register set. */
64 #define IOR_REG_SET(TO, FROM) bitmap_ior_into (TO, FROM)
65
66 /* Exclusive or a register set with a second register set. */
67 #define XOR_REG_SET(TO, FROM) bitmap_xor_into (TO, FROM)
68
69 /* Or into TO the register set FROM1 `and'ed with the complement of FROM2. */
70 #define IOR_AND_COMPL_REG_SET(TO, FROM1, FROM2) \
71 bitmap_ior_and_compl_into (TO, FROM1, FROM2)
72
73 /* Clear a single register in a register set. */
74 #define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)
75
76 /* Set a single register in a register set. */
77 #define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)
78
79 /* Return true if a register is set in a register set. */
80 #define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)
81
82 /* Copy the hard registers in a register set to the hard register set. */
83 extern void reg_set_to_hard_reg_set (HARD_REG_SET *, const_bitmap);
84 #define REG_SET_TO_HARD_REG_SET(TO, FROM) \
85 do { \
86 CLEAR_HARD_REG_SET (TO); \
87 reg_set_to_hard_reg_set (&TO, FROM); \
88 } while (0)
89
90 typedef bitmap_iterator reg_set_iterator;
91
 92 /* Loop over all registers in REGSET, starting with MIN, setting REGNUM to
 93 the register number of each register that is set; RSI is the iterator. */
94 #define EXECUTE_IF_SET_IN_REG_SET(REGSET, MIN, REGNUM, RSI) \
95 EXECUTE_IF_SET_IN_BITMAP (REGSET, MIN, REGNUM, RSI)
96
 97 /* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
 98 REGNUM to the register number of each register that is set in the first
 99 regset and not set in the second; RSI is the iterator. */
100 #define EXECUTE_IF_AND_COMPL_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
101 EXECUTE_IF_AND_COMPL_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
102
 103 /* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
 104 REGNUM to the register number of each register that is set in both
 105 regsets; RSI is the iterator. */
106 #define EXECUTE_IF_AND_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
 107 EXECUTE_IF_AND_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
108
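/* A minimal usage sketch of the regset macros above (illustrative only;
   LIVE, REGNO and N_HARD are hypothetical names):

     regset live = ALLOC_REG_SET (&reg_obstack);
     unsigned int regno;
     reg_set_iterator rsi;
     int n_hard = 0;

     SET_REGNO_REG_SET (live, 0);
     SET_REGNO_REG_SET (live, FIRST_PSEUDO_REGISTER + 3);

     EXECUTE_IF_SET_IN_REG_SET (live, 0, regno, rsi)
       if (regno < FIRST_PSEUDO_REGISTER)
         n_hard++;

     FREE_REG_SET (live);  */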
109 /* Type we use to hold basic block counters. Should be at least
 110 64 bits wide. Although a counter cannot be negative, we use a signed
111 type, because erroneous negative counts can be generated when the
112 flow graph is manipulated by various optimizations. A signed type
113 makes those easy to detect. */
114 typedef HOST_WIDEST_INT gcov_type;
115
116 /* Control flow edge information. */
117 struct edge_def GTY(())
118 {
119 /* The two blocks at the ends of the edge. */
120 struct basic_block_def *src;
121 struct basic_block_def *dest;
122
123 /* Instructions queued on the edge. */
124 union edge_def_insns {
125 tree GTY ((tag ("true"))) t;
126 rtx GTY ((tag ("false"))) r;
127 } GTY ((desc ("current_ir_type () == IR_GIMPLE"))) insns;
128
129 /* Auxiliary info specific to a pass. */
130 PTR GTY ((skip (""))) aux;
131
132 /* Location of any goto implicit in the edge, during tree-ssa. */
133 location_t goto_locus;
134
135 /* The index number corresponding to this edge in the edge vector
136 dest->preds. */
137 unsigned int dest_idx;
138
139 int flags; /* see EDGE_* below */
140 int probability; /* biased by REG_BR_PROB_BASE */
141 gcov_type count; /* Expected number of executions calculated
142 in profile.c */
143 };
144
145 typedef struct edge_def *edge;
146 typedef const struct edge_def *const_edge;
147 DEF_VEC_P(edge);
148 DEF_VEC_ALLOC_P(edge,gc);
149 DEF_VEC_ALLOC_P(edge,heap);
150
151 #define EDGE_FALLTHRU 1 /* 'Straight line' flow */
152 #define EDGE_ABNORMAL 2 /* Strange flow, like computed
153 label, or eh */
154 #define EDGE_ABNORMAL_CALL 4 /* Call with abnormal exit
155 like an exception, or sibcall */
156 #define EDGE_EH 8 /* Exception throw */
157 #define EDGE_FAKE 16 /* Not a real edge (profile.c) */
158 #define EDGE_DFS_BACK 32 /* A backwards edge */
159 #define EDGE_CAN_FALLTHRU 64 /* Candidate for straight line
160 flow. */
161 #define EDGE_IRREDUCIBLE_LOOP 128 /* Part of irreducible loop. */
162 #define EDGE_SIBCALL 256 /* Edge from sibcall to exit. */
163 #define EDGE_LOOP_EXIT 512 /* Exit of a loop. */
164 #define EDGE_TRUE_VALUE 1024 /* Edge taken when controlling
165 predicate is nonzero. */
166 #define EDGE_FALSE_VALUE 2048 /* Edge taken when controlling
167 predicate is zero. */
168 #define EDGE_EXECUTABLE 4096 /* Edge is executable. Only
169 valid during SSA-CCP. */
170 #define EDGE_CROSSING 8192 /* Edge crosses between hot
171 and cold sections, when we
172 do partitioning. */
173 #define EDGE_ALL_FLAGS 16383
174
175 #define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
176
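/* For instance (an illustrative sketch, not taken from any particular pass),
   code that must not operate across complex control flow can test the flags
   of an edge E directly:

     if (e->flags & EDGE_COMPLEX)
       return false;
     if ((e->flags & (EDGE_FALLTHRU | EDGE_CROSSING))
         == (EDGE_FALLTHRU | EDGE_CROSSING))
       force_nonfallthru (e);  */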
177 /* Counter summary from the last set of coverage counts read by
178 profile.c. */
179 extern const struct gcov_ctr_summary *profile_info;
180
181 /* Declared in cfgloop.h. */
182 struct loop;
183
184 /* Declared in tree-flow.h. */
185 struct edge_prediction;
186 struct rtl_bb_info;
187
 188 /* A basic block is a sequence of instructions with only one entry and
 189 only one exit. If any one of the instructions is executed, they
190 will all be executed, and in sequence from first to last.
191
192 There may be COND_EXEC instructions in the basic block. The
193 COND_EXEC *instructions* will be executed -- but if the condition
194 is false the conditionally executed *expressions* will of course
195 not be executed. We don't consider the conditionally executed
196 expression (which might have side-effects) to be in a separate
197 basic block because the program counter will always be at the same
198 location after the COND_EXEC instruction, regardless of whether the
199 condition is true or not.
200
201 Basic blocks need not start with a label nor end with a jump insn.
202 For example, a previous basic block may just "conditionally fall"
203 into the succeeding basic block, and the last basic block need not
204 end with a jump insn. Block 0 is a descendant of the entry block.
205
206 A basic block beginning with two labels cannot have notes between
207 the labels.
208
209 Data for jump tables are stored in jump_insns that occur in no
210 basic block even though these insns can follow or precede insns in
211 basic blocks. */
212
213 /* Basic block information indexed by block number. */
214 struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
215 {
216 /* The edges into and out of the block. */
217 VEC(edge,gc) *preds;
218 VEC(edge,gc) *succs;
219
220 /* Auxiliary info specific to a pass. */
221 PTR GTY ((skip (""))) aux;
222
223 /* Innermost loop containing the block. */
224 struct loop *loop_father;
225
226 /* The dominance and postdominance information node. */
227 struct et_node * GTY ((skip (""))) dom[2];
228
229 /* Previous and next blocks in the chain. */
230 struct basic_block_def *prev_bb;
231 struct basic_block_def *next_bb;
232
233 union basic_block_il_dependent {
234 struct tree_bb_info * GTY ((tag ("0"))) tree;
235 struct rtl_bb_info * GTY ((tag ("1"))) rtl;
236 } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;
237
238 /* Expected number of executions: calculated in profile.c. */
239 gcov_type count;
240
241 /* The index of this block. */
242 int index;
243
244 /* The loop depth of this block. */
245 int loop_depth;
246
247 /* Expected frequency. Normalized to be in range 0 to BB_FREQ_MAX. */
248 int frequency;
249
250 /* Various flags. See BB_* below. */
251 int flags;
252 };
253
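/* The IL union above is discriminated by the BB_RTL flag (see enum bb_flags
   below): use the rtl member only when the flag is set, and the tree member
   otherwise.  An illustrative sketch (FIRST_INSN_OR_NULL is hypothetical):

     static rtx
     first_insn_or_null (basic_block bb)
     {
       return (bb->flags & BB_RTL) ? BB_HEAD (bb) : NULL_RTX;
     }  */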
254 struct rtl_bb_info GTY(())
255 {
256 /* The first and last insns of the block. */
257 rtx head_;
258 rtx end_;
259
 260 /* In cfglayout mode, these point to the insn notes/jump tables to be
 261 placed just before and after the block. */
262 rtx header;
263 rtx footer;
264
265 /* This field is used by the bb-reorder and tracer passes. */
266 int visited;
267 };
268
269 struct tree_bb_info GTY(())
270 {
271 /* Pointers to the first and last trees of the block. */
272 tree stmt_list;
273
274 /* Chain of PHI nodes for this block. */
275 tree phi_nodes;
276 };
277
278 typedef struct basic_block_def *basic_block;
279 typedef const struct basic_block_def *const_basic_block;
280
281 DEF_VEC_P(basic_block);
282 DEF_VEC_ALLOC_P(basic_block,gc);
283 DEF_VEC_ALLOC_P(basic_block,heap);
284
285 #define BB_FREQ_MAX 10000
286
287 /* Masks for basic_block.flags.
288
289 BB_HOT_PARTITION and BB_COLD_PARTITION should be preserved throughout
290 the compilation, so they are never cleared.
291
292 All other flags may be cleared by clear_bb_flags(). It is generally
293 a bad idea to rely on any flags being up-to-date. */
294
295 enum bb_flags
296 {
297 /* Only set on blocks that have just been created by create_bb. */
298 BB_NEW = 1 << 0,
299
300 /* Set by find_unreachable_blocks. Do not rely on this being set in any
301 pass. */
302 BB_REACHABLE = 1 << 1,
303
304 /* Set for blocks in an irreducible loop by loop analysis. */
305 BB_IRREDUCIBLE_LOOP = 1 << 2,
306
 307 /* Set on blocks that may actually not be single-entry, single-exit blocks. */
308 BB_SUPERBLOCK = 1 << 3,
309
310 /* Set on basic blocks that the scheduler should not touch. This is used
311 by SMS to prevent other schedulers from messing with the loop schedule. */
312 BB_DISABLE_SCHEDULE = 1 << 4,
313
314 /* Set on blocks that should be put in a hot section. */
315 BB_HOT_PARTITION = 1 << 5,
316
317 /* Set on blocks that should be put in a cold section. */
318 BB_COLD_PARTITION = 1 << 6,
319
320 /* Set on block that was duplicated. */
321 BB_DUPLICATED = 1 << 7,
322
323 /* Set if the label at the top of this block is the target of a non-local goto. */
324 BB_NON_LOCAL_GOTO_TARGET = 1 << 8,
325
326 /* Set on blocks that are in RTL format. */
 327 BB_RTL = 1 << 9,
328
329 /* Set on blocks that are forwarder blocks.
330 Only used in cfgcleanup.c. */
331 BB_FORWARDER_BLOCK = 1 << 10,
332
333 /* Set on blocks that cannot be threaded through.
334 Only used in cfgcleanup.c. */
335 BB_NONTHREADABLE_BLOCK = 1 << 11
336 };
337
338 /* Dummy flag for convenience in the hot/cold partitioning code. */
339 #define BB_UNPARTITIONED 0
340
341 /* Partitions, to be used when partitioning hot and cold basic blocks into
342 separate sections. */
343 #define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
344 #define BB_SET_PARTITION(bb, part) do { \
345 basic_block bb_ = (bb); \
346 bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION)) \
347 | (part)); \
348 } while (0)
349
350 #define BB_COPY_PARTITION(dstbb, srcbb) \
351 BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
352
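/* For example (illustrative only), a partitioning pass could place a block
   that is predicted never to execute into the cold section:

     if (probably_never_executed_bb_p (bb))
       BB_SET_PARTITION (bb, BB_COLD_PARTITION);
     else
       BB_SET_PARTITION (bb, BB_HOT_PARTITION);  */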
353 /* State of dominance information. */
354
355 enum dom_state
356 {
357 DOM_NONE, /* Not computed at all. */
358 DOM_NO_FAST_QUERY, /* The data is OK, but the fast query data are not usable. */
359 DOM_OK /* Everything is ok. */
360 };
361
362 /* A structure to group all the per-function control flow graph data.
 363 The x_* prefixes are necessary because otherwise references to the
 364 fields of this struct would be captured by the backward-compatibility
 365 macros defined after this struct. */
366 struct control_flow_graph GTY(())
367 {
368 /* Block pointers for the exit and entry of a function.
369 These are always the head and tail of the basic block list. */
370 basic_block x_entry_block_ptr;
371 basic_block x_exit_block_ptr;
372
373 /* Index by basic block number, get basic block struct info. */
374 VEC(basic_block,gc) *x_basic_block_info;
375
376 /* Number of basic blocks in this flow graph. */
377 int x_n_basic_blocks;
378
379 /* Number of edges in this flow graph. */
380 int x_n_edges;
381
382 /* The first free basic block number. */
383 int x_last_basic_block;
384
385 /* Mapping of labels to their associated blocks. At present
386 only used for the tree CFG. */
387 VEC(basic_block,gc) *x_label_to_block_map;
388
389 enum profile_status {
390 PROFILE_ABSENT,
391 PROFILE_GUESSED,
392 PROFILE_READ
393 } x_profile_status;
394
395 /* Whether the dominators and the postdominators are available. */
396 enum dom_state x_dom_computed[2];
397
398 /* Number of basic blocks in the dominance tree. */
399 unsigned x_n_bbs_in_dom_tree[2];
400 };
401
402 /* Defines for accessing the fields of the CFG structure for function FN. */
403 #define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_entry_block_ptr)
404 #define EXIT_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_exit_block_ptr)
405 #define basic_block_info_for_function(FN) ((FN)->cfg->x_basic_block_info)
406 #define n_basic_blocks_for_function(FN) ((FN)->cfg->x_n_basic_blocks)
407 #define n_edges_for_function(FN) ((FN)->cfg->x_n_edges)
408 #define last_basic_block_for_function(FN) ((FN)->cfg->x_last_basic_block)
409 #define label_to_block_map_for_function(FN) ((FN)->cfg->x_label_to_block_map)
410
411 #define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
412 (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))
413
414 /* Defines for textual backward source compatibility. */
415 #define ENTRY_BLOCK_PTR (cfun->cfg->x_entry_block_ptr)
416 #define EXIT_BLOCK_PTR (cfun->cfg->x_exit_block_ptr)
417 #define basic_block_info (cfun->cfg->x_basic_block_info)
418 #define n_basic_blocks (cfun->cfg->x_n_basic_blocks)
419 #define n_edges (cfun->cfg->x_n_edges)
420 #define last_basic_block (cfun->cfg->x_last_basic_block)
421 #define label_to_block_map (cfun->cfg->x_label_to_block_map)
422 #define profile_status (cfun->cfg->x_profile_status)
423
424 #define BASIC_BLOCK(N) (VEC_index (basic_block, basic_block_info, (N)))
425 #define SET_BASIC_BLOCK(N,BB) (VEC_replace (basic_block, basic_block_info, (N), (BB)))
426
427 /* For iterating over basic blocks. */
428 #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
429 for (BB = FROM; BB != TO; BB = BB->DIR)
430
431 #define FOR_EACH_BB_FN(BB, FN) \
432 FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)
433
434 #define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)
435
436 #define FOR_EACH_BB_REVERSE_FN(BB, FN) \
437 FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)
438
439 #define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)
440
 441 /* For iterating over insns in a basic block. */
442 #define FOR_BB_INSNS(BB, INSN) \
443 for ((INSN) = BB_HEAD (BB); \
444 (INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
445 (INSN) = NEXT_INSN (INSN))
446
 447 /* For iterating over insns in a basic block when we might remove the
448 current insn. */
449 #define FOR_BB_INSNS_SAFE(BB, INSN, CURR) \
450 for ((INSN) = BB_HEAD (BB), (CURR) = (INSN) ? NEXT_INSN ((INSN)): NULL; \
451 (INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
452 (INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
453
454 #define FOR_BB_INSNS_REVERSE(BB, INSN) \
455 for ((INSN) = BB_END (BB); \
456 (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
457 (INSN) = PREV_INSN (INSN))
458
459 #define FOR_BB_INSNS_REVERSE_SAFE(BB, INSN, CURR) \
460 for ((INSN) = BB_END (BB),(CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL; \
461 (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
462 (INSN) = (CURR), (CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL)
463
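/* A sketch of the iterators above (illustrative only): count the real insns
   of the current function by walking every block and every insn in it.

     basic_block bb;
     rtx insn;
     int n_insns = 0;

     FOR_EACH_BB (bb)
       FOR_BB_INSNS (bb, insn)
         if (INSN_P (insn))
           n_insns++;  */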
464 /* Cycles through _all_ basic blocks, even the fake ones (entry and
465 exit block). */
466
467 #define FOR_ALL_BB(BB) \
468 for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)
469
470 #define FOR_ALL_BB_FN(BB, FN) \
471 for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
472
473 extern bitmap_obstack reg_obstack;
474
475 \f
476 /* Stuff for recording basic block info. */
477
478 #define BB_HEAD(B) (B)->il.rtl->head_
479 #define BB_END(B) (B)->il.rtl->end_
480
481 /* Special block numbers [markers] for entry and exit. */
482 #define ENTRY_BLOCK (0)
483 #define EXIT_BLOCK (1)
484
485 /* The two blocks that are always in the cfg. */
486 #define NUM_FIXED_BLOCKS (2)
487
488
489 #define BLOCK_NUM(INSN) (BLOCK_FOR_INSN (INSN)->index + 0)
490 #define set_block_for_insn(INSN, BB) (BLOCK_FOR_INSN (INSN) = BB)
491
492 extern void compute_bb_for_insn (void);
493 extern unsigned int free_bb_for_insn (void);
494 extern void update_bb_for_insn (basic_block);
495
496 extern void insert_insn_on_edge (rtx, edge);
497 basic_block split_edge_and_insert (edge, rtx);
498
499 extern void commit_edge_insertions (void);
500
501 extern void remove_fake_edges (void);
502 extern void remove_fake_exit_edges (void);
503 extern void add_noreturn_fake_exit_edges (void);
504 extern void connect_infinite_loops_to_exit (void);
505 extern edge unchecked_make_edge (basic_block, basic_block, int);
506 extern edge cached_make_edge (sbitmap, basic_block, basic_block, int);
507 extern edge make_edge (basic_block, basic_block, int);
508 extern edge make_single_succ_edge (basic_block, basic_block, int);
509 extern void remove_edge_raw (edge);
510 extern void redirect_edge_succ (edge, basic_block);
511 extern edge redirect_edge_succ_nodup (edge, basic_block);
512 extern void redirect_edge_pred (edge, basic_block);
513 extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
514 extern void clear_bb_flags (void);
515 extern int post_order_compute (int *, bool, bool);
516 extern int inverted_post_order_compute (int *);
517 extern int pre_and_rev_post_order_compute (int *, int *, bool);
518 extern int dfs_enumerate_from (basic_block, int,
519 bool (*)(const_basic_block, const void *),
520 basic_block *, int, const void *);
521 extern void compute_dominance_frontiers (bitmap *);
522 extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
523 extern void dump_edge_info (FILE *, edge, int);
524 extern void brief_dump_cfg (FILE *);
525 extern void clear_edges (void);
526 extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
527 extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
528 gcov_type);
529
530 /* Structure to group all of the information to process IF-THEN and
531 IF-THEN-ELSE blocks for the conditional execution support. This
532 needs to be in a public file in case the IFCVT macros call
533 functions passing the ce_if_block data structure. */
534
535 typedef struct ce_if_block
536 {
537 basic_block test_bb; /* First test block. */
538 basic_block then_bb; /* THEN block. */
539 basic_block else_bb; /* ELSE block or NULL. */
540 basic_block join_bb; /* Join THEN/ELSE blocks. */
541 basic_block last_test_bb; /* Last bb to hold && or || tests. */
542 int num_multiple_test_blocks; /* # of && and || basic blocks. */
543 int num_and_and_blocks; /* # of && blocks. */
544 int num_or_or_blocks; /* # of || blocks. */
545 int num_multiple_test_insns; /* # of insns in && and || blocks. */
546 int and_and_p; /* Complex test is &&. */
547 int num_then_insns; /* # of insns in THEN block. */
548 int num_else_insns; /* # of insns in ELSE block. */
549 int pass; /* Pass number. */
550
551 #ifdef IFCVT_EXTRA_FIELDS
552 IFCVT_EXTRA_FIELDS /* Any machine dependent fields. */
553 #endif
554
555 } ce_if_block_t;
556
557 /* This structure maintains an edge list vector. */
558 struct edge_list
559 {
560 int num_blocks;
561 int num_edges;
562 edge *index_to_edge;
563 };
564
565 /* The base value for branch probability notes and edge probabilities. */
566 #define REG_BR_PROB_BASE 10000
567
568 /* This is the value which indicates no edge is present. */
569 #define EDGE_INDEX_NO_EDGE -1
570
571 /* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
572 if there is no edge between the 2 basic blocks. */
573 #define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))
574
575 /* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
576 block which is either the pred or succ end of the indexed edge. */
577 #define INDEX_EDGE_PRED_BB(el, index) ((el)->index_to_edge[(index)]->src)
578 #define INDEX_EDGE_SUCC_BB(el, index) ((el)->index_to_edge[(index)]->dest)
579
580 /* INDEX_EDGE returns a pointer to the edge. */
581 #define INDEX_EDGE(el, index) ((el)->index_to_edge[(index)])
582
583 /* Number of edges in the compressed edge list. */
584 #define NUM_EDGES(el) ((el)->num_edges)
585
 586 /* BB is assumed to contain a conditional jump. Return the fallthru edge. */
587 #define FALLTHRU_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
588 ? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))
589
 590 /* BB is assumed to contain a conditional jump. Return the branch edge. */
591 #define BRANCH_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
592 ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))
593
594 /* Return expected execution frequency of the edge E. */
595 #define EDGE_FREQUENCY(e) (((e)->src->frequency \
596 * (e)->probability \
597 + REG_BR_PROB_BASE / 2) \
598 / REG_BR_PROB_BASE)
599
600 /* Return nonzero if edge is critical. */
601 #define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
602 && EDGE_COUNT ((e)->dest->preds) >= 2)
603
604 #define EDGE_COUNT(ev) VEC_length (edge, (ev))
605 #define EDGE_I(ev,i) VEC_index (edge, (ev), (i))
606 #define EDGE_PRED(bb,i) VEC_index (edge, (bb)->preds, (i))
607 #define EDGE_SUCC(bb,i) VEC_index (edge, (bb)->succs, (i))
608
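/* Using the accessors above, a pass might, for example, count the critical
   outgoing edges of BB and remember its most frequent successor edge
   (illustrative sketch only):

     unsigned ix;
     int n_critical = 0;
     edge best = NULL;

     for (ix = 0; ix < EDGE_COUNT (bb->succs); ix++)
       {
         edge e = EDGE_SUCC (bb, ix);
         if (EDGE_CRITICAL_P (e))
           n_critical++;
         if (!best || EDGE_FREQUENCY (e) > EDGE_FREQUENCY (best))
           best = e;
       }  */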
609 /* Returns true if BB has precisely one successor. */
610
611 static inline bool
612 single_succ_p (const_basic_block bb)
613 {
614 return EDGE_COUNT (bb->succs) == 1;
615 }
616
617 /* Returns true if BB has precisely one predecessor. */
618
619 static inline bool
620 single_pred_p (const_basic_block bb)
621 {
622 return EDGE_COUNT (bb->preds) == 1;
623 }
624
625 /* Returns the single successor edge of basic block BB. Aborts if
626 BB does not have exactly one successor. */
627
628 static inline edge
629 single_succ_edge (const_basic_block bb)
630 {
631 gcc_assert (single_succ_p (bb));
632 return EDGE_SUCC (bb, 0);
633 }
634
635 /* Returns the single predecessor edge of basic block BB. Aborts
636 if BB does not have exactly one predecessor. */
637
638 static inline edge
639 single_pred_edge (const_basic_block bb)
640 {
641 gcc_assert (single_pred_p (bb));
642 return EDGE_PRED (bb, 0);
643 }
644
645 /* Returns the single successor block of basic block BB. Aborts
646 if BB does not have exactly one successor. */
647
648 static inline basic_block
649 single_succ (const_basic_block bb)
650 {
651 return single_succ_edge (bb)->dest;
652 }
653
654 /* Returns the single predecessor block of basic block BB. Aborts
 655 if BB does not have exactly one predecessor. */
656
657 static inline basic_block
658 single_pred (const_basic_block bb)
659 {
660 return single_pred_edge (bb)->src;
661 }
662
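/* For instance (illustrative only), the accessors above make it easy to skip
   through a linear chain of blocks:

     while (single_succ_p (bb) && single_pred_p (single_succ (bb)))
       bb = single_succ (bb);  */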
663 /* Iterator object for edges. */
664
665 typedef struct {
666 unsigned index;
667 VEC(edge,gc) **container;
668 } edge_iterator;
669
670 static inline VEC(edge,gc) *
671 ei_container (edge_iterator i)
672 {
673 gcc_assert (i.container);
674 return *i.container;
675 }
676
677 #define ei_start(iter) ei_start_1 (&(iter))
678 #define ei_last(iter) ei_last_1 (&(iter))
679
680 /* Return an iterator pointing to the start of an edge vector. */
681 static inline edge_iterator
682 ei_start_1 (VEC(edge,gc) **ev)
683 {
684 edge_iterator i;
685
686 i.index = 0;
687 i.container = ev;
688
689 return i;
690 }
691
692 /* Return an iterator pointing to the last element of an edge
693 vector. */
694 static inline edge_iterator
695 ei_last_1 (VEC(edge,gc) **ev)
696 {
697 edge_iterator i;
698
699 i.index = EDGE_COUNT (*ev) - 1;
700 i.container = ev;
701
702 return i;
703 }
704
705 /* Is the iterator `i' at the end of the sequence? */
706 static inline bool
707 ei_end_p (edge_iterator i)
708 {
709 return (i.index == EDGE_COUNT (ei_container (i)));
710 }
711
712 /* Is the iterator `i' at one position before the end of the
713 sequence? */
714 static inline bool
715 ei_one_before_end_p (edge_iterator i)
716 {
717 return (i.index + 1 == EDGE_COUNT (ei_container (i)));
718 }
719
720 /* Advance the iterator to the next element. */
721 static inline void
722 ei_next (edge_iterator *i)
723 {
724 gcc_assert (i->index < EDGE_COUNT (ei_container (*i)));
725 i->index++;
726 }
727
728 /* Move the iterator to the previous element. */
729 static inline void
730 ei_prev (edge_iterator *i)
731 {
732 gcc_assert (i->index > 0);
733 i->index--;
734 }
735
736 /* Return the edge pointed to by the iterator `i'. */
737 static inline edge
738 ei_edge (edge_iterator i)
739 {
740 return EDGE_I (ei_container (i), i.index);
741 }
742
743 /* Return an edge pointed to by the iterator. Do it safely so that
744 NULL is returned when the iterator is pointing at the end of the
745 sequence. */
746 static inline edge
747 ei_safe_edge (edge_iterator i)
748 {
749 return !ei_end_p (i) ? ei_edge (i) : NULL;
750 }
751
752 /* Return 1 if we should continue to iterate. Return 0 otherwise.
 753 *P is set to the next edge if we are to continue to iterate
 754 and to NULL otherwise. */
755
756 static inline bool
757 ei_cond (edge_iterator ei, edge *p)
758 {
759 if (!ei_end_p (ei))
760 {
761 *p = ei_edge (ei);
762 return 1;
763 }
764 else
765 {
766 *p = NULL;
767 return 0;
768 }
769 }
770
771 /* This macro serves as a convenient way to iterate each edge in a
772 vector of predecessor or successor edges. It must not be used when
773 an element might be removed during the traversal, otherwise
774 elements will be missed. Instead, use a for-loop like that shown
775 in the following pseudo-code:
776
777 FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
778 {
779 IF (e != taken_edge)
780 remove_edge (e);
781 ELSE
782 ei_next (&ei);
783 }
784 */
785
786 #define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC) \
787 for ((ITER) = ei_start ((EDGE_VEC)); \
788 ei_cond ((ITER), &(EDGE)); \
789 ei_next (&(ITER)))
790
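/* A minimal example of the macro (illustrative only): sum the execution
   counts flowing into BB.

     edge e;
     edge_iterator ei;
     gcov_type total = 0;

     FOR_EACH_EDGE (e, ei, bb->preds)
       total += e->count;  */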
791 struct edge_list * create_edge_list (void);
792 void free_edge_list (struct edge_list *);
793 void print_edge_list (FILE *, struct edge_list *);
794 void verify_edge_list (FILE *, struct edge_list *);
795 int find_edge_index (struct edge_list *, basic_block, basic_block);
796 edge find_edge (basic_block, basic_block);
797
798 #define CLEANUP_EXPENSIVE 1 /* Do relatively expensive optimizations
799 except for edge forwarding */
800 #define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
801 #define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
 802 to care about REG_DEAD notes. */
803 #define CLEANUP_THREADING 8 /* Do jump threading. */
804 #define CLEANUP_NO_INSN_DEL 16 /* Do not try to delete trivially dead
805 insns. */
806 #define CLEANUP_CFGLAYOUT 32 /* Do cleanup in cfglayout mode. */
807
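/* For example (illustrative only), a late RTL pass might request a fairly
   aggressive cleanup:

     cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP | CLEANUP_THREADING);  */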
 808 /* The following are ORed in on top of the CLEANUP_* flags in calls to
809 struct_equiv_block_eq. */
810 #define STRUCT_EQUIV_START 64 /* Initializes the search range. */
811 #define STRUCT_EQUIV_RERUN 128 /* Rerun to find register use in
812 found equivalence. */
813 #define STRUCT_EQUIV_FINAL 256 /* Make any changes necessary to get
814 actual equivalence. */
815 #define STRUCT_EQUIV_NEED_FULL_BLOCK 512 /* struct_equiv_block_eq is required
816 to match only full blocks */
817 #define STRUCT_EQUIV_MATCH_JUMPS 1024 /* Also include the jumps at the end of the block in the comparison. */
818
819 /* In lcm.c */
820 extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
821 sbitmap *, sbitmap *, sbitmap **,
822 sbitmap **);
823 extern struct edge_list *pre_edge_rev_lcm (int, sbitmap *,
824 sbitmap *, sbitmap *,
825 sbitmap *, sbitmap **,
826 sbitmap **);
827 extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
828
829 /* In predict.c */
830 extern bool maybe_hot_bb_p (const_basic_block);
831 extern bool probably_cold_bb_p (const_basic_block);
832 extern bool probably_never_executed_bb_p (const_basic_block);
833 extern bool tree_predicted_by_p (const_basic_block, enum br_predictor);
834 extern bool rtl_predicted_by_p (const_basic_block, enum br_predictor);
835 extern void tree_predict_edge (edge, enum br_predictor, int);
836 extern void rtl_predict_edge (edge, enum br_predictor, int);
837 extern void predict_edge_def (edge, enum br_predictor, enum prediction);
838 extern void guess_outgoing_edge_probabilities (basic_block);
839 extern void remove_predictions_associated_with_edge (edge);
840 extern bool edge_probability_reliable_p (const_edge);
841 extern bool br_prob_note_reliable_p (const_rtx);
842
843 /* In cfg.c */
844 extern void dump_regset (regset, FILE *);
845 extern void debug_regset (regset);
846 extern void init_flow (void);
847 extern void debug_bb (basic_block);
848 extern basic_block debug_bb_n (int);
851 extern void expunge_block (basic_block);
852 extern void link_block (basic_block, basic_block);
853 extern void unlink_block (basic_block);
854 extern void compact_blocks (void);
855 extern basic_block alloc_block (void);
856 extern void alloc_aux_for_block (basic_block, int);
857 extern void alloc_aux_for_blocks (int);
858 extern void clear_aux_for_blocks (void);
859 extern void free_aux_for_blocks (void);
860 extern void alloc_aux_for_edge (edge, int);
861 extern void alloc_aux_for_edges (int);
862 extern void clear_aux_for_edges (void);
863 extern void free_aux_for_edges (void);
864
865 /* In cfganal.c */
866 extern void find_unreachable_blocks (void);
867 extern bool forwarder_block_p (const_basic_block);
868 extern bool can_fallthru (basic_block, basic_block);
869 extern bool could_fall_through (basic_block, basic_block);
870 extern void flow_nodes_print (const char *, const_sbitmap, FILE *);
871 extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
872
873 /* In cfgrtl.c */
874 extern basic_block force_nonfallthru (edge);
875 extern rtx block_label (basic_block);
876 extern bool purge_all_dead_edges (void);
877 extern bool purge_dead_edges (basic_block);
878
879 /* In cfgbuild.c. */
880 extern void find_many_sub_basic_blocks (sbitmap);
881 extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
882 extern void find_basic_blocks (rtx);
883
884 /* In cfgcleanup.c. */
885 extern bool cleanup_cfg (int);
886 extern bool delete_unreachable_blocks (void);
887
888 extern bool mark_dfs_back_edges (void);
889 extern void set_edge_can_fallthru_flag (void);
890 extern void update_br_prob_note (basic_block);
891 extern void fixup_abnormal_edges (void);
892 extern bool inside_basic_block_p (const_rtx);
893 extern bool control_flow_insn_p (const_rtx);
894 extern rtx get_last_bb_insn (basic_block);
895
896 /* In bb-reorder.c */
897 extern void reorder_basic_blocks (void);
898
899 /* In dominance.c */
900
901 enum cdi_direction
902 {
903 CDI_DOMINATORS = 1,
904 CDI_POST_DOMINATORS = 2
905 };
906
907 extern enum dom_state dom_info_state (enum cdi_direction);
908 extern void set_dom_info_availability (enum cdi_direction, enum dom_state);
909 extern bool dom_info_available_p (enum cdi_direction);
910 extern void calculate_dominance_info (enum cdi_direction);
911 extern void free_dominance_info (enum cdi_direction);
912 extern basic_block nearest_common_dominator (enum cdi_direction,
913 basic_block, basic_block);
914 extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
915 bitmap);
916 extern void set_immediate_dominator (enum cdi_direction, basic_block,
917 basic_block);
918 extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
919 extern bool dominated_by_p (enum cdi_direction, const_basic_block, const_basic_block);
920 extern VEC (basic_block, heap) *get_dominated_by (enum cdi_direction, basic_block);
921 extern VEC (basic_block, heap) *get_dominated_by_region (enum cdi_direction,
922 basic_block *,
923 unsigned);
924 extern void add_to_dominance_info (enum cdi_direction, basic_block);
925 extern void delete_from_dominance_info (enum cdi_direction, basic_block);
926 basic_block recompute_dominator (enum cdi_direction, basic_block);
927 extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
928 basic_block);
929 extern void iterate_fix_dominators (enum cdi_direction,
930 VEC (basic_block, heap) *, bool);
931 extern void verify_dominators (enum cdi_direction);
932 extern basic_block first_dom_son (enum cdi_direction, basic_block);
933 extern basic_block next_dom_son (enum cdi_direction, basic_block);
934 unsigned bb_dom_dfs_in (enum cdi_direction, basic_block);
935 unsigned bb_dom_dfs_out (enum cdi_direction, basic_block);
936
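/* Typical use of the dominance interface (illustrative sketch; USE_BB and
   DEF_BB are hypothetical).  DEF_BB dominating USE_BB means DEF_BB is
   executed on every path from the entry block to USE_BB.

     calculate_dominance_info (CDI_DOMINATORS);

     bool def_dominates_use
       = dominated_by_p (CDI_DOMINATORS, use_bb, def_bb);

     free_dominance_info (CDI_DOMINATORS);  */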
937 extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
938 extern void break_superblocks (void);
939 extern void relink_block_chain (bool);
940 extern void check_bb_profile (basic_block, FILE *);
941 extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
942 extern void init_rtl_bb_info (basic_block);
943
944 extern void initialize_original_copy_tables (void);
945 extern void free_original_copy_tables (void);
946 extern void set_bb_original (basic_block, basic_block);
947 extern basic_block get_bb_original (basic_block);
948 extern void set_bb_copy (basic_block, basic_block);
949 extern basic_block get_bb_copy (basic_block);
950 void set_loop_copy (struct loop *, struct loop *);
951 struct loop *get_loop_copy (struct loop *);
952
953
954 extern rtx insert_insn_end_bb_new (rtx, basic_block);
955
956 #include "cfghooks.h"
957
958 /* In struct-equiv.c */
959
960 /* Constants used to size arrays in struct equiv_info (currently only one).
961 When these limits are exceeded, struct_equiv returns zero.
962 The maximum number of pseudo registers that are different in the two blocks,
963 but appear in equivalent places and are dead at the end (or where one of
964 a pair is dead at the end). */
965 #define STRUCT_EQUIV_MAX_LOCAL 16
966 /* The maximum number of references to an input register that struct_equiv
967 can handle. */
968
969 /* Structure used to track state during struct_equiv that can be rolled
970 back when we find we can't match an insn, or if we want to match part
971 of it in a different way.
972 This information pertains to the pair of partial blocks that has been
973 matched so far. Since this pair is structurally equivalent, this is
974 conceptually just one partial block expressed in two potentially
975 different ways. */
976 struct struct_equiv_checkpoint
977 {
 978 int ninsns; /* Number of insns matched so far. */
979 int local_count; /* Number of block-local registers. */
980 int input_count; /* Number of inputs to the block. */
981
982 /* X_START and Y_START are the first insns (in insn stream order)
983 of the partial blocks that have been considered for matching so far.
984 Since we are scanning backwards, they are also the instructions that
985 are currently considered - or the last ones that have been considered -
986 for matching (Unless we tracked back to these because a preceding
987 instruction failed to match). */
988 rtx x_start, y_start;
989
990 /* INPUT_VALID indicates if we have actually set up X_INPUT / Y_INPUT
991 during the current pass; we keep X_INPUT / Y_INPUT around between passes
992 so that we can match REG_EQUAL / REG_EQUIV notes referring to these. */
993 bool input_valid;
994
995 /* Some information would be expensive to exactly checkpoint, so we
996 merely increment VERSION any time information about local
997 registers, inputs and/or register liveness changes. When backtracking,
998 it is decremented for changes that can be undone, and if a discrepancy
999 remains, NEED_RERUN in the relevant struct equiv_info is set to indicate
1000 that a new pass should be made over the entire block match to get
1001 accurate register information. */
1002 int version;
1003 };
1004
1005 /* A struct equiv_info is used to pass information to struct_equiv and
1006 to gather state while two basic blocks are checked for structural
1007 equivalence. */
1008
1009 struct equiv_info
1010 {
1011 /* Fields set up by the caller to struct_equiv_block_eq */
1012
1013 basic_block x_block, y_block; /* The two blocks being matched. */
1014
1015 /* MODE carries the mode bits from cleanup_cfg if we are called from
1016 try_crossjump_to_edge, and additionally it carries the
1017 STRUCT_EQUIV_* bits described above. */
1018 int mode;
1019
1020 /* INPUT_COST is the cost that adding an extra input to the matched blocks
1021 is supposed to have, and is taken into account when considering if the
1022 matched sequence should be extended backwards. input_cost < 0 means
1023 don't accept any inputs at all. */
1024 int input_cost;
1025
1026
1027 /* Fields to track state inside of struct_equiv_block_eq. Some of these
1028 are also outputs. */
1029
1030 /* X_INPUT and Y_INPUT are used by struct_equiv to record a register that
1031 is used as an input parameter, i.e. where different registers are used
1032 as sources. This is only used for a register that is live at the end
1033 of the blocks, or in some identical code at the end of the blocks;
1034 Inputs that are dead at the end go into X_LOCAL / Y_LOCAL. */
1035 rtx x_input, y_input;
1036 /* When a previous pass has identified a valid input, INPUT_REG is set
1037 by struct_equiv_block_eq, and it is henceforth replaced in X_BLOCK
1038 for the input. */
1039 rtx input_reg;
1040
1041 /* COMMON_LIVE keeps track of the registers which are currently live
1042 (as we scan backwards from the end) and have the same numbers in both
1043 blocks. N.B. a register that is in common_live is unsuitable to become
1044 a local reg. */
1045 regset common_live;
1046 /* Likewise, X_LOCAL_LIVE / Y_LOCAL_LIVE keep track of registers that are
1047 local to one of the blocks; these registers must not be accepted as
1048 identical when encountered in both blocks. */
1049 regset x_local_live, y_local_live;
1050
1051 /* EQUIV_USED indicates for which insns a REG_EQUAL or REG_EQUIV note is
1052 being used, to avoid having to backtrack in the next pass, so that we
1053 get accurate life info for this insn then. For each such insn,
1054 the bit with the number corresponding to the CUR.NINSNS value at the
1055 time of scanning is set. */
1056 bitmap equiv_used;
1057
1058 /* Current state that can be saved & restored easily. */
1059 struct struct_equiv_checkpoint cur;
1060 /* BEST_MATCH is used to store the best match so far, weighing the
1061 cost of matched insns COSTS_N_INSNS (CUR.NINSNS) against the cost
1062 CUR.INPUT_COUNT * INPUT_COST of setting up the inputs. */
1063 struct struct_equiv_checkpoint best_match;
1064 /* If a checkpoint restore failed, or an input conflict newly arises,
1065 NEED_RERUN is set. This has to be tested by the caller to re-run
1066 the comparison if the match appears otherwise sound. The state kept in
1067 x_start, y_start, equiv_used and check_input_conflict ensures that
1068 we won't loop indefinitely. */
1069 bool need_rerun;
1070 /* If there is indication of an input conflict at the end,
1071 CHECK_INPUT_CONFLICT is set so that we'll check for input conflicts
1072 for each insn in the next pass. This is needed so that we won't discard
1073 a partial match if there is a longer match that has to be abandoned due
1074 to an input conflict. */
1075 bool check_input_conflict;
1076 /* HAD_INPUT_CONFLICT is set if CHECK_INPUT_CONFLICT was already set and we
1077 have passed a point where there were multiple dying inputs. This helps
1078 us decide if we should set check_input_conflict for the next pass. */
1079 bool had_input_conflict;
1080
1081 /* LIVE_UPDATE controls if we want to change any life info at all. We
 1082 set it to false during REG_EQUAL / REG_EQUIV note comparison of the final
1083 pass so that we don't introduce new registers just for the note; if we
1084 can't match the notes without the current register information, we drop
1085 them. */
1086 bool live_update;
1087
1088 /* X_LOCAL and Y_LOCAL are used to gather register numbers of register pairs
1089 that are local to X_BLOCK and Y_BLOCK, with CUR.LOCAL_COUNT being the index
1090 to the next free entry. */
1091 rtx x_local[STRUCT_EQUIV_MAX_LOCAL], y_local[STRUCT_EQUIV_MAX_LOCAL];
1092 /* LOCAL_RVALUE is nonzero if the corresponding X_LOCAL / Y_LOCAL entry
1093 was a source operand (including STRICT_LOW_PART) for the last invocation
1094 of struct_equiv mentioning it, zero if it was a destination-only operand.
1095 Since we are scanning backwards, this means the register is input/local
1096 for the (partial) block scanned so far. */
1097 bool local_rvalue[STRUCT_EQUIV_MAX_LOCAL];
1098
1099
1100 /* Additional fields that are computed for the convenience of the caller. */
1101
1102 /* DYING_INPUTS is set to the number of local registers that turn out
1103 to be inputs to the (possibly partial) block. */
1104 int dying_inputs;
1105 /* X_END and Y_END are the last insns in X_BLOCK and Y_BLOCK, respectively,
1106 that are being compared. A final jump insn will not be included. */
1107 rtx x_end, y_end;
1108
1109 /* If we are matching tablejumps, X_LABEL in X_BLOCK corresponds to
1110 Y_LABEL in Y_BLOCK. */
1111 rtx x_label, y_label;
1112
1113 };
1114
1115 extern bool insns_match_p (rtx, rtx, struct equiv_info *);
1116 extern int struct_equiv_block_eq (int, struct equiv_info *);
1117 extern bool struct_equiv_init (int, struct equiv_info *);
1118 extern bool rtx_equiv_p (rtx *, rtx, int, struct equiv_info *);
1119
1120 /* In cfgcleanup.c */
1121 extern bool condjump_equiv_p (struct equiv_info *, bool);
1122
1123 /* Return true when one of the predecessor edges of BB is marked with EDGE_EH. */
1124 static inline bool
1125 bb_has_eh_pred (basic_block bb)
1126 {
1127 edge e;
1128 edge_iterator ei;
1129
1130 FOR_EACH_EDGE (e, ei, bb->preds)
1131 {
1132 if (e->flags & EDGE_EH)
1133 return true;
1134 }
1135 return false;
1136 }
1137
1138 /* Return true when one of the predecessor edges of BB is marked with EDGE_ABNORMAL. */
1139 static inline bool
1140 bb_has_abnormal_pred (basic_block bb)
1141 {
1142 edge e;
1143 edge_iterator ei;
1144
1145 FOR_EACH_EDGE (e, ei, bb->preds)
1146 {
1147 if (e->flags & EDGE_ABNORMAL)
1148 return true;
1149 }
1150 return false;
1151 }
1152
1153 /* In cfgloopmanip.c. */
1154 extern edge mfb_kj_edge;
1155 bool mfb_keep_just (edge);
1156
1157 #endif /* GCC_BASIC_BLOCK_H */