cfgloop.c (mark_loop_for_removal): Record former header when ENABLE_CHECKING.
[gcc.git] / gcc / cfgloop.h
/* Natural loop functions
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_CFGLOOP_H
#define GCC_CFGLOOP_H

#include "double-int.h"
#include "wide-int.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "function.h"

/* Structure to hold decision about unrolling/peeling.  */
enum lpt_dec
{
  LPT_NONE,
  LPT_PEEL_COMPLETELY,
  LPT_PEEL_SIMPLE,
  LPT_UNROLL_CONSTANT,
  LPT_UNROLL_RUNTIME,
  LPT_UNROLL_STUPID
};

struct GTY (()) lpt_decision {
  enum lpt_dec decision;
  unsigned times;
};

/* The type of extend applied to an IV.  */
enum iv_extend_code
{
  IV_SIGN_EXTEND,
  IV_ZERO_EXTEND,
  IV_UNKNOWN_EXTEND
};

/* The structure describing a bound on the number of iterations of a loop.  */

struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
  /* The statement STMT is executed at most ...  */
  gimple stmt;

  /* ... BOUND + 1 times (BOUND must be an unsigned constant).
     The + 1 is added for the following reasons:

     a) 0 would otherwise be unused, while we would need to care more about
        overflows (as MAX + 1 is sometimes produced as the estimate on number
        of executions of STMT).
     b) it is consistent with the result of number_of_iterations_exit.  */
  widest_int bound;

  /* True if the statement will cause the loop to be exited the (at most)
     BOUND + 1-st time it is executed, that is, all the statements after it
     are executed at most BOUND times.  */
  bool is_exit;

  /* The next bound in the list.  */
  struct nb_iter_bound *next;
};

/* Description of the loop exit.  */

struct GTY (()) loop_exit {
  /* The exit edge.  */
  edge e;

  /* Previous and next exit in the list of the exits of the loop.  */
  struct loop_exit *prev;
  struct loop_exit *next;

  /* Next element in the list of loops from which E exits.  */
  struct loop_exit *next_e;
};

typedef struct loop *loop_p;

/* An integer estimation of the number of iterations.  Estimate_state
   describes the state of the estimation.  */
enum loop_estimation
{
  /* Estimate was not computed yet.  */
  EST_NOT_COMPUTED,
  /* Estimate is ready.  */
  EST_AVAILABLE,
  EST_LAST
};

/* Structure to hold information for each natural loop.  */
struct GTY ((chain_next ("%h.next"))) loop {
  /* Index into loops array.  */
  int num;

  /* Number of loop insns.  */
  unsigned ninsns;

  /* Basic block of loop header.  */
  basic_block header;

  /* Basic block of loop latch.  */
  basic_block latch;

  /* For loop unrolling/peeling decision.  */
  struct lpt_decision lpt_decision;

  /* Average number of executed insns per iteration.  */
  unsigned av_ninsns;

  /* Number of blocks contained within the loop.  */
  unsigned num_nodes;

  /* Superloops of the loop, starting with the outermost loop.  */
  vec<loop_p, va_gc> *superloops;

  /* The first inner (child) loop or NULL if innermost loop.  */
  struct loop *inner;

  /* Link to the next (sibling) loop.  */
  struct loop *next;

  /* Auxiliary info specific to a pass.  */
  PTR GTY ((skip (""))) aux;

  /* The number of times the latch of the loop is executed.  This can be an
     INTEGER_CST, or a symbolic expression representing the number of
     iterations like "N - 1", or a COND_EXPR containing the runtime
     conditions under which the number of iterations is nonzero.

     Don't access this field directly: number_of_latch_executions
     computes and caches the computed information in this field.  */
  tree nb_iterations;

  /* An integer guaranteed to be greater than or equal to nb_iterations.
     Only valid if any_upper_bound is true.  */
  widest_int nb_iterations_upper_bound;

  /* An integer giving an estimate on nb_iterations.  Unlike
     nb_iterations_upper_bound, there is no guarantee that it is at least
     nb_iterations.  */
  widest_int nb_iterations_estimate;

  bool any_upper_bound;
  bool any_estimate;

  /* True if the loop can be parallel.  */
  bool can_be_parallel;

  /* True if -Waggressive-loop-optimizations warned about this loop
     already.  */
  bool warned_aggressive_loop_optimizations;

  /* An integer estimation of the number of iterations.  Estimate_state
     describes the state of the estimation.  */
  enum loop_estimation estimate_state;

  /* If > 0, an integer where the user asserted that for any
     I in [ 0, nb_iterations ) and for any J in
     [ I, min ( I + safelen, nb_iterations ) ), the Ith and Jth iterations
     of the loop can be safely evaluated concurrently.  */
  int safelen;

  /* True if this loop should never be vectorized.  */
  bool dont_vectorize;

  /* True if we should try harder to vectorize this loop.  */
  bool force_vectorize;

  /* For SIMD loops, this is a unique identifier of the loop, referenced
     by IFN_GOMP_SIMD_VF, IFN_GOMP_SIMD_LANE and IFN_GOMP_SIMD_LAST_LANE
     builtins.  */
  tree simduid;

  /* Upper bound on number of iterations of a loop.  */
  struct nb_iter_bound *bounds;

  /* Head of the cyclic list of the exits of the loop.  */
  struct loop_exit *exits;

  /* Number-of-iterations analysis data for RTL.  */
  struct niter_desc *simple_loop_desc;

#ifdef ENABLE_CHECKING
  /* For sanity checking during loop fixup we record here the former
     loop header for loops marked for removal.  Note that this prevents
     the basic-block from being collected but its index can still be
     reused.  */
  basic_block former_header;
#endif
};

/* Flags for state of loop structure.  */
enum
{
  LOOPS_HAVE_PREHEADERS = 1,
  LOOPS_HAVE_SIMPLE_LATCHES = 2,
  LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS = 4,
  LOOPS_HAVE_RECORDED_EXITS = 8,
  LOOPS_MAY_HAVE_MULTIPLE_LATCHES = 16,
  LOOP_CLOSED_SSA = 32,
  LOOPS_NEED_FIXUP = 64,
  LOOPS_HAVE_FALLTHRU_PREHEADERS = 128
};

#define LOOPS_NORMAL (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES \
                      | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
#define AVOID_CFG_MODIFICATIONS (LOOPS_MAY_HAVE_MULTIPLE_LATCHES)
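
/* Illustrative sketch, not part of the original header: a pass that needs
   the properties above typically requests them when switching the loop
   optimizer on and drops them again when it is done.  In a real pass this
   code would live in a .c file that includes cfgloop.h; the function name
   is made up for the example.  */

static void
example_use_loop_state (void)
{
  /* Ask for preheaders, simple latches, marked irreducible regions and
     recorded exits.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);

  /* The requested properties are reflected in the state flags.  */
  gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS
                                       | LOOPS_HAVE_RECORDED_EXITS));

  /* ... transform loops here ...  */

  loop_optimizer_finalize ();
}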

/* Structure to hold CFG information about natural loops within a function.  */
struct GTY (()) loops {
  /* State of loops.  */
  int state;

  /* Array of the loops.  */
  vec<loop_p, va_gc> *larray;

  /* Maps edges to the list of their descriptions as loop exits.  Edges
     whose sources or destinations have loop_father == NULL (which may
     happen during the cfg manipulations) should not appear in EXITS.  */
  htab_t GTY((param_is (struct loop_exit))) exits;

  /* Pointer to root of loop hierarchy tree.  */
  struct loop *tree_root;
};

/* Loop recognition.  */
bool bb_loop_header_p (basic_block);
void init_loops_structure (struct function *, struct loops *, unsigned);
extern struct loops *flow_loops_find (struct loops *);
extern void disambiguate_loops_with_multiple_latches (void);
extern void flow_loops_free (struct loops *);
extern void flow_loops_dump (FILE *,
                             void (*)(const struct loop *, FILE *, int), int);
extern void flow_loop_dump (const struct loop *, FILE *,
                            void (*)(const struct loop *, FILE *, int), int);
struct loop *alloc_loop (void);
extern void flow_loop_free (struct loop *);
int flow_loop_nodes_find (basic_block, struct loop *);
unsigned fix_loop_structure (bitmap changed_bbs);
bool mark_irreducible_loops (void);
void release_recorded_exits (void);
void record_loop_exits (void);
void rescan_loop_exit (edge, bool, bool);

/* Loop data structure manipulation/querying.  */
extern void flow_loop_tree_node_add (struct loop *, struct loop *);
extern void flow_loop_tree_node_remove (struct loop *);
extern void place_new_loop (struct function *, struct loop *);
extern void add_loop (struct loop *, struct loop *);
extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
extern struct loop * find_common_loop (struct loop *, struct loop *);
struct loop *superloop_at_depth (struct loop *, unsigned);
struct eni_weights_d;
extern int num_loop_insns (const struct loop *);
extern int average_num_loop_insns (const struct loop *);
extern unsigned get_loop_level (const struct loop *);
extern bool loop_exit_edge_p (const struct loop *, const_edge);
extern bool loop_exits_to_bb_p (struct loop *, basic_block);
extern bool loop_exits_from_bb_p (struct loop *, basic_block);
extern void mark_loop_exit_edges (void);
extern location_t get_loop_location (struct loop *loop);

/* Loops & cfg manipulation.  */
extern basic_block *get_loop_body (const struct loop *);
extern unsigned get_loop_body_with_size (const struct loop *, basic_block *,
                                         unsigned);
extern basic_block *get_loop_body_in_dom_order (const struct loop *);
extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
extern basic_block *get_loop_body_in_custom_order (const struct loop *,
                               int (*) (const void *, const void *));
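
/* Illustrative sketch, not part of the original header: the get_loop_body*
   routines return a freshly allocated array of LOOP->num_nodes basic blocks
   that the caller is expected to free.  The function name is made up; in a
   real pass this would live in a .c file that includes cfgloop.h.  */

static void
example_scan_loop_body (struct loop *loop)
{
  basic_block *body = get_loop_body (loop);

  for (unsigned i = 0; i < loop->num_nodes; i++)
    /* Every block in the array lies within LOOP (or one of its subloops).  */
    gcc_assert (flow_bb_inside_loop_p (loop, body[i]));

  free (body);
}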

extern vec<edge> get_loop_exit_edges (const struct loop *);
extern edge single_exit (const struct loop *);
extern edge single_likely_exit (struct loop *loop);
extern unsigned num_loop_branches (const struct loop *);
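
/* Illustrative sketch, not part of the original header: get_loop_exit_edges
   returns a heap vector that the caller releases.  The function name is
   made up for the example.  */

static unsigned
example_count_exits (struct loop *loop)
{
  vec<edge> exits = get_loop_exit_edges (loop);
  unsigned i;
  edge e;

  FOR_EACH_VEC_ELT (exits, i, e)
    /* Each element really is an exit: its source is in LOOP, its
       destination is not.  */
    gcc_assert (loop_exit_edge_p (loop, e));

  unsigned n = exits.length ();
  exits.release ();
  return n;
}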

extern edge loop_preheader_edge (const struct loop *);
extern edge loop_latch_edge (const struct loop *);

extern void add_bb_to_loop (basic_block, struct loop *);
extern void remove_bb_from_loops (basic_block);

extern void cancel_loop_tree (struct loop *);
extern void delete_loop (struct loop *);

enum
{
  CP_SIMPLE_PREHEADERS = 1,
  CP_FALLTHRU_PREHEADERS = 2
};

basic_block create_preheader (struct loop *, int);
extern void create_preheaders (int);
extern void force_single_succ_latches (void);

extern void verify_loop_structure (void);

/* Loop analysis.  */
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
gcov_type expected_loop_iterations_unbounded (const struct loop *);
extern unsigned expected_loop_iterations (const struct loop *);
extern rtx doloop_condition_get (rtx);


/* Loop manipulation.  */
extern bool can_duplicate_loop_p (const struct loop *loop);

#define DLTHE_FLAG_UPDATE_FREQ 1    /* Update frequencies in
                                       duplicate_loop_to_header_edge.  */
#define DLTHE_RECORD_COPY_NUMBER 2  /* Record copy number in the aux
                                       field of newly created BBs.  */
#define DLTHE_FLAG_COMPLETTE_PEEL 4 /* Update frequencies expecting
                                       a complete peeling.  */

extern edge create_empty_if_region_on_edge (edge, tree);
extern struct loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
                                               tree *, tree *, struct loop *);
extern struct loop * duplicate_loop (struct loop *, struct loop *);
extern void copy_loop_info (struct loop *loop, struct loop *target);
extern void duplicate_subloops (struct loop *, struct loop *);
extern bool duplicate_loop_to_header_edge (struct loop *, edge,
                                           unsigned, sbitmap, edge,
                                           vec<edge> *, int);
extern struct loop *loopify (edge, edge,
                             basic_block, edge, edge, bool,
                             unsigned, unsigned);
struct loop * loop_version (struct loop *, void *,
                            basic_block *, unsigned, unsigned, unsigned, bool);
extern bool remove_path (edge);
extern void unloop (struct loop *, bool *, bitmap);
extern void scale_loop_frequencies (struct loop *, int, int);
void mark_loop_for_removal (loop_p);


/* Induction variable analysis.  */

/* The description of induction variable.  Things are a bit complicated
   due to the need to handle subregs and extends.  The value of the object
   described by it can be obtained as follows (all computations are done in
   extend_mode):

   Value in i-th iteration is
     delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step)).

   If first_special is true, the value in the first iteration is
     delta + mult * base

   If extend = IV_UNKNOWN_EXTEND, first_special must be false, delta 0,
   mult 1 and the value is
     subreg_{mode} (base + i * step)

   The get_iv_value function can be used to obtain these expressions.

   ??? Add a third mode field that would specify the mode in which the
   inner computation is done, which would enable it to be different from
   the outer one?  */

struct rtx_iv
{
  /* Its base and step (mode of base and step is supposed to be extend_mode,
     see the description above).  */
  rtx base, step;

  /* The type of extend applied to it (IV_SIGN_EXTEND, IV_ZERO_EXTEND,
     or IV_UNKNOWN_EXTEND).  */
  enum iv_extend_code extend;

  /* Operations applied in the extended mode.  */
  rtx delta, mult;

  /* The mode it is extended to.  */
  enum machine_mode extend_mode;

  /* The mode the variable iterates in.  */
  enum machine_mode mode;

  /* Whether the first iteration needs to be handled specially.  */
  unsigned first_special : 1;
};
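
/* Worked instance of the formula above, added for illustration (not part of
   the original text): with delta = 0, mult = 1, extend = IV_SIGN_EXTEND,
   extend_mode = DImode and mode = SImode, the value of the IV in the i-th
   iteration reduces to

     sign_extend:DI (subreg:SI (base + i * step))

   where base and step are extend_mode (DImode) expressions, as required by
   the field comments above.  */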

/* The description of an exit from the loop and of the number of iterations
   till we take the exit.  */

struct GTY(()) niter_desc
{
  /* The edge out of the loop.  */
  edge out_edge;

  /* The other edge leading from the condition.  */
  edge in_edge;

  /* True if we are able to say anything about the number of iterations of
     the loop.  */
  bool simple_p;

  /* True if the loop iterates a constant number of times.  */
  bool const_iter;

  /* Number of iterations if constant.  */
  uint64_t niter;

  /* Assumptions under which the rest of the information is valid.  */
  rtx assumptions;

  /* Assumptions under which the loop ends before reaching the latch,
     even if the value of niter_expr says otherwise.  */
  rtx noloop_assumptions;

  /* Condition under which the loop is infinite.  */
  rtx infinite;

  /* Whether the comparison is signed.  */
  bool signed_p;

  /* The mode in which niter_expr should be computed.  */
  enum machine_mode mode;

  /* The number of iterations of the loop.  */
  rtx niter_expr;
};

extern void iv_analysis_loop_init (struct loop *);
extern bool iv_analyze (rtx_insn *, rtx, struct rtx_iv *);
extern bool iv_analyze_result (rtx_insn *, rtx, struct rtx_iv *);
extern bool iv_analyze_expr (rtx_insn *, rtx, enum machine_mode,
                             struct rtx_iv *);
extern rtx get_iv_value (struct rtx_iv *, rtx);
extern bool biv_p (rtx_insn *, rtx);
extern void find_simple_exit (struct loop *, struct niter_desc *);
extern void iv_analysis_done (void);
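
/* Illustrative sketch, not part of the original header: typical use of the
   RTL IV analysis declared above.  The function name is made up for the
   example; real callers live in .c files including cfgloop.h and call
   iv_analysis_done once the whole pass is finished.  */

static bool
example_reg_is_simple_iv (struct loop *loop, rtx_insn *insn, rtx reg)
{
  struct rtx_iv iv;

  /* Prepare the per-loop caches, then ask about the value of REG at INSN.  */
  iv_analysis_loop_init (loop);
  if (!iv_analyze (insn, reg, &iv))
    return false;

  /* IV now describes REG as documented for struct rtx_iv; a zero step
     means the value is loop invariant.  */
  return iv.step != const0_rtx;
}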

extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
extern void free_simple_loop_desc (struct loop *loop);

static inline struct niter_desc *
simple_loop_desc (struct loop *loop)
{
  return loop->simple_loop_desc;
}
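
/* Illustrative sketch, not part of the original header: querying the RTL
   number-of-iterations analysis through the cached descriptor.  The
   function name is made up for the example.  */

static bool
example_loop_has_constant_niter (struct loop *loop, uint64_t *niter)
{
  /* get_simple_loop_desc computes the descriptor on first use and caches
     it in loop->simple_loop_desc (see simple_loop_desc above).  */
  struct niter_desc *desc = get_simple_loop_desc (loop);

  if (!desc->simple_p || !desc->const_iter)
    return false;

  *niter = desc->niter;
  return true;
}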

/* Accessors for the loop structures.  */

/* Returns the loop with index NUM from FN's loop tree.  */

static inline struct loop *
get_loop (struct function *fn, unsigned num)
{
  return (*loops_for_fn (fn)->larray)[num];
}

/* Returns the number of superloops of LOOP.  */

static inline unsigned
loop_depth (const struct loop *loop)
{
  return vec_safe_length (loop->superloops);
}

/* Returns the immediate superloop of LOOP, or NULL if LOOP is the outermost
   loop.  */

static inline struct loop *
loop_outer (const struct loop *loop)
{
  unsigned n = vec_safe_length (loop->superloops);

  if (n == 0)
    return NULL;

  return (*loop->superloops)[n - 1];
}
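
/* Illustrative sketch, not part of the original header: loop_depth counts
   exactly the loops reached by repeatedly applying loop_outer, and the root
   of the loop tree has depth 0.  The function name is made up for the
   example.  */

static unsigned
example_count_superloops (const struct loop *loop)
{
  unsigned depth = 0;

  for (const struct loop *outer = loop_outer (loop);
       outer != NULL;
       outer = loop_outer (outer))
    depth++;

  /* The hand-counted value agrees with the cached superloop vector.  */
  gcc_assert (depth == loop_depth (loop));
  return depth;
}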

/* Returns true if LOOP has at least one exit edge.  */

static inline bool
loop_has_exit_edges (const struct loop *loop)
{
  return loop->exits->next->e != NULL;
}

/* Returns the list of loops in FN.  */

inline vec<loop_p, va_gc> *
get_loops (struct function *fn)
{
  struct loops *loops = loops_for_fn (fn);
  if (!loops)
    return NULL;

  return loops->larray;
}

/* Returns the number of loops in FN (including the removed
   ones and the fake loop that forms the root of the loop tree).  */

static inline unsigned
number_of_loops (struct function *fn)
{
  struct loops *loops = loops_for_fn (fn);
  if (!loops)
    return 0;

  return vec_safe_length (loops->larray);
}

/* Returns true if the state of the loops satisfies all properties
   described by FLAGS.  */

static inline bool
loops_state_satisfies_p (unsigned flags)
{
  return (current_loops->state & flags) == flags;
}

/* Sets FLAGS on the loops state.  */

static inline void
loops_state_set (unsigned flags)
{
  current_loops->state |= flags;
}

/* Clears FLAGS from the loops state.  */

static inline void
loops_state_clear (unsigned flags)
{
  if (!current_loops)
    return;
  current_loops->state &= ~flags;
}

/* Loop iterators.  */

/* Flags for loop iteration.  */

enum li_flags
{
  LI_INCLUDE_ROOT = 1,    /* Include the fake root of the loop tree.  */
  LI_FROM_INNERMOST = 2,  /* Iterate over the loops in the reverse order,
                             starting from innermost ones.  */
  LI_ONLY_INNERMOST = 4   /* Iterate only over innermost loops.  */
};

/* The iterator for loops.  */

struct loop_iterator
{
  loop_iterator (loop_p *loop, unsigned flags);
  ~loop_iterator ();

  inline loop_p next ();

  /* The list of loops to visit.  */
  vec<int> to_visit;

  /* The index of the next loop to visit.  */
  unsigned idx;
};

inline loop_p
loop_iterator::next ()
{
  int anum;

  while (this->to_visit.iterate (this->idx, &anum))
    {
      this->idx++;
      loop_p loop = get_loop (cfun, anum);
      if (loop)
        return loop;
    }

  return NULL;
}

inline
loop_iterator::loop_iterator (loop_p *loop, unsigned flags)
{
  struct loop *aloop;
  unsigned i;
  int mn;

  this->idx = 0;
  if (!current_loops)
    {
      this->to_visit.create (0);
      *loop = NULL;
      return;
    }

  this->to_visit.create (number_of_loops (cfun));
  mn = (flags & LI_INCLUDE_ROOT) ? 0 : 1;

  if (flags & LI_ONLY_INNERMOST)
    {
      for (i = 0; vec_safe_iterate (current_loops->larray, i, &aloop); i++)
        if (aloop != NULL
            && aloop->inner == NULL
            && aloop->num >= mn)
          this->to_visit.quick_push (aloop->num);
    }
  else if (flags & LI_FROM_INNERMOST)
    {
      /* Push the loops to TO_VISIT in postorder.  */
      for (aloop = current_loops->tree_root;
           aloop->inner != NULL;
           aloop = aloop->inner)
        continue;

      while (1)
        {
          if (aloop->num >= mn)
            this->to_visit.quick_push (aloop->num);

          if (aloop->next)
            {
              for (aloop = aloop->next;
                   aloop->inner != NULL;
                   aloop = aloop->inner)
                continue;
            }
          else if (!loop_outer (aloop))
            break;
          else
            aloop = loop_outer (aloop);
        }
    }
  else
    {
      /* Push the loops to TO_VISIT in preorder.  */
      aloop = current_loops->tree_root;
      while (1)
        {
          if (aloop->num >= mn)
            this->to_visit.quick_push (aloop->num);

          if (aloop->inner != NULL)
            aloop = aloop->inner;
          else
            {
              while (aloop != NULL && aloop->next == NULL)
                aloop = loop_outer (aloop);
              if (aloop == NULL)
                break;
              aloop = aloop->next;
            }
        }
    }

  *loop = this->next ();
}

inline
loop_iterator::~loop_iterator ()
{
  this->to_visit.release ();
}

#define FOR_EACH_LOOP(LOOP, FLAGS) \
  for (loop_iterator li(&(LOOP), FLAGS); \
       (LOOP); \
       (LOOP) = li.next ())
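
/* Illustrative sketch, not part of the original header: the usual way to
   walk the loop tree.  LOOP must be an lvalue of type struct loop *; when
   there is no loop structure the body is not entered at all.  The function
   name and the filtering on can_duplicate_loop_p are made up for the
   example.  */

static void
example_visit_innermost_first (void)
{
  struct loop *loop;

  FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
    {
      /* Inner loops are visited before the loops containing them.  */
      if (!can_duplicate_loop_p (loop))
        continue;

      /* ... process LOOP ...  */
    }
}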

/* The properties of the target.  */
struct target_cfgloop {
  /* Number of available registers.  */
  unsigned x_target_avail_regs;

  /* Number of available registers that are call-clobbered.  */
  unsigned x_target_clobbered_regs;

  /* Number of registers reserved for temporary expressions.  */
  unsigned x_target_res_regs;

  /* The cost of a register when there is still some reserve, but we are
     approaching the number of available registers.  */
  unsigned x_target_reg_cost[2];

  /* The cost of a register when we need to spill.  */
  unsigned x_target_spill_cost[2];
};

extern struct target_cfgloop default_target_cfgloop;
#if SWITCHABLE_TARGET
extern struct target_cfgloop *this_target_cfgloop;
#else
#define this_target_cfgloop (&default_target_cfgloop)
#endif

#define target_avail_regs \
  (this_target_cfgloop->x_target_avail_regs)
#define target_clobbered_regs \
  (this_target_cfgloop->x_target_clobbered_regs)
#define target_res_regs \
  (this_target_cfgloop->x_target_res_regs)
#define target_reg_cost \
  (this_target_cfgloop->x_target_reg_cost)
#define target_spill_cost \
  (this_target_cfgloop->x_target_spill_cost)

/* Register pressure estimation for induction variable optimizations & loop
   invariant motion.  */
extern unsigned estimate_reg_pressure_cost (unsigned, unsigned, bool, bool);
extern void init_set_costs (void);

/* Loop optimizer initialization.  */
extern void loop_optimizer_init (unsigned);
extern void loop_optimizer_finalize (void);

/* Optimization passes.  */
enum
{
  UAP_PEEL = 1,       /* Enables loop peeling.  */
  UAP_UNROLL = 2,     /* Enables unrolling of loops if it seems profitable.  */
  UAP_UNROLL_ALL = 4  /* Enables unrolling of all loops.  */
};

extern void unroll_and_peel_loops (int);
extern void doloop_optimize_loops (void);
extern void move_loop_invariants (void);
extern void scale_loop_profile (struct loop *loop, int scale,
                                gcov_type iteration_bound);
extern vec<basic_block> get_loop_hot_path (const struct loop *loop);

/* Returns the outermost loop of the loop nest that contains LOOP.  */
static inline struct loop *
loop_outermost (struct loop *loop)
{
  unsigned n = vec_safe_length (loop->superloops);

  if (n <= 1)
    return loop;

  return (*loop->superloops)[1];
}

extern void record_niter_bound (struct loop *, const widest_int &, bool, bool);
extern HOST_WIDE_INT get_estimated_loop_iterations_int (struct loop *);
extern HOST_WIDE_INT get_max_loop_iterations_int (struct loop *);
extern bool get_estimated_loop_iterations (struct loop *loop, widest_int *nit);
extern bool get_max_loop_iterations (struct loop *loop, widest_int *nit);
extern int bb_loop_depth (const_basic_block);
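
/* Illustrative sketch, not part of the original header: reading back the
   recorded iteration information.  The function name is made up, and the
   convention that the _int variant returns a negative value when nothing
   usable is known is an assumption of this example.  */

static bool
example_have_iteration_info (struct loop *loop)
{
  widest_int nit;

  /* A recorded maximum is a guarantee on the number of latch executions.  */
  if (get_max_loop_iterations (loop, &nit))
    return true;

  /* An estimate carries no guarantee; assumed convention: negative result
     means "unknown".  */
  return get_estimated_loop_iterations_int (loop) >= 0;
}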

/* Converts VAL to widest_int.  */

static inline widest_int
gcov_type_to_wide_int (gcov_type val)
{
  HOST_WIDE_INT a[2];

  a[0] = (unsigned HOST_WIDE_INT) val;
  /* If HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_WIDEST_INT, avoid shifting by
     the size of type.  */
  val >>= HOST_BITS_PER_WIDE_INT - 1;
  val >>= 1;
  a[1] = (unsigned HOST_WIDE_INT) val;

  return widest_int::from_array (a, 2);
}
#endif /* GCC_CFGLOOP_H */