gcc/cfgloop.c
1 /* Natural loop discovery code for GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hashtab.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "symtab.h"
29 #include "inchash.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "predict.h"
34 #include "dominance.h"
35 #include "cfg.h"
36 #include "cfganal.h"
37 #include "basic-block.h"
38 #include "cfgloop.h"
39 #include "diagnostic-core.h"
40 #include "flags.h"
41 #include "tree.h"
42 #include "fold-const.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "gimple-ssa.h"
50 #include "dumpfile.h"
51
52 static void flow_loops_cfg_dump (FILE *);
53 \f
54 /* Dump loop related CFG information. */
55
56 static void
57 flow_loops_cfg_dump (FILE *file)
58 {
59 basic_block bb;
60
61 if (!file)
62 return;
63
64 FOR_EACH_BB_FN (bb, cfun)
65 {
66 edge succ;
67 edge_iterator ei;
68
69 fprintf (file, ";; %d succs { ", bb->index);
70 FOR_EACH_EDGE (succ, ei, bb->succs)
71 fprintf (file, "%d ", succ->dest->index);
72 fprintf (file, "}\n");
73 }
74 }
75
76 /* Return nonzero if the nodes of LOOP are a subset of OUTER. */
77
78 bool
79 flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
80 {
81 unsigned odepth = loop_depth (outer);
82
83 return (loop_depth (loop) > odepth
84 && (*loop->superloops)[odepth] == outer);
85 }
86
87 /* Returns the superloop of LOOP at depth DEPTH (indexed from zero);
88 DEPTH equal to loop_depth (LOOP) returns LOOP itself. */
89
90 struct loop *
91 superloop_at_depth (struct loop *loop, unsigned depth)
92 {
93 unsigned ldepth = loop_depth (loop);
94
95 gcc_assert (depth <= ldepth);
96
97 if (depth == ldepth)
98 return loop;
99
100 return (*loop->superloops)[depth];
101 }
102
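/* Illustrative note (added for exposition, not from the original sources):
   for a loop B nested as  tree_root (depth 0) -> A (depth 1) -> B (depth 2),
   B->superloops is {tree_root, A}, so

     superloop_at_depth (B, 0) == current_loops->tree_root
     superloop_at_depth (B, 1) == A
     superloop_at_depth (B, 2) == B

   and flow_loop_nested_p (A, B) is true, while flow_loop_nested_p (B, B) is
   false because that test requires a strictly greater depth.  */
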
103 /* Returns the list of the latch edges of LOOP. */
104
105 static vec<edge>
106 get_loop_latch_edges (const struct loop *loop)
107 {
108 edge_iterator ei;
109 edge e;
110 vec<edge> ret = vNULL;
111
112 FOR_EACH_EDGE (e, ei, loop->header->preds)
113 {
114 if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header))
115 ret.safe_push (e);
116 }
117
118 return ret;
119 }
120
121 /* Dump the loop information specified by LOOP to the stream FILE
122 using the auxiliary dump callback LOOP_DUMP_AUX if it is non-null. */
123
124 void
125 flow_loop_dump (const struct loop *loop, FILE *file,
126 void (*loop_dump_aux) (const struct loop *, FILE *, int),
127 int verbose)
128 {
129 basic_block *bbs;
130 unsigned i;
131 vec<edge> latches;
132 edge e;
133
134 if (! loop || ! loop->header)
135 return;
136
137 fprintf (file, ";;\n;; Loop %d\n", loop->num);
138
139 fprintf (file, ";; header %d, ", loop->header->index);
140 if (loop->latch)
141 fprintf (file, "latch %d\n", loop->latch->index);
142 else
143 {
144 fprintf (file, "multiple latches:");
145 latches = get_loop_latch_edges (loop);
146 FOR_EACH_VEC_ELT (latches, i, e)
147 fprintf (file, " %d", e->src->index);
148 latches.release ();
149 fprintf (file, "\n");
150 }
151
152 fprintf (file, ";; depth %d, outer %ld\n",
153 loop_depth (loop), (long) (loop_outer (loop)
154 ? loop_outer (loop)->num : -1));
155
156 fprintf (file, ";; nodes:");
157 bbs = get_loop_body (loop);
158 for (i = 0; i < loop->num_nodes; i++)
159 fprintf (file, " %d", bbs[i]->index);
160 free (bbs);
161 fprintf (file, "\n");
162
163 if (loop_dump_aux)
164 loop_dump_aux (loop, file, verbose);
165 }
166
167 /* Dump information about all loops to the stream FILE,
168 using the auxiliary dump callback LOOP_DUMP_AUX if it is non-null. */
169
170 void
171 flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
172 {
173 struct loop *loop;
174
175 if (!current_loops || ! file)
176 return;
177
178 fprintf (file, ";; %d loops found\n", number_of_loops (cfun));
179
180 FOR_EACH_LOOP (loop, LI_INCLUDE_ROOT)
181 {
182 flow_loop_dump (loop, file, loop_dump_aux, verbose);
183 }
184
185 if (verbose)
186 flow_loops_cfg_dump (file);
187 }
188
189 /* Free data allocated for LOOP. */
190
191 void
192 flow_loop_free (struct loop *loop)
193 {
194 struct loop_exit *exit, *next;
195
196 vec_free (loop->superloops);
197
198 /* Break the list of the loop exit records. They will be freed when the
199 corresponding edge is rescanned or removed, and this avoids
200 accessing the (already released) head of the list stored in the
201 loop structure. */
202 for (exit = loop->exits->next; exit != loop->exits; exit = next)
203 {
204 next = exit->next;
205 exit->next = exit;
206 exit->prev = exit;
207 }
208
209 ggc_free (loop->exits);
210 ggc_free (loop);
211 }
212
213 /* Free all the memory allocated for LOOPS. */
214
215 void
216 flow_loops_free (struct loops *loops)
217 {
218 if (loops->larray)
219 {
220 unsigned i;
221 loop_p loop;
222
223 /* Free the loop descriptors. */
224 FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
225 {
226 if (!loop)
227 continue;
228
229 flow_loop_free (loop);
230 }
231
232 vec_free (loops->larray);
233 }
234 }
235
236 /* Find the nodes contained within the LOOP with header HEADER.
237 Return the number of nodes within the loop. */
238
239 int
240 flow_loop_nodes_find (basic_block header, struct loop *loop)
241 {
242 vec<basic_block> stack = vNULL;
243 int num_nodes = 1;
244 edge latch;
245 edge_iterator latch_ei;
246
247 header->loop_father = loop;
248
249 FOR_EACH_EDGE (latch, latch_ei, loop->header->preds)
250 {
251 if (latch->src->loop_father == loop
252 || !dominated_by_p (CDI_DOMINATORS, latch->src, loop->header))
253 continue;
254
255 num_nodes++;
256 stack.safe_push (latch->src);
257 latch->src->loop_father = loop;
258
259 while (!stack.is_empty ())
260 {
261 basic_block node;
262 edge e;
263 edge_iterator ei;
264
265 node = stack.pop ();
266
267 FOR_EACH_EDGE (e, ei, node->preds)
268 {
269 basic_block ancestor = e->src;
270
271 if (ancestor->loop_father != loop)
272 {
273 ancestor->loop_father = loop;
274 num_nodes++;
275 stack.safe_push (ancestor);
276 }
277 }
278 }
279 }
280 stack.release ();
281
282 return num_nodes;
283 }
284
285 /* Records the vector of superloops of the loop LOOP, whose immediate
286 superloop is FATHER. */
287
288 static void
289 establish_preds (struct loop *loop, struct loop *father)
290 {
291 loop_p ploop;
292 unsigned depth = loop_depth (father) + 1;
293 unsigned i;
294
295 loop->superloops = 0;
296 vec_alloc (loop->superloops, depth);
297 FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
298 loop->superloops->quick_push (ploop);
299 loop->superloops->quick_push (father);
300
301 for (ploop = loop->inner; ploop; ploop = ploop->next)
302 establish_preds (ploop, loop);
303 }
304
305 /* Add LOOP to the loop hierarchy tree where FATHER is the father of the
306 added loop. If LOOP has children, make sure that their superloop
307 vectors will be initialized correctly. */
308
309 void
310 flow_loop_tree_node_add (struct loop *father, struct loop *loop)
311 {
312 loop->next = father->inner;
313 father->inner = loop;
314
315 establish_preds (loop, father);
316 }
317
318 /* Remove LOOP from the loop hierarchy tree. */
319
320 void
321 flow_loop_tree_node_remove (struct loop *loop)
322 {
323 struct loop *prev, *father;
324
325 father = loop_outer (loop);
326
327 /* Remove loop from the list of sons. */
328 if (father->inner == loop)
329 father->inner = loop->next;
330 else
331 {
332 for (prev = father->inner; prev->next != loop; prev = prev->next)
333 continue;
334 prev->next = loop->next;
335 }
336
337 loop->superloops = NULL;
338 }
339
340 /* Allocates and returns a new loop structure. */
341
342 struct loop *
343 alloc_loop (void)
344 {
345 struct loop *loop = ggc_cleared_alloc<struct loop> ();
346
347 loop->exits = ggc_cleared_alloc<loop_exit> ();
348 loop->exits->next = loop->exits->prev = loop->exits;
349 loop->can_be_parallel = false;
350 loop->nb_iterations_upper_bound = 0;
351 loop->nb_iterations_estimate = 0;
352 return loop;
353 }
354
355 /* Initializes the loops structure LOOPS, reserving room for NUM_LOOPS loops
356 (including the root of the loop tree). */
357
358 void
359 init_loops_structure (struct function *fn,
360 struct loops *loops, unsigned num_loops)
361 {
362 struct loop *root;
363
364 memset (loops, 0, sizeof *loops);
365 vec_alloc (loops->larray, num_loops);
366
367 /* Dummy loop containing whole function. */
368 root = alloc_loop ();
369 root->num_nodes = n_basic_blocks_for_fn (fn);
370 root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
371 root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
372 ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
373 EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
374
375 loops->larray->quick_push (root);
376 loops->tree_root = root;
377 }
378
379 /* Returns whether HEADER is a loop header. */
380
381 bool
382 bb_loop_header_p (basic_block header)
383 {
384 edge_iterator ei;
385 edge e;
386
387 /* If we have an abnormal predecessor, do not consider the
388 loop (not worth the problems). */
389 if (bb_has_abnormal_pred (header))
390 return false;
391
392 /* Look for back edges where a predecessor is dominated
393 by this block. A natural loop has a single entry
394 node (header) that dominates all the nodes in the
395 loop. It also has a single back edge to the header
396 from a latch node. */
397 FOR_EACH_EDGE (e, ei, header->preds)
398 {
399 basic_block latch = e->src;
400 if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
401 && dominated_by_p (CDI_DOMINATORS, latch, header))
402 return true;
403 }
404
405 return false;
406 }
407
408 /* Find all the natural loops in the function, save them in the LOOPS structure,
409 and recalculate the loop_father information in the basic block structures.
410 If LOOPS is non-NULL then the loop structures for already recorded loops
411 will be re-used and their number will not change. We assume that no
412 stale loops exist in LOOPS.
413 When LOOPS is NULL it is allocated and re-built from scratch.
414 Return the built LOOPS structure. */
415
416 struct loops *
417 flow_loops_find (struct loops *loops)
418 {
419 bool from_scratch = (loops == NULL);
420 int *rc_order;
421 int b;
422 unsigned i;
423
424 /* Ensure that the dominators are computed. */
425 calculate_dominance_info (CDI_DOMINATORS);
426
427 if (!loops)
428 {
429 loops = ggc_cleared_alloc<struct loops> ();
430 init_loops_structure (cfun, loops, 1);
431 }
432
433 /* Ensure that loop exits were released. */
434 gcc_assert (loops->exits == NULL);
435
436 /* Taking care of this degenerate case makes the rest of
437 this code simpler. */
438 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
439 return loops;
440
441 /* The root loop node contains all basic-blocks. */
442 loops->tree_root->num_nodes = n_basic_blocks_for_fn (cfun);
443
444 /* Compute depth first search order of the CFG so that outer
445 natural loops will be found before inner natural loops. */
446 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
447 pre_and_rev_post_order_compute (NULL, rc_order, false);
448
449 /* Gather all loop headers in reverse completion order and allocate
450 loop structures for loops that are not already present. */
451 auto_vec<loop_p> larray (loops->larray->length ());
452 for (b = 0; b < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; b++)
453 {
454 basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
455 if (bb_loop_header_p (header))
456 {
457 struct loop *loop;
458
459 /* The current active loop tree has valid loop-fathers for
460 header blocks. */
461 if (!from_scratch
462 && header->loop_father->header == header)
463 {
464 loop = header->loop_father;
465 /* If we found an existing loop remove it from the
466 loop tree. It is going to be inserted again
467 below. */
468 flow_loop_tree_node_remove (loop);
469 }
470 else
471 {
472 /* Otherwise allocate a new loop structure for the loop. */
473 loop = alloc_loop ();
474 /* ??? We could re-use unused loop slots here. */
475 loop->num = loops->larray->length ();
476 vec_safe_push (loops->larray, loop);
477 loop->header = header;
478
479 if (!from_scratch
480 && dump_file && (dump_flags & TDF_DETAILS))
481 fprintf (dump_file, "flow_loops_find: discovered new "
482 "loop %d with header %d\n",
483 loop->num, header->index);
484 }
485 /* Reset latch, we recompute it below. */
486 loop->latch = NULL;
487 larray.safe_push (loop);
488 }
489
490 /* Make blocks part of the loop root node at start. */
491 header->loop_father = loops->tree_root;
492 }
493
494 free (rc_order);
495
496 /* Now iterate over the loops found, insert them into the loop tree
497 and assign basic-block ownership. */
498 for (i = 0; i < larray.length (); ++i)
499 {
500 struct loop *loop = larray[i];
501 basic_block header = loop->header;
502 edge_iterator ei;
503 edge e;
504
505 flow_loop_tree_node_add (header->loop_father, loop);
506 loop->num_nodes = flow_loop_nodes_find (loop->header, loop);
507
508 /* Look for the latch for this header block, if it has just a
509 single one. */
510 FOR_EACH_EDGE (e, ei, header->preds)
511 {
512 basic_block latch = e->src;
513
514 if (flow_bb_inside_loop_p (loop, latch))
515 {
516 if (loop->latch != NULL)
517 {
518 /* More than one latch edge. */
519 loop->latch = NULL;
520 break;
521 }
522 loop->latch = latch;
523 }
524 }
525 }
526
527 return loops;
528 }
529
530 /* Ratio of edge frequencies at which one of several latch edges is
531 considered to belong to an inner loop with the same header. */
532 #define HEAVY_EDGE_RATIO 8
533
534 /* Minimum number of samples for which we apply the
535 find_subloop_latch_edge_by_profile heuristic. */
536 #define HEAVY_EDGE_MIN_SAMPLES 10
537
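/* Worked example (added for exposition, with made-up counts): with
   HEAVY_EDGE_RATIO == 8 the chosen edge must carry at least 7/8 of the total
   latch count, since find_subloop_latch_edge_by_profile below gives up when
   (tcount - mcount) * HEAVY_EDGE_RATIO > tcount.  For latch counts
   {90, 6, 4}: tcount == 100, mcount == 90, (100 - 90) * 8 == 80 <= 100, so
   the 90-count edge is returned.  For counts {60, 40}: (100 - 60) * 8 == 320
   > 100, so NULL is returned.  Totals below HEAVY_EDGE_MIN_SAMPLES are
   rejected as statistically meaningless.  */
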
538 /* If the profile info is available, finds an edge in LATCHES that is much more
539 frequent than the remaining edges. Returns such an edge, or NULL if we do
540 not find one.
541
542 We do not use guessed profile here, only the measured one. The guessed
543 profile is usually too flat and unreliable for this (and it is mostly based
544 on the loop structure of the program, so it does not make much sense to
545 derive the loop structure from it). */
546
547 static edge
548 find_subloop_latch_edge_by_profile (vec<edge> latches)
549 {
550 unsigned i;
551 edge e, me = NULL;
552 gcov_type mcount = 0, tcount = 0;
553
554 FOR_EACH_VEC_ELT (latches, i, e)
555 {
556 if (e->count > mcount)
557 {
558 me = e;
559 mcount = e->count;
560 }
561 tcount += e->count;
562 }
563
564 if (tcount < HEAVY_EDGE_MIN_SAMPLES
565 || (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)
566 return NULL;
567
568 if (dump_file)
569 fprintf (dump_file,
570 "Found latch edge %d -> %d using profile information.\n",
571 me->src->index, me->dest->index);
572 return me;
573 }
574
575 /* Among LATCHES, guesses a latch edge of LOOP corresponding to a subloop, based
576 on the structure of induction variables. Returns this edge, or NULL if we
577 do not find any.
578
579 We are quite conservative, and look just for an obvious simple innermost
580 loop (which is the case where we would lose the most performance by not
581 disambiguating the loop). More precisely, we look for the following
582 situation: The source of the chosen latch edge dominates sources of all
583 the other latch edges. Additionally, the header does not contain a phi node
584 such that the argument from the chosen edge is equal to the argument from
585 another edge. */
586
587 static edge
588 find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
589 {
590 edge e, latch = latches[0];
591 unsigned i;
592 gphi *phi;
593 gphi_iterator psi;
594 tree lop;
595 basic_block bb;
596
597 /* Find the candidate for the latch edge. */
598 for (i = 1; latches.iterate (i, &e); i++)
599 if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src))
600 latch = e;
601
602 /* Verify that it dominates all the latch edges. */
603 FOR_EACH_VEC_ELT (latches, i, e)
604 if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))
605 return NULL;
606
607 /* Check for a phi node that would deny that this is a latch edge of
608 a subloop. */
609 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
610 {
611 phi = psi.phi ();
612 lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
613
614 /* Ignore the values that are not changed inside the subloop. */
615 if (TREE_CODE (lop) != SSA_NAME
616 || SSA_NAME_DEF_STMT (lop) == phi)
617 continue;
618 bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
619 if (!bb || !flow_bb_inside_loop_p (loop, bb))
620 continue;
621
622 FOR_EACH_VEC_ELT (latches, i, e)
623 if (e != latch
624 && PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)
625 return NULL;
626 }
627
628 if (dump_file)
629 fprintf (dump_file,
630 "Found latch edge %d -> %d using iv structure.\n",
631 latch->src->index, latch->dest->index);
632 return latch;
633 }
634
635 /* If we can determine that one of the several latch edges of LOOP behaves
636 as a latch edge of a separate subloop, returns this edge. Otherwise
637 returns NULL. */
638
639 static edge
640 find_subloop_latch_edge (struct loop *loop)
641 {
642 vec<edge> latches = get_loop_latch_edges (loop);
643 edge latch = NULL;
644
645 if (latches.length () > 1)
646 {
647 latch = find_subloop_latch_edge_by_profile (latches);
648
649 if (!latch
650 /* We consider ivs to guess the latch edge only in SSA. Perhaps we
651 should use cfghook for this, but it is hard to imagine it would
652 be useful elsewhere. */
653 && current_ir_type () == IR_GIMPLE)
654 latch = find_subloop_latch_edge_by_ivs (loop, latches);
655 }
656
657 latches.release ();
658 return latch;
659 }
660
661 /* Callback for make_forwarder_block. Returns true if the edge E is marked
662 in the set MFB_REIS_SET. */
663
664 static hash_set<edge> *mfb_reis_set;
665 static bool
666 mfb_redirect_edges_in_set (edge e)
667 {
668 return mfb_reis_set->contains (e);
669 }
670
671 /* Creates a subloop of LOOP with latch edge LATCH. */
672
673 static void
674 form_subloop (struct loop *loop, edge latch)
675 {
676 edge_iterator ei;
677 edge e, new_entry;
678 struct loop *new_loop;
679
680 mfb_reis_set = new hash_set<edge>;
681 FOR_EACH_EDGE (e, ei, loop->header->preds)
682 {
683 if (e != latch)
684 mfb_reis_set->add (e);
685 }
686 new_entry = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
687 NULL);
688 delete mfb_reis_set;
689
690 loop->header = new_entry->src;
691
692 /* Find the blocks and subloops that belong to the new loop, and add it to
693 the appropriate place in the loop tree. */
694 new_loop = alloc_loop ();
695 new_loop->header = new_entry->dest;
696 new_loop->latch = latch->src;
697 add_loop (new_loop, loop);
698 }
699
700 /* Make all the latch edges of LOOP go to a single forwarder block --
701 the new latch of LOOP. */
702
703 static void
704 merge_latch_edges (struct loop *loop)
705 {
706 vec<edge> latches = get_loop_latch_edges (loop);
707 edge latch, e;
708 unsigned i;
709
710 gcc_assert (latches.length () > 0);
711
712 if (latches.length () == 1)
713 loop->latch = latches[0]->src;
714 else
715 {
716 if (dump_file)
717 fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);
718
719 mfb_reis_set = new hash_set<edge>;
720 FOR_EACH_VEC_ELT (latches, i, e)
721 mfb_reis_set->add (e);
722 latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
723 NULL);
724 delete mfb_reis_set;
725
726 loop->header = latch->dest;
727 loop->latch = latch->src;
728 }
729
730 latches.release ();
731 }
732
733 /* LOOP may have several latch edges. Transform it into (possibly several)
734 loops, each with a single latch edge. */
735
736 static void
737 disambiguate_multiple_latches (struct loop *loop)
738 {
739 edge e;
740
741 /* We eliminate the multiple latches by splitting the header to the forwarder
742 block F and the rest R, and redirecting the edges. There are two cases:
743
744 1) If there is a latch edge E that corresponds to a subloop (we guess
745 that based on profile -- if it is taken much more often than the
746 remaining edges; and on trees, using the information about induction
747 variables of the loops), we redirect E to R, all the remaining edges to
748 F, then rescan the loops and try again for the outer loop.
749 2) If there is no such edge, we redirect all latch edges to F, and the
750 entry edges to R, thus making F the single latch of the loop. */
751
752 if (dump_file)
753 fprintf (dump_file, "Disambiguating loop %d with multiple latches\n",
754 loop->num);
755
756 /* During latch merging, we may need to redirect the entry edges to a new
757 block. This would cause problems if the entry edge was the one from the
758 entry block. To avoid having to handle this case specially, split
759 such entry edge. */
760 e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);
761 if (e)
762 split_edge (e);
763
764 while (1)
765 {
766 e = find_subloop_latch_edge (loop);
767 if (!e)
768 break;
769
770 form_subloop (loop, e);
771 }
772
773 merge_latch_edges (loop);
774 }
775
776 /* Split loops with multiple latch edges. */
777
778 void
779 disambiguate_loops_with_multiple_latches (void)
780 {
781 struct loop *loop;
782
783 FOR_EACH_LOOP (loop, 0)
784 {
785 if (!loop->latch)
786 disambiguate_multiple_latches (loop);
787 }
788 }
789
790 /* Return nonzero if basic block BB belongs to LOOP. */
791 bool
792 flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
793 {
794 struct loop *source_loop;
795
796 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
797 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
798 return 0;
799
800 source_loop = bb->loop_father;
801 return loop == source_loop || flow_loop_nested_p (loop, source_loop);
802 }
803
804 /* Enumeration predicate for get_loop_body_with_size. */
805 static bool
806 glb_enum_p (const_basic_block bb, const void *glb_loop)
807 {
808 const struct loop *const loop = (const struct loop *) glb_loop;
809 return (bb != loop->header
810 && dominated_by_p (CDI_DOMINATORS, bb, loop->header));
811 }
812
813 /* Gets the basic blocks of LOOP. The header is the 0-th block, the rest are
814 in dfs order against the direction of edges from the latch. In particular, if
815 header != latch, the latch is the 1-st block. LOOP cannot be the fake
816 loop tree root, and its size must be at most MAX_SIZE. The blocks
817 in the LOOP body are stored to BODY, and the size of the LOOP is
818 returned. */
819
820 unsigned
821 get_loop_body_with_size (const struct loop *loop, basic_block *body,
822 unsigned max_size)
823 {
824 return dfs_enumerate_from (loop->header, 1, glb_enum_p,
825 body, max_size, loop);
826 }
827
828 /* Gets the basic blocks of LOOP. The header is the 0-th block, the rest are
829 in dfs order against the direction of edges from the latch. In particular, if
830 header != latch, the latch is the 1-st block. */
831
832 basic_block *
833 get_loop_body (const struct loop *loop)
834 {
835 basic_block *body, bb;
836 unsigned tv = 0;
837
838 gcc_assert (loop->num_nodes);
839
840 body = XNEWVEC (basic_block, loop->num_nodes);
841
842 if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
843 {
844 /* There may be blocks unreachable from EXIT_BLOCK, hence we need to
845 special-case the fake loop that contains the whole function. */
846 gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
847 body[tv++] = loop->header;
848 body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
849 FOR_EACH_BB_FN (bb, cfun)
850 body[tv++] = bb;
851 }
852 else
853 tv = get_loop_body_with_size (loop, body, loop->num_nodes);
854
855 gcc_assert (tv == loop->num_nodes);
856 return body;
857 }
858
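/* Usage sketch (added for exposition, not from the original sources): the
   array returned by get_loop_body is allocated with XNEWVEC and owned by the
   caller, so the usual pattern is

     basic_block *body = get_loop_body (loop);
     for (unsigned i = 0; i < loop->num_nodes; i++)
       process_block (body[i]);
     free (body);

   where process_block stands for whatever per-block work the caller does.  */
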
859 /* Fills the dominance descendants of basic block BB that lie inside LOOP
860 into the array TOVISIT, starting at index *TV. */
861
862 static void
863 fill_sons_in_loop (const struct loop *loop, basic_block bb,
864 basic_block *tovisit, int *tv)
865 {
866 basic_block son, postpone = NULL;
867
868 tovisit[(*tv)++] = bb;
869 for (son = first_dom_son (CDI_DOMINATORS, bb);
870 son;
871 son = next_dom_son (CDI_DOMINATORS, son))
872 {
873 if (!flow_bb_inside_loop_p (loop, son))
874 continue;
875
876 if (dominated_by_p (CDI_DOMINATORS, loop->latch, son))
877 {
878 postpone = son;
879 continue;
880 }
881 fill_sons_in_loop (loop, son, tovisit, tv);
882 }
883
884 if (postpone)
885 fill_sons_in_loop (loop, postpone, tovisit, tv);
886 }
887
888 /* Gets the body of LOOP (which must be different from the outermost loop),
889 sorted by the dominance relation. Additionally, if a basic block s dominates
890 the latch, then only blocks dominated by s are placed after it. */
891
892 basic_block *
893 get_loop_body_in_dom_order (const struct loop *loop)
894 {
895 basic_block *tovisit;
896 int tv;
897
898 gcc_assert (loop->num_nodes);
899
900 tovisit = XNEWVEC (basic_block, loop->num_nodes);
901
902 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
903
904 tv = 0;
905 fill_sons_in_loop (loop, loop->header, tovisit, &tv);
906
907 gcc_assert (tv == (int) loop->num_nodes);
908
909 return tovisit;
910 }
911
912 /* Gets body of a LOOP sorted via provided BB_COMPARATOR. */
913
914 basic_block *
915 get_loop_body_in_custom_order (const struct loop *loop,
916 int (*bb_comparator) (const void *, const void *))
917 {
918 basic_block *bbs = get_loop_body (loop);
919
920 qsort (bbs, loop->num_nodes, sizeof (basic_block), bb_comparator);
921
922 return bbs;
923 }
924
925 /* Get body of a LOOP in breadth first sort order. */
926
927 basic_block *
928 get_loop_body_in_bfs_order (const struct loop *loop)
929 {
930 basic_block *blocks;
931 basic_block bb;
932 bitmap visited;
933 unsigned int i = 0;
934 unsigned int vc = 1;
935
936 gcc_assert (loop->num_nodes);
937 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
938
939 blocks = XNEWVEC (basic_block, loop->num_nodes);
940 visited = BITMAP_ALLOC (NULL);
941
942 bb = loop->header;
943 while (i < loop->num_nodes)
944 {
945 edge e;
946 edge_iterator ei;
947
948 if (bitmap_set_bit (visited, bb->index))
949 /* This basic block is now visited */
950 blocks[i++] = bb;
951
952 FOR_EACH_EDGE (e, ei, bb->succs)
953 {
954 if (flow_bb_inside_loop_p (loop, e->dest))
955 {
956 if (bitmap_set_bit (visited, e->dest->index))
957 blocks[i++] = e->dest;
958 }
959 }
960
961 gcc_assert (i >= vc);
962
963 bb = blocks[vc++];
964 }
965
966 BITMAP_FREE (visited);
967 return blocks;
968 }
969
970 /* Hash function for struct loop_exit. */
971
972 hashval_t
973 loop_exit_hasher::hash (loop_exit *exit)
974 {
975 return htab_hash_pointer (exit->e);
976 }
977
978 /* Equality function for struct loop_exit. Compares with edge. */
979
980 bool
981 loop_exit_hasher::equal (loop_exit *exit, edge e)
982 {
983 return exit->e == e;
984 }
985
986 /* Frees the list of loop exit descriptions starting at EXIT. */
987
988 void
989 loop_exit_hasher::remove (loop_exit *exit)
990 {
991 loop_exit *next;
992 for (; exit; exit = next)
993 {
994 next = exit->next_e;
995
996 exit->next->prev = exit->prev;
997 exit->prev->next = exit->next;
998
999 ggc_free (exit);
1000 }
1001 }
1002
1003 /* Returns the list of records for E as an exit of a loop. */
1004
1005 static struct loop_exit *
1006 get_exit_descriptions (edge e)
1007 {
1008 return current_loops->exits->find_with_hash (e, htab_hash_pointer (e));
1009 }
1010
1011 /* Updates the lists of loop exits in which E appears.
1012 If REMOVED is true, E is being removed, and we
1013 just remove it from the lists of exits.
1014 If NEW_EDGE is true and E is not a loop exit, we
1015 do not try to remove it from loop exit lists. */
1016
1017 void
1018 rescan_loop_exit (edge e, bool new_edge, bool removed)
1019 {
1020 struct loop_exit *exits = NULL, *exit;
1021 struct loop *aloop, *cloop;
1022
1023 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1024 return;
1025
1026 if (!removed
1027 && e->src->loop_father != NULL
1028 && e->dest->loop_father != NULL
1029 && !flow_bb_inside_loop_p (e->src->loop_father, e->dest))
1030 {
1031 cloop = find_common_loop (e->src->loop_father, e->dest->loop_father);
1032 for (aloop = e->src->loop_father;
1033 aloop != cloop;
1034 aloop = loop_outer (aloop))
1035 {
1036 exit = ggc_alloc<loop_exit> ();
1037 exit->e = e;
1038
1039 exit->next = aloop->exits->next;
1040 exit->prev = aloop->exits;
1041 exit->next->prev = exit;
1042 exit->prev->next = exit;
1043
1044 exit->next_e = exits;
1045 exits = exit;
1046 }
1047 }
1048
1049 if (!exits && new_edge)
1050 return;
1051
1052 loop_exit **slot
1053 = current_loops->exits->find_slot_with_hash (e, htab_hash_pointer (e),
1054 exits ? INSERT : NO_INSERT);
1055 if (!slot)
1056 return;
1057
1058 if (exits)
1059 {
1060 if (*slot)
1061 loop_exit_hasher::remove (*slot);
1062 *slot = exits;
1063 }
1064 else
1065 current_loops->exits->clear_slot (slot);
1066 }
1067
1068 /* For each loop, record the list of exit edges, and start maintaining these
1069 lists. */
1070
1071 void
1072 record_loop_exits (void)
1073 {
1074 basic_block bb;
1075 edge_iterator ei;
1076 edge e;
1077
1078 if (!current_loops)
1079 return;
1080
1081 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1082 return;
1083 loops_state_set (LOOPS_HAVE_RECORDED_EXITS);
1084
1085 gcc_assert (current_loops->exits == NULL);
1086 current_loops->exits
1087 = hash_table<loop_exit_hasher>::create_ggc (2 * number_of_loops (cfun));
1088
1089 FOR_EACH_BB_FN (bb, cfun)
1090 {
1091 FOR_EACH_EDGE (e, ei, bb->succs)
1092 {
1093 rescan_loop_exit (e, true, false);
1094 }
1095 }
1096 }
1097
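/* Usage sketch (added for exposition, not from the original sources): a pass
   that wants cheap exit-edge queries typically brackets its work with

     record_loop_exits ();
     ... use single_exit (loop) or get_loop_exit_edges (loop) ...
     release_recorded_exits ();

   Once LOOPS_HAVE_RECORDED_EXITS is set, rescan_loop_exit keeps the per-loop
   exit lists up to date as edges are added or removed.  */
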
1098 /* Dumps information about the exit in *SLOT to FILE.
1099 Callback for htab_traverse. */
1100
1101 int
1102 dump_recorded_exit (loop_exit **slot, FILE *file)
1103 {
1104 struct loop_exit *exit = *slot;
1105 unsigned n = 0;
1106 edge e = exit->e;
1107
1108 for (; exit != NULL; exit = exit->next_e)
1109 n++;
1110
1111 fprintf (file, "Edge %d->%d exits %u loops\n",
1112 e->src->index, e->dest->index, n);
1113
1114 return 1;
1115 }
1116
1117 /* Dumps the recorded exits of loops to FILE. */
1118
1119 extern void dump_recorded_exits (FILE *);
1120 void
1121 dump_recorded_exits (FILE *file)
1122 {
1123 if (!current_loops->exits)
1124 return;
1125 current_loops->exits->traverse<FILE *, dump_recorded_exit> (file);
1126 }
1127
1128 /* Releases lists of loop exits. */
1129
1130 void
1131 release_recorded_exits (void)
1132 {
1133 gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS));
1134 current_loops->exits->empty ();
1135 current_loops->exits = NULL;
1136 loops_state_clear (LOOPS_HAVE_RECORDED_EXITS);
1137 }
1138
1139 /* Returns the list of the exit edges of a LOOP. */
1140
1141 vec<edge>
1142 get_loop_exit_edges (const struct loop *loop)
1143 {
1144 vec<edge> edges = vNULL;
1145 edge e;
1146 unsigned i;
1147 basic_block *body;
1148 edge_iterator ei;
1149 struct loop_exit *exit;
1150
1151 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1152
1153 /* If we maintain the lists of exits, use them. Otherwise we must
1154 scan the body of the loop. */
1155 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1156 {
1157 for (exit = loop->exits->next; exit->e; exit = exit->next)
1158 edges.safe_push (exit->e);
1159 }
1160 else
1161 {
1162 body = get_loop_body (loop);
1163 for (i = 0; i < loop->num_nodes; i++)
1164 FOR_EACH_EDGE (e, ei, body[i]->succs)
1165 {
1166 if (!flow_bb_inside_loop_p (loop, e->dest))
1167 edges.safe_push (e);
1168 }
1169 free (body);
1170 }
1171
1172 return edges;
1173 }
1174
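/* Usage sketch (added for exposition, not from the original sources): the
   returned vector owns heap storage, so the caller must release it, e.g.

     vec<edge> exits = get_loop_exit_edges (loop);
     unsigned i;
     edge ex;
     FOR_EACH_VEC_ELT (exits, i, ex)
       handle_exit (ex);
     exits.release ();

   where handle_exit stands for the caller's per-edge processing.  */
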
1175 /* Counts the number of conditional branches inside LOOP. */
1176
1177 unsigned
1178 num_loop_branches (const struct loop *loop)
1179 {
1180 unsigned i, n;
1181 basic_block * body;
1182
1183 gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
1184
1185 body = get_loop_body (loop);
1186 n = 0;
1187 for (i = 0; i < loop->num_nodes; i++)
1188 if (EDGE_COUNT (body[i]->succs) >= 2)
1189 n++;
1190 free (body);
1191
1192 return n;
1193 }
1194
1195 /* Adds basic block BB to LOOP. */
1196 void
1197 add_bb_to_loop (basic_block bb, struct loop *loop)
1198 {
1199 unsigned i;
1200 loop_p ploop;
1201 edge_iterator ei;
1202 edge e;
1203
1204 gcc_assert (bb->loop_father == NULL);
1205 bb->loop_father = loop;
1206 loop->num_nodes++;
1207 FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1208 ploop->num_nodes++;
1209
1210 FOR_EACH_EDGE (e, ei, bb->succs)
1211 {
1212 rescan_loop_exit (e, true, false);
1213 }
1214 FOR_EACH_EDGE (e, ei, bb->preds)
1215 {
1216 rescan_loop_exit (e, true, false);
1217 }
1218 }
1219
1220 /* Remove basic block BB from loops. */
1221 void
1222 remove_bb_from_loops (basic_block bb)
1223 {
1224 unsigned i;
1225 struct loop *loop = bb->loop_father;
1226 loop_p ploop;
1227 edge_iterator ei;
1228 edge e;
1229
1230 gcc_assert (loop != NULL);
1231 loop->num_nodes--;
1232 FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
1233 ploop->num_nodes--;
1234 bb->loop_father = NULL;
1235
1236 FOR_EACH_EDGE (e, ei, bb->succs)
1237 {
1238 rescan_loop_exit (e, false, true);
1239 }
1240 FOR_EACH_EDGE (e, ei, bb->preds)
1241 {
1242 rescan_loop_exit (e, false, true);
1243 }
1244 }
1245
1246 /* Finds nearest common ancestor in loop tree for given loops. */
1247 struct loop *
1248 find_common_loop (struct loop *loop_s, struct loop *loop_d)
1249 {
1250 unsigned sdepth, ddepth;
1251
1252 if (!loop_s) return loop_d;
1253 if (!loop_d) return loop_s;
1254
1255 sdepth = loop_depth (loop_s);
1256 ddepth = loop_depth (loop_d);
1257
1258 if (sdepth < ddepth)
1259 loop_d = (*loop_d->superloops)[sdepth];
1260 else if (sdepth > ddepth)
1261 loop_s = (*loop_s->superloops)[ddepth];
1262
1263 while (loop_s != loop_d)
1264 {
1265 loop_s = loop_outer (loop_s);
1266 loop_d = loop_outer (loop_d);
1267 }
1268 return loop_s;
1269 }
1270
1271 /* Removes LOOP from structures and frees its data. */
1272
1273 void
1274 delete_loop (struct loop *loop)
1275 {
1276 /* Remove the loop from structure. */
1277 flow_loop_tree_node_remove (loop);
1278
1279 /* Remove loop from loops array. */
1280 (*current_loops->larray)[loop->num] = NULL;
1281
1282 /* Free loop data. */
1283 flow_loop_free (loop);
1284 }
1285
1286 /* Cancels the LOOP; it must be the innermost one. */
1287
1288 static void
1289 cancel_loop (struct loop *loop)
1290 {
1291 basic_block *bbs;
1292 unsigned i;
1293 struct loop *outer = loop_outer (loop);
1294
1295 gcc_assert (!loop->inner);
1296
1297 /* Move blocks up one level (they should be removed as soon as possible). */
1298 bbs = get_loop_body (loop);
1299 for (i = 0; i < loop->num_nodes; i++)
1300 bbs[i]->loop_father = outer;
1301
1302 free (bbs);
1303 delete_loop (loop);
1304 }
1305
1306 /* Cancels LOOP and all its subloops. */
1307 void
1308 cancel_loop_tree (struct loop *loop)
1309 {
1310 while (loop->inner)
1311 cancel_loop_tree (loop->inner);
1312 cancel_loop (loop);
1313 }
1314
1315 /* Checks that information about loops is correct
1316 -- sizes of loops are all right
1317 -- results of get_loop_body really belong to the loop
1318 -- loop headers have just a single entry edge and a single latch edge
1319 -- loop latches have only a single successor, which is the header of their loop
1320 -- irreducible loops are correctly marked
1321 -- the cached loop depth and loop father of each bb are correct
1322 */
1323 DEBUG_FUNCTION void
1324 verify_loop_structure (void)
1325 {
1326 unsigned *sizes, i, j;
1327 sbitmap irreds;
1328 basic_block bb, *bbs;
1329 struct loop *loop;
1330 int err = 0;
1331 edge e;
1332 unsigned num = number_of_loops (cfun);
1333 struct loop_exit *exit, *mexit;
1334 bool dom_available = dom_info_available_p (CDI_DOMINATORS);
1335 sbitmap visited;
1336
1337 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
1338 {
1339 error ("loop verification on loop tree that needs fixup");
1340 err = 1;
1341 }
1342
1343 /* We need up-to-date dominators; compute or verify them. */
1344 if (!dom_available)
1345 calculate_dominance_info (CDI_DOMINATORS);
1346 else
1347 verify_dominators (CDI_DOMINATORS);
1348
1349 /* Check the loop tree root. */
1350 if (current_loops->tree_root->header != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1351 || current_loops->tree_root->latch != EXIT_BLOCK_PTR_FOR_FN (cfun)
1352 || (current_loops->tree_root->num_nodes
1353 != (unsigned) n_basic_blocks_for_fn (cfun)))
1354 {
1355 error ("corrupt loop tree root");
1356 err = 1;
1357 }
1358
1359 /* Check the headers. */
1360 FOR_EACH_BB_FN (bb, cfun)
1361 if (bb_loop_header_p (bb))
1362 {
1363 if (bb->loop_father->header == NULL)
1364 {
1365 error ("loop with header %d marked for removal", bb->index);
1366 err = 1;
1367 }
1368 else if (bb->loop_father->header != bb)
1369 {
1370 error ("loop with header %d not in loop tree", bb->index);
1371 err = 1;
1372 }
1373 }
1374 else if (bb->loop_father->header == bb)
1375 {
1376 error ("non-loop with header %d not marked for removal", bb->index);
1377 err = 1;
1378 }
1379
1380 /* Check the recorded loop father and sizes of loops. */
1381 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
1382 bitmap_clear (visited);
1383 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
1384 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
1385 {
1386 unsigned n;
1387
1388 if (loop->header == NULL)
1389 {
1390 error ("removed loop %d in loop tree", loop->num);
1391 err = 1;
1392 continue;
1393 }
1394
1395 n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
1396 if (loop->num_nodes != n)
1397 {
1398 error ("size of loop %d should be %d, not %d",
1399 loop->num, n, loop->num_nodes);
1400 err = 1;
1401 }
1402
1403 for (j = 0; j < n; j++)
1404 {
1405 bb = bbs[j];
1406
1407 if (!flow_bb_inside_loop_p (loop, bb))
1408 {
1409 error ("bb %d does not belong to loop %d",
1410 bb->index, loop->num);
1411 err = 1;
1412 }
1413
1414 /* Ignore this block if it is in an inner loop. */
1415 if (bitmap_bit_p (visited, bb->index))
1416 continue;
1417 bitmap_set_bit (visited, bb->index);
1418
1419 if (bb->loop_father != loop)
1420 {
1421 error ("bb %d has father loop %d, should be loop %d",
1422 bb->index, bb->loop_father->num, loop->num);
1423 err = 1;
1424 }
1425 }
1426 }
1427 free (bbs);
1428 sbitmap_free (visited);
1429
1430 /* Check headers and latches. */
1431 FOR_EACH_LOOP (loop, 0)
1432 {
1433 i = loop->num;
1434 if (loop->header == NULL)
1435 continue;
1436 if (!bb_loop_header_p (loop->header))
1437 {
1438 error ("loop %d%'s header is not a loop header", i);
1439 err = 1;
1440 }
1441 if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS)
1442 && EDGE_COUNT (loop->header->preds) != 2)
1443 {
1444 error ("loop %d%'s header does not have exactly 2 entries", i);
1445 err = 1;
1446 }
1447 if (loop->latch)
1448 {
1449 if (!find_edge (loop->latch, loop->header))
1450 {
1451 error ("loop %d%'s latch does not have an edge to its header", i);
1452 err = 1;
1453 }
1454 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, loop->header))
1455 {
1456 error ("loop %d%'s latch is not dominated by its header", i);
1457 err = 1;
1458 }
1459 }
1460 if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
1461 {
1462 if (!single_succ_p (loop->latch))
1463 {
1464 error ("loop %d%'s latch does not have exactly 1 successor", i);
1465 err = 1;
1466 }
1467 if (single_succ (loop->latch) != loop->header)
1468 {
1469 error ("loop %d%'s latch does not have header as successor", i);
1470 err = 1;
1471 }
1472 if (loop->latch->loop_father != loop)
1473 {
1474 error ("loop %d%'s latch does not belong directly to it", i);
1475 err = 1;
1476 }
1477 }
1478 if (loop->header->loop_father != loop)
1479 {
1480 error ("loop %d%'s header does not belong directly to it", i);
1481 err = 1;
1482 }
1483 if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
1484 && (loop_latch_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP))
1485 {
1486 error ("loop %d%'s latch is marked as part of irreducible region", i);
1487 err = 1;
1488 }
1489 }
1490
1491 /* Check irreducible loops. */
1492 if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
1493 {
1494 /* Record old info. */
1495 irreds = sbitmap_alloc (last_basic_block_for_fn (cfun));
1496 FOR_EACH_BB_FN (bb, cfun)
1497 {
1498 edge_iterator ei;
1499 if (bb->flags & BB_IRREDUCIBLE_LOOP)
1500 bitmap_set_bit (irreds, bb->index);
1501 else
1502 bitmap_clear_bit (irreds, bb->index);
1503 FOR_EACH_EDGE (e, ei, bb->succs)
1504 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
1505 e->flags |= EDGE_ALL_FLAGS + 1;
1506 }
1507
1508 /* Recount it. */
1509 mark_irreducible_loops ();
1510
1511 /* Compare. */
1512 FOR_EACH_BB_FN (bb, cfun)
1513 {
1514 edge_iterator ei;
1515
1516 if ((bb->flags & BB_IRREDUCIBLE_LOOP)
1517 && !bitmap_bit_p (irreds, bb->index))
1518 {
1519 error ("basic block %d should be marked irreducible", bb->index);
1520 err = 1;
1521 }
1522 else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
1523 && bitmap_bit_p (irreds, bb->index))
1524 {
1525 error ("basic block %d should not be marked irreducible", bb->index);
1526 err = 1;
1527 }
1528 FOR_EACH_EDGE (e, ei, bb->succs)
1529 {
1530 if ((e->flags & EDGE_IRREDUCIBLE_LOOP)
1531 && !(e->flags & (EDGE_ALL_FLAGS + 1)))
1532 {
1533 error ("edge from %d to %d should be marked irreducible",
1534 e->src->index, e->dest->index);
1535 err = 1;
1536 }
1537 else if (!(e->flags & EDGE_IRREDUCIBLE_LOOP)
1538 && (e->flags & (EDGE_ALL_FLAGS + 1)))
1539 {
1540 error ("edge from %d to %d should not be marked irreducible",
1541 e->src->index, e->dest->index);
1542 err = 1;
1543 }
1544 e->flags &= ~(EDGE_ALL_FLAGS + 1);
1545 }
1546 }
1547 free (irreds);
1548 }
1549
1550 /* Check the recorded loop exits. */
1551 FOR_EACH_LOOP (loop, 0)
1552 {
1553 if (!loop->exits || loop->exits->e != NULL)
1554 {
1555 error ("corrupted head of the exits list of loop %d",
1556 loop->num);
1557 err = 1;
1558 }
1559 else
1560 {
1561 /* Check that the list forms a cycle, and all elements except
1562 for the head are nonnull. */
1563 for (mexit = loop->exits, exit = mexit->next, i = 0;
1564 exit->e && exit != mexit;
1565 exit = exit->next)
1566 {
1567 if (i++ & 1)
1568 mexit = mexit->next;
1569 }
1570
1571 if (exit != loop->exits)
1572 {
1573 error ("corrupted exits list of loop %d", loop->num);
1574 err = 1;
1575 }
1576 }
1577
1578 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1579 {
1580 if (loop->exits->next != loop->exits)
1581 {
1582 error ("nonempty exits list of loop %d, but exits are not recorded",
1583 loop->num);
1584 err = 1;
1585 }
1586 }
1587 }
1588
1589 if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1590 {
1591 unsigned n_exits = 0, eloops;
1592
1593 sizes = XCNEWVEC (unsigned, num);
1594 memset (sizes, 0, sizeof (unsigned) * num);
1595 FOR_EACH_BB_FN (bb, cfun)
1596 {
1597 edge_iterator ei;
1598 if (bb->loop_father == current_loops->tree_root)
1599 continue;
1600 FOR_EACH_EDGE (e, ei, bb->succs)
1601 {
1602 if (flow_bb_inside_loop_p (bb->loop_father, e->dest))
1603 continue;
1604
1605 n_exits++;
1606 exit = get_exit_descriptions (e);
1607 if (!exit)
1608 {
1609 error ("exit %d->%d not recorded",
1610 e->src->index, e->dest->index);
1611 err = 1;
1612 }
1613 eloops = 0;
1614 for (; exit; exit = exit->next_e)
1615 eloops++;
1616
1617 for (loop = bb->loop_father;
1618 loop != e->dest->loop_father
1619 /* When a loop exit is also an entry edge, which
1620 can happen when avoiding CFG manipulations,
1621 the last loop exited is the outer loop
1622 of the loop entered. */
1623 && loop != loop_outer (e->dest->loop_father);
1624 loop = loop_outer (loop))
1625 {
1626 eloops--;
1627 sizes[loop->num]++;
1628 }
1629
1630 if (eloops != 0)
1631 {
1632 error ("wrong list of exited loops for edge %d->%d",
1633 e->src->index, e->dest->index);
1634 err = 1;
1635 }
1636 }
1637 }
1638
1639 if (n_exits != current_loops->exits->elements ())
1640 {
1641 error ("too many loop exits recorded");
1642 err = 1;
1643 }
1644
1645 FOR_EACH_LOOP (loop, 0)
1646 {
1647 eloops = 0;
1648 for (exit = loop->exits->next; exit->e; exit = exit->next)
1649 eloops++;
1650 if (eloops != sizes[loop->num])
1651 {
1652 error ("%d exits recorded for loop %d (having %d exits)",
1653 eloops, loop->num, sizes[loop->num]);
1654 err = 1;
1655 }
1656 }
1657
1658 free (sizes);
1659 }
1660
1661 gcc_assert (!err);
1662
1663 if (!dom_available)
1664 free_dominance_info (CDI_DOMINATORS);
1665 }
1666
1667 /* Returns latch edge of LOOP. */
1668 edge
1669 loop_latch_edge (const struct loop *loop)
1670 {
1671 return find_edge (loop->latch, loop->header);
1672 }
1673
1674 /* Returns preheader edge of LOOP. */
1675 edge
1676 loop_preheader_edge (const struct loop *loop)
1677 {
1678 edge e;
1679 edge_iterator ei;
1680
1681 gcc_assert (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS));
1682
1683 FOR_EACH_EDGE (e, ei, loop->header->preds)
1684 if (e->src != loop->latch)
1685 break;
1686
1687 return e;
1688 }
1689
1690 /* Returns true if E is an exit of LOOP. */
1691
1692 bool
1693 loop_exit_edge_p (const struct loop *loop, const_edge e)
1694 {
1695 return (flow_bb_inside_loop_p (loop, e->src)
1696 && !flow_bb_inside_loop_p (loop, e->dest));
1697 }
1698
1699 /* Returns the single exit edge of LOOP, or NULL if LOOP has either no exit
1700 or more than one exit. If the loop exits are not recorded, NULL is
1701 always returned. */
1702
1703 edge
1704 single_exit (const struct loop *loop)
1705 {
1706 struct loop_exit *exit = loop->exits->next;
1707
1708 if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1709 return NULL;
1710
1711 if (exit->e && exit->next == loop->exits)
1712 return exit->e;
1713 else
1714 return NULL;
1715 }
1716
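/* Usage sketch (added for exposition, not from the original sources): NULL is
   returned both when the loop has several (or no) exits and when exits are
   not recorded, so callers test the result before relying on it:

     edge exit = single_exit (loop);
     if (exit)
       ... the loop is left only through exit ...
     else
       ... zero, multiple, or unrecorded exits ...  */
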
1717 /* Returns true when BB has an incoming edge exiting LOOP. */
1718
1719 bool
1720 loop_exits_to_bb_p (struct loop *loop, basic_block bb)
1721 {
1722 edge e;
1723 edge_iterator ei;
1724
1725 FOR_EACH_EDGE (e, ei, bb->preds)
1726 if (loop_exit_edge_p (loop, e))
1727 return true;
1728
1729 return false;
1730 }
1731
1732 /* Returns true when BB has an outgoing edge exiting LOOP. */
1733
1734 bool
1735 loop_exits_from_bb_p (struct loop *loop, basic_block bb)
1736 {
1737 edge e;
1738 edge_iterator ei;
1739
1740 FOR_EACH_EDGE (e, ei, bb->succs)
1741 if (loop_exit_edge_p (loop, e))
1742 return true;
1743
1744 return false;
1745 }
1746
1747 /* Return location corresponding to the loop control condition if possible. */
1748
1749 location_t
1750 get_loop_location (struct loop *loop)
1751 {
1752 rtx_insn *insn = NULL;
1753 struct niter_desc *desc = NULL;
1754 edge exit;
1755
1756 /* For a for or while loop, we would like to return the location
1757 of the for or while statement, if possible. To do this, look
1758 for the branch guarding the loop back-edge. */
1759
1760 /* If this is a simple loop with an in_edge, then the loop control
1761 branch is typically at the end of its source. */
1762 desc = get_simple_loop_desc (loop);
1763 if (desc->in_edge)
1764 {
1765 FOR_BB_INSNS_REVERSE (desc->in_edge->src, insn)
1766 {
1767 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1768 return INSN_LOCATION (insn);
1769 }
1770 }
1771 /* If loop has a single exit, then the loop control branch
1772 must be at the end of its source. */
1773 if ((exit = single_exit (loop)))
1774 {
1775 FOR_BB_INSNS_REVERSE (exit->src, insn)
1776 {
1777 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1778 return INSN_LOCATION (insn);
1779 }
1780 }
1781 /* Next check the latch, to see if it is non-empty. */
1782 FOR_BB_INSNS_REVERSE (loop->latch, insn)
1783 {
1784 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1785 return INSN_LOCATION (insn);
1786 }
1787 /* Finally, if none of the above identifies the loop control branch,
1788 return the first location in the loop header. */
1789 FOR_BB_INSNS (loop->header, insn)
1790 {
1791 if (INSN_P (insn) && INSN_HAS_LOCATION (insn))
1792 return INSN_LOCATION (insn);
1793 }
1794 /* If all else fails, simply return the current function location. */
1795 return DECL_SOURCE_LOCATION (current_function_decl);
1796 }
1797
1798 /* Records that every statement in LOOP is executed I_BOUND times.
1799 REALISTIC is true if I_BOUND is expected to be close to the real number
1800 of iterations. UPPER is true if we are sure the loop iterates at most
1801 I_BOUND times. */
1802
1803 void
1804 record_niter_bound (struct loop *loop, const widest_int &i_bound,
1805 bool realistic, bool upper)
1806 {
1807 /* Update the bounds only when there is no previous estimation, or when the
1808 current estimation is smaller. */
1809 if (upper
1810 && (!loop->any_upper_bound
1811 || wi::ltu_p (i_bound, loop->nb_iterations_upper_bound)))
1812 {
1813 loop->any_upper_bound = true;
1814 loop->nb_iterations_upper_bound = i_bound;
1815 }
1816 if (realistic
1817 && (!loop->any_estimate
1818 || wi::ltu_p (i_bound, loop->nb_iterations_estimate)))
1819 {
1820 loop->any_estimate = true;
1821 loop->nb_iterations_estimate = i_bound;
1822 }
1823
1824 /* If an upper bound is smaller than the realistic estimate of the
1825 number of iterations, use the upper bound instead. */
1826 if (loop->any_upper_bound
1827 && loop->any_estimate
1828 && wi::ltu_p (loop->nb_iterations_upper_bound,
1829 loop->nb_iterations_estimate))
1830 loop->nb_iterations_estimate = loop->nb_iterations_upper_bound;
1831 }
1832
1833 /* Similar to get_estimated_loop_iterations, but returns the estimate only
1834 if it fits into a HOST_WIDE_INT. If this is not the case, or the estimate
1835 on the number of iterations of LOOP could not be derived, returns -1. */
1836
1837 HOST_WIDE_INT
1838 get_estimated_loop_iterations_int (struct loop *loop)
1839 {
1840 widest_int nit;
1841 HOST_WIDE_INT hwi_nit;
1842
1843 if (!get_estimated_loop_iterations (loop, &nit))
1844 return -1;
1845
1846 if (!wi::fits_shwi_p (nit))
1847 return -1;
1848 hwi_nit = nit.to_shwi ();
1849
1850 return hwi_nit < 0 ? -1 : hwi_nit;
1851 }
1852
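/* Usage sketch (added for exposition, not from the original sources): -1
   marks "no usable estimate", so callers check the sign before comparing
   against a limit of their own choosing:

     HOST_WIDE_INT est = get_estimated_loop_iterations_int (loop);
     if (est >= 0 && est <= limit)
       ... the loop is expected to iterate at most "limit" times ...

   where "limit" is a caller-chosen threshold, not something defined here.  */
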
1853 /* Returns an upper bound on the number of executions of statements
1854 in the LOOP. For statements before the loop exit, this exceeds
1855 the number of executions of the latch by one. */
1856
1857 HOST_WIDE_INT
1858 max_stmt_executions_int (struct loop *loop)
1859 {
1860 HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);
1861 HOST_WIDE_INT snit;
1862
1863 if (nit == -1)
1864 return -1;
1865
1866 snit = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) nit + 1);
1867
1868 /* If the computation overflows, return -1. */
1869 return snit < 0 ? -1 : snit;
1870 }
1871
1872 /* Sets NIT to the estimated number of executions of the latch of the
1873 LOOP. If we have no reliable estimate, the function returns false, otherwise
1874 returns true. */
1875
1876 bool
1877 get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
1878 {
1879 /* Even if the bound is not recorded, we may be able to derive one from
1880 the profile. */
1881 if (!loop->any_estimate)
1882 {
1883 if (loop->header->count)
1884 {
1885 *nit = gcov_type_to_wide_int
1886 (expected_loop_iterations_unbounded (loop) + 1);
1887 return true;
1888 }
1889 return false;
1890 }
1891
1892 *nit = loop->nb_iterations_estimate;
1893 return true;
1894 }
1895
1896 /* Sets NIT to an upper bound for the maximum number of executions of the
1897 latch of the LOOP. If we have no reliable estimate, the function returns
1898 false, otherwise returns true. */
1899
1900 bool
1901 get_max_loop_iterations (struct loop *loop, widest_int *nit)
1902 {
1903 if (!loop->any_upper_bound)
1904 return false;
1905
1906 *nit = loop->nb_iterations_upper_bound;
1907 return true;
1908 }
1909
1910 /* Similar to get_max_loop_iterations, but returns the estimate only
1911 if it fits into a HOST_WIDE_INT. If this is not the case, or the estimate
1912 on the number of iterations of LOOP could not be derived, returns -1. */
1913
1914 HOST_WIDE_INT
1915 get_max_loop_iterations_int (struct loop *loop)
1916 {
1917 widest_int nit;
1918 HOST_WIDE_INT hwi_nit;
1919
1920 if (!get_max_loop_iterations (loop, &nit))
1921 return -1;
1922
1923 if (!wi::fits_shwi_p (nit))
1924 return -1;
1925 hwi_nit = nit.to_shwi ();
1926
1927 return hwi_nit < 0 ? -1 : hwi_nit;
1928 }
1929
1930 /* Returns the loop depth of the loop BB belongs to. */
1931
1932 int
1933 bb_loop_depth (const_basic_block bb)
1934 {
1935 return bb->loop_father ? loop_depth (bb->loop_father) : 0;
1936 }
1937
1938 /* Marks LOOP for removal and sets LOOPS_NEED_FIXUP. */
1939
1940 void
1941 mark_loop_for_removal (loop_p loop)
1942 {
1943 if (loop->header == NULL)
1944 return;
1945 loop->former_header = loop->header;
1946 loop->header = NULL;
1947 loop->latch = NULL;
1948 loops_state_set (LOOPS_NEED_FIXUP);
1949 }