1 /* Loop manipulation code for GNU compiler.
2 Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "obstack.h"
28 #include "basic-block.h"
29 #include "cfgloop.h"
30 #include "cfglayout.h"
31 #include "output.h"
32
33 static void duplicate_subloops (struct loops *, struct loop *, struct loop *);
34 static void copy_loops_to (struct loops *, struct loop **, int,
35 struct loop *);
36 static void loop_redirect_edge (edge, basic_block);
37 static bool loop_delete_branch_edge (edge, int);
38 static void remove_bbs (basic_block *, int);
39 static bool rpe_enum_p (basic_block, void *);
40 static int find_path (edge, basic_block **);
41 static bool alp_enum_p (basic_block, void *);
42 static void add_loop (struct loops *, struct loop *);
43 static void fix_loop_placements (struct loops *, struct loop *);
44 static bool fix_bb_placement (struct loops *, basic_block);
45 static void fix_bb_placements (struct loops *, basic_block);
46 static void place_new_loop (struct loops *, struct loop *);
47 static void scale_loop_frequencies (struct loop *, int, int);
48 static void scale_bbs_frequencies (basic_block *, int, int, int);
49 static basic_block create_preheader (struct loop *, int);
50 static void fix_irreducible_loops (basic_block);
51 static void unloop (struct loops *, struct loop *);
52
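/* Rounding division: RDIV (X, Y) is X/Y rounded to the nearest integer,
   e.g. RDIV (7, 2) == 4.  Used below when scaling execution counts and
   branch probabilities.  */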
53 #define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
54
55 /* Splits basic block BB after INSN, returns created edge. Updates loops
56 and dominators. */
57 edge
58 split_loop_bb (basic_block bb, void *insn)
59 {
60 edge e;
61
62 /* Split the block. */
63 e = split_block (bb, insn);
64
65 /* Add dest to loop. */
66 add_bb_to_loop (e->dest, e->src->loop_father);
67
68 return e;
69 }
70
71 /* Checks whether basic block BB is dominated by DATA. */
72 static bool
73 rpe_enum_p (basic_block bb, void *data)
74 {
75 return dominated_by_p (CDI_DOMINATORS, bb, data);
76 }
77
78 /* Remove basic blocks BBS from loop structure and dominance info,
79 and delete them afterwards. */
80 static void
81 remove_bbs (basic_block *bbs, int nbbs)
82 {
83 int i;
84
85 for (i = 0; i < nbbs; i++)
86 {
87 remove_bb_from_loops (bbs[i]);
88 delete_basic_block (bbs[i]);
89 }
90 }
91
 92 /* Find the path -- i.e. the basic blocks dominated by edge E -- and put them
 93 into array BBS, which will be allocated large enough to contain them.
 94 E->dest must have exactly one predecessor for this to work (this is
 95 easy to achieve and we do not enforce it here because we do not want to
 96 alter anything in this function). The number of basic blocks in the
 97 path is returned. */
98 static int
99 find_path (edge e, basic_block **bbs)
100 {
101 gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);
102
103 /* Find bbs in the path. */
104 *bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
105 return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
106 n_basic_blocks, e->dest);
107 }
108
 109 /* Fix placement of basic block BB inside the loop hierarchy stored in LOOPS --
 110 let L be the loop to which BB belongs. Then every successor of BB must either
 111 1) belong to some superloop of loop L, or
 112 2) be a header of a loop K such that K->outer is a superloop of L.
 113 Returns true if we had to move BB into another loop to enforce this condition,
 114 false if the placement of BB was already correct (provided that placements
 115 of its successors are correct). */
116 static bool
117 fix_bb_placement (struct loops *loops, basic_block bb)
118 {
119 edge e;
120 edge_iterator ei;
121 struct loop *loop = loops->tree_root, *act;
122
123 FOR_EACH_EDGE (e, ei, bb->succs)
124 {
125 if (e->dest == EXIT_BLOCK_PTR)
126 continue;
127
128 act = e->dest->loop_father;
129 if (act->header == e->dest)
130 act = act->outer;
131
132 if (flow_loop_nested_p (loop, act))
133 loop = act;
134 }
135
136 if (loop == bb->loop_father)
137 return false;
138
139 remove_bb_from_loops (bb);
140 add_bb_to_loop (bb, loop);
141
142 return true;
143 }
144
 145 /* Fix placements of basic blocks inside loop hierarchy stored in loops; i.e.
 146 enforce the condition stated in the description of fix_bb_placement. We
 147 start from basic block FROM that had some of its successors removed, so that
 148 its placement no longer has to be correct, and iteratively fix the placement
 149 of its predecessors that may change if the placement of FROM changed. Also fix
 150 the placement of subloops of FROM->loop_father, that might also be altered due
 151 to this change; the condition for them is similar, except that instead of
 152 successors we consider edges coming out of the loops. */
153 static void
154 fix_bb_placements (struct loops *loops, basic_block from)
155 {
156 sbitmap in_queue;
157 basic_block *queue, *qtop, *qbeg, *qend;
158 struct loop *base_loop;
159 edge e;
160
 161 /* We pass through blocks back-reachable from FROM, testing whether some
 162 of their successors moved to an outer loop. It may be necessary to
 163 iterate several times, but the process is finite, as a block is only
 164 re-processed when something moves up the loop structure. The whole
 165 story is a bit more complicated due to the presence of subloops, which
 166 are moved using fix_loop_placement. */
167
168 base_loop = from->loop_father;
169 if (base_loop == loops->tree_root)
170 return;
171
172 in_queue = sbitmap_alloc (last_basic_block);
173 sbitmap_zero (in_queue);
174 SET_BIT (in_queue, from->index);
175 /* Prevent us from going out of the base_loop. */
176 SET_BIT (in_queue, base_loop->header->index);
177
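  /* The queue below is used as a circular buffer of at most num_nodes + 1
     entries; QBEG and QEND wrap back to QUEUE when they reach QTOP.  */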
178 queue = xmalloc ((base_loop->num_nodes + 1) * sizeof (basic_block));
179 qtop = queue + base_loop->num_nodes + 1;
180 qbeg = queue;
181 qend = queue + 1;
182 *qbeg = from;
183
184 while (qbeg != qend)
185 {
186 edge_iterator ei;
187 from = *qbeg;
188 qbeg++;
189 if (qbeg == qtop)
190 qbeg = queue;
191 RESET_BIT (in_queue, from->index);
192
193 if (from->loop_father->header == from)
194 {
195 /* Subloop header, maybe move the loop upward. */
196 if (!fix_loop_placement (from->loop_father))
197 continue;
198 }
199 else
200 {
201 /* Ordinary basic block. */
202 if (!fix_bb_placement (loops, from))
203 continue;
204 }
205
206 /* Something has changed, insert predecessors into queue. */
207 FOR_EACH_EDGE (e, ei, from->preds)
208 {
209 basic_block pred = e->src;
210 struct loop *nca;
211
212 if (TEST_BIT (in_queue, pred->index))
213 continue;
214
 215 /* If PRED is in a subloop, then the subloop either was not
 216 moved, or the path up the loop tree from base_loop does
 217 not contain it. */
218 nca = find_common_loop (pred->loop_father, base_loop);
219 if (pred->loop_father != base_loop
220 && (nca == base_loop
221 || nca != pred->loop_father))
222 pred = pred->loop_father->header;
223 else if (!flow_loop_nested_p (from->loop_father, pred->loop_father))
224 {
225 /* No point in processing it. */
226 continue;
227 }
228
229 if (TEST_BIT (in_queue, pred->index))
230 continue;
231
232 /* Schedule the basic block. */
233 *qend = pred;
234 qend++;
235 if (qend == qtop)
236 qend = queue;
237 SET_BIT (in_queue, pred->index);
238 }
239 }
240 free (in_queue);
241 free (queue);
242 }
243
 244 /* Basic block FROM has lost one or more of its predecessors, so it might
 245 no longer be part of an irreducible loop. Fix that and proceed
 246 recursively for its successors if needed. */
247 static void
248 fix_irreducible_loops (basic_block from)
249 {
250 basic_block bb;
251 basic_block *stack;
252 int stack_top;
253 sbitmap on_stack;
254 edge *edges, e;
255 unsigned n_edges, i;
256
257 if (!(from->flags & BB_IRREDUCIBLE_LOOP))
258 return;
259
260 on_stack = sbitmap_alloc (last_basic_block);
261 sbitmap_zero (on_stack);
262 SET_BIT (on_stack, from->index);
263 stack = xmalloc (from->loop_father->num_nodes * sizeof (basic_block));
264 stack[0] = from;
265 stack_top = 1;
266
267 while (stack_top)
268 {
269 edge_iterator ei;
270 bb = stack[--stack_top];
271 RESET_BIT (on_stack, bb->index);
272
273 FOR_EACH_EDGE (e, ei, bb->preds)
274 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
275 break;
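      /* FOR_EACH_EDGE leaves E NULL when no irreducible predecessor edge was
         found; if one was found, BB stays inside the irreducible region.  */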
276 if (e)
277 continue;
278
279 bb->flags &= ~BB_IRREDUCIBLE_LOOP;
280 if (bb->loop_father->header == bb)
281 edges = get_loop_exit_edges (bb->loop_father, &n_edges);
282 else
283 {
284 n_edges = EDGE_COUNT (bb->succs);
285 edges = xmalloc (n_edges * sizeof (edge));
286 FOR_EACH_EDGE (e, ei, bb->succs)
287 edges[ei.index] = e;
288 }
289
290 for (i = 0; i < n_edges; i++)
291 {
292 e = edges[i];
293
294 if (e->flags & EDGE_IRREDUCIBLE_LOOP)
295 {
296 if (!flow_bb_inside_loop_p (from->loop_father, e->dest))
297 continue;
298
299 e->flags &= ~EDGE_IRREDUCIBLE_LOOP;
300 if (TEST_BIT (on_stack, e->dest->index))
301 continue;
302
303 SET_BIT (on_stack, e->dest->index);
304 stack[stack_top++] = e->dest;
305 }
306 }
307 free (edges);
308 }
309
310 free (on_stack);
311 free (stack);
312 }
313
 314 /* Removes the path beginning at edge E, i.e. removes the basic blocks
 315 dominated by E and updates the loop structure stored in LOOPS and the
 316 dominators. Returns true if we were able to remove the path, false
 317 otherwise (and nothing is affected then). */
318 bool
319 remove_path (struct loops *loops, edge e)
320 {
321 edge ae;
322 basic_block *rem_bbs, *bord_bbs, *dom_bbs, from, bb;
323 int i, nrem, n_bord_bbs, n_dom_bbs;
324 sbitmap seen;
325 bool deleted;
326
327 if (!loop_delete_branch_edge (e, 0))
328 return false;
329
 330 /* We need to check whether the basic blocks are dominated by the edge
 331 E, but we only have basic block dominators. This is easy to
 332 fix -- when E->dest has exactly one predecessor, this corresponds
 333 to the blocks dominated by E->dest; if not, split the edge. */
334 if (EDGE_COUNT (e->dest->preds) > 1)
335 e = EDGE_PRED (loop_split_edge_with (e, NULL_RTX), 0);
336
 337 /* It may happen that by removing the path we remove one or more loops
 338 that the path belongs to. In this case, first unloop the loops, then
 339 proceed normally. We may assume that E->dest is not a header of any
 340 loop, as it now has exactly one predecessor. */
341 while (e->src->loop_father->outer
342 && dominated_by_p (CDI_DOMINATORS,
343 e->src->loop_father->latch, e->dest))
344 unloop (loops, e->src->loop_father);
345
346 /* Identify the path. */
347 nrem = find_path (e, &rem_bbs);
348
349 n_bord_bbs = 0;
350 bord_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
351 seen = sbitmap_alloc (last_basic_block);
352 sbitmap_zero (seen);
353
354 /* Find "border" hexes -- i.e. those with predecessor in removed path. */
355 for (i = 0; i < nrem; i++)
356 SET_BIT (seen, rem_bbs[i]->index);
357 for (i = 0; i < nrem; i++)
358 {
359 edge_iterator ei;
360 bb = rem_bbs[i];
361 FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
362 if (ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index))
363 {
364 SET_BIT (seen, ae->dest->index);
365 bord_bbs[n_bord_bbs++] = ae->dest;
366 }
367 }
368
369 /* Remove the path. */
370 from = e->src;
371 deleted = loop_delete_branch_edge (e, 1);
372 gcc_assert (deleted);
373 dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
374
375 /* Cancel loops contained in the path. */
376 for (i = 0; i < nrem; i++)
377 if (rem_bbs[i]->loop_father->header == rem_bbs[i])
378 cancel_loop_tree (loops, rem_bbs[i]->loop_father);
379
380 remove_bbs (rem_bbs, nrem);
381 free (rem_bbs);
382
383 /* Find blocks whose dominators may be affected. */
384 n_dom_bbs = 0;
385 sbitmap_zero (seen);
386 for (i = 0; i < n_bord_bbs; i++)
387 {
388 basic_block ldom;
389
390 bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
391 if (TEST_BIT (seen, bb->index))
392 continue;
393 SET_BIT (seen, bb->index);
394
395 for (ldom = first_dom_son (CDI_DOMINATORS, bb);
396 ldom;
397 ldom = next_dom_son (CDI_DOMINATORS, ldom))
398 if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
399 dom_bbs[n_dom_bbs++] = ldom;
400 }
401
402 free (seen);
403
404 /* Recount dominators. */
405 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, n_dom_bbs);
406 free (dom_bbs);
407
408 /* These blocks have lost some predecessor(s), thus their irreducible
409 status could be changed. */
410 for (i = 0; i < n_bord_bbs; i++)
411 fix_irreducible_loops (bord_bbs[i]);
412 free (bord_bbs);
413
414 /* Fix placements of basic blocks inside loops and the placement of
415 loops in the loop tree. */
416 fix_bb_placements (loops, from);
417 fix_loop_placements (loops, from->loop_father);
418
419 return true;
420 }
421
422 /* Predicate for enumeration in add_loop. */
423 static bool
424 alp_enum_p (basic_block bb, void *alp_header)
425 {
426 return bb != (basic_block) alp_header;
427 }
428
429 /* Given LOOP structure with filled header and latch, find the body of the
430 corresponding loop and add it to LOOPS tree. */
431 static void
432 add_loop (struct loops *loops, struct loop *loop)
433 {
434 basic_block *bbs;
435 int i, n;
436
437 /* Add it to loop structure. */
438 place_new_loop (loops, loop);
439 loop->level = 1;
440
441 /* Find its nodes. */
442 bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
443 n = dfs_enumerate_from (loop->latch, 1, alp_enum_p,
444 bbs, n_basic_blocks, loop->header);
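  /* The backward walk starts at the latch and alp_enum_p prevents it from
     passing through the header, so exactly the blocks of the loop body are
     enumerated; the header itself is added separately below.  */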
445
446 for (i = 0; i < n; i++)
447 add_bb_to_loop (bbs[i], loop);
448 add_bb_to_loop (loop->header, loop);
449
450 free (bbs);
451 }
452
453 /* Multiply all frequencies of basic blocks in array BBS of length NBBS
454 by NUM/DEN. */
455 static void
456 scale_bbs_frequencies (basic_block *bbs, int nbbs, int num, int den)
457 {
458 int i;
459 edge e;
460
461 for (i = 0; i < nbbs; i++)
462 {
463 edge_iterator ei;
464 bbs[i]->frequency = (bbs[i]->frequency * num) / den;
465 bbs[i]->count = RDIV (bbs[i]->count * num, den);
466 FOR_EACH_EDGE (e, ei, bbs[i]->succs)
467 e->count = (e->count * num) /den;
468 }
469 }
470
471 /* Multiply all frequencies in LOOP by NUM/DEN. */
472 static void
473 scale_loop_frequencies (struct loop *loop, int num, int den)
474 {
475 basic_block *bbs;
476
477 bbs = get_loop_body (loop);
478 scale_bbs_frequencies (bbs, loop->num_nodes, num, den);
479 free (bbs);
480 }
481
482 /* Make area between HEADER_EDGE and LATCH_EDGE a loop by connecting
483 latch to header and update loop tree stored in LOOPS and dominators
484 accordingly. Everything between them plus LATCH_EDGE destination must
485 be dominated by HEADER_EDGE destination, and back-reachable from
486 LATCH_EDGE source. HEADER_EDGE is redirected to basic block SWITCH_BB,
487 FALSE_EDGE of SWITCH_BB to original destination of HEADER_EDGE and
488 TRUE_EDGE of SWITCH_BB to original destination of LATCH_EDGE.
489 Returns newly created loop. */
490
491 struct loop *
492 loopify (struct loops *loops, edge latch_edge, edge header_edge,
493 basic_block switch_bb, edge true_edge, edge false_edge,
494 bool redirect_all_edges)
495 {
496 basic_block succ_bb = latch_edge->dest;
497 basic_block pred_bb = header_edge->src;
498 basic_block *dom_bbs, *body;
499 unsigned n_dom_bbs, i;
500 sbitmap seen;
501 struct loop *loop = xcalloc (1, sizeof (struct loop));
502 struct loop *outer = succ_bb->loop_father->outer;
503 int freq, prob, tot_prob;
504 gcov_type cnt;
505 edge e;
506 edge_iterator ei;
507
508 loop->header = header_edge->dest;
509 loop->latch = latch_edge->src;
510
511 freq = EDGE_FREQUENCY (header_edge);
512 cnt = header_edge->count;
513 prob = EDGE_SUCC (switch_bb, 0)->probability;
514 tot_prob = prob + EDGE_SUCC (switch_bb, 1)->probability;
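  /* Avoid a zero denominator in the frequency scaling below.  */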
515 if (tot_prob == 0)
516 tot_prob = 1;
517
518 /* Redirect edges. */
519 loop_redirect_edge (latch_edge, loop->header);
520 loop_redirect_edge (true_edge, succ_bb);
521
 522 /* During loop versioning, one of the switch_bb edges is already properly
 523 set. Do not redirect it again unless redirect_all_edges is true. */
524 if (redirect_all_edges)
525 {
526 loop_redirect_edge (header_edge, switch_bb);
527 loop_redirect_edge (false_edge, loop->header);
528
529 /* Update dominators. */
530 set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
531 set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
532 }
533
534 set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);
535
536 /* Compute new loop. */
537 add_loop (loops, loop);
538 flow_loop_tree_node_add (outer, loop);
539
540 /* Add switch_bb to appropriate loop. */
541 add_bb_to_loop (switch_bb, outer);
542
543 /* Fix frequencies. */
544 switch_bb->frequency = freq;
545 switch_bb->count = cnt;
546 FOR_EACH_EDGE (e, ei, switch_bb->succs)
547 e->count = (switch_bb->count * e->probability) / REG_BR_PROB_BASE;
548 scale_loop_frequencies (loop, prob, tot_prob);
549 scale_loop_frequencies (succ_bb->loop_father, tot_prob - prob, tot_prob);
550
551 /* Update dominators of blocks outside of LOOP. */
552 dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
553 n_dom_bbs = 0;
554 seen = sbitmap_alloc (last_basic_block);
555 sbitmap_zero (seen);
556 body = get_loop_body (loop);
557
558 for (i = 0; i < loop->num_nodes; i++)
559 SET_BIT (seen, body[i]->index);
560
561 for (i = 0; i < loop->num_nodes; i++)
562 {
563 basic_block ldom;
564
565 for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
566 ldom;
567 ldom = next_dom_son (CDI_DOMINATORS, ldom))
568 if (!TEST_BIT (seen, ldom->index))
569 {
570 SET_BIT (seen, ldom->index);
571 dom_bbs[n_dom_bbs++] = ldom;
572 }
573 }
574
575 iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, n_dom_bbs);
576
577 free (body);
578 free (seen);
579 free (dom_bbs);
580
581 return loop;
582 }
583
 584 /* Remove the latch edge of a LOOP and update the LOOPS tree to indicate
 585 that the LOOP was removed. After this function, the original loop latch
 586 will have no successor, which the caller is expected to fix somehow. */
587 static void
588 unloop (struct loops *loops, struct loop *loop)
589 {
590 basic_block *body;
591 struct loop *ploop;
592 unsigned i, n;
593 basic_block latch = loop->latch;
594 edge *edges;
595 unsigned n_edges;
596
 597 /* This is relatively straightforward. The dominators are unchanged, as
 598 the loop header dominates the loop latch, so the only thing we have to
 599 care about is the placement of loops and basic blocks inside the loop
 600 tree. We move them all to loop->outer, and then let fix_bb_placements
 601 do its work. */
602
603 body = get_loop_body (loop);
604 edges = get_loop_exit_edges (loop, &n_edges);
605 n = loop->num_nodes;
606 for (i = 0; i < n; i++)
607 if (body[i]->loop_father == loop)
608 {
609 remove_bb_from_loops (body[i]);
610 add_bb_to_loop (body[i], loop->outer);
611 }
612 free(body);
613
614 while (loop->inner)
615 {
616 ploop = loop->inner;
617 flow_loop_tree_node_remove (ploop);
618 flow_loop_tree_node_add (loop->outer, ploop);
619 }
620
621 /* Remove the loop and free its data. */
622 flow_loop_tree_node_remove (loop);
623 loops->parray[loop->num] = NULL;
624 flow_loop_free (loop);
625
626 remove_edge (EDGE_SUCC (latch, 0));
627 fix_bb_placements (loops, latch);
628
 629 /* If the loop was inside an irreducible region, we would have to somehow
 630 update the irreducible marks inside its body. While it is certainly
 631 possible to do, it is a bit complicated and this situation should be
 632 very rare, so we just re-mark all loops in this case. */
633 for (i = 0; i < n_edges; i++)
634 if (edges[i]->flags & EDGE_IRREDUCIBLE_LOOP)
635 break;
636 if (i != n_edges)
637 mark_irreducible_loops (loops);
638 free (edges);
639 }
640
641 /* Fix placement of LOOP inside loop tree, i.e. find the innermost superloop
642 FATHER of LOOP such that all of the edges coming out of LOOP belong to
643 FATHER, and set it as outer loop of LOOP. Return 1 if placement of
644 LOOP changed. */
645 int
646 fix_loop_placement (struct loop *loop)
647 {
648 basic_block *body;
649 unsigned i;
650 edge e;
651 edge_iterator ei;
652 struct loop *father = loop->pred[0], *act;
653
654 body = get_loop_body (loop);
655 for (i = 0; i < loop->num_nodes; i++)
656 FOR_EACH_EDGE (e, ei, body[i]->succs)
657 if (!flow_bb_inside_loop_p (loop, e->dest))
658 {
659 act = find_common_loop (loop, e->dest->loop_father);
660 if (flow_loop_nested_p (father, act))
661 father = act;
662 }
663 free (body);
664
665 if (father != loop->outer)
666 {
667 for (act = loop->outer; act != father; act = act->outer)
668 act->num_nodes -= loop->num_nodes;
669 flow_loop_tree_node_remove (loop);
670 flow_loop_tree_node_add (father, loop);
671 return 1;
672 }
673 return 0;
674 }
675
 676 /* Fix placement of superloops of LOOP inside the loop tree, i.e. ensure
 677 that the condition stated in the description of fix_loop_placement holds
 678 for them. It is used when we have removed some edges coming out of LOOP,
 679 which may cause the right placement of LOOP inside the loop tree to change. */
680 static void
681 fix_loop_placements (struct loops *loops, struct loop *loop)
682 {
683 struct loop *outer;
684
685 while (loop->outer)
686 {
687 outer = loop->outer;
688 if (!fix_loop_placement (loop))
689 break;
690
691 /* Changing the placement of a loop in the loop tree may alter the
692 validity of condition 2) of the description of fix_bb_placement
693 for its preheader, because the successor is the header and belongs
694 to the loop. So call fix_bb_placements to fix up the placement
695 of the preheader and (possibly) of its predecessors. */
696 fix_bb_placements (loops, loop_preheader_edge (loop)->src);
697 loop = outer;
698 }
699 }
700
 701 /* Creates a place for a new LOOP in the LOOPS structure. */
702 static void
703 place_new_loop (struct loops *loops, struct loop *loop)
704 {
705 loops->parray =
706 xrealloc (loops->parray, (loops->num + 1) * sizeof (struct loop *));
707 loops->parray[loops->num] = loop;
708
709 loop->num = loops->num++;
710 }
711
 712 /* Creates a copy of LOOP as a subloop of the TARGET loop, placing the
 713 newly created loop into the LOOPS structure. */
714 struct loop *
715 duplicate_loop (struct loops *loops, struct loop *loop, struct loop *target)
716 {
717 struct loop *cloop;
718 cloop = xcalloc (1, sizeof (struct loop));
719 place_new_loop (loops, cloop);
720
721 /* Initialize copied loop. */
722 cloop->level = loop->level;
723
724 /* Set it as copy of loop. */
725 loop->copy = cloop;
726
727 /* Add it to target. */
728 flow_loop_tree_node_add (target, cloop);
729
730 return cloop;
731 }
732
733 /* Copies structure of subloops of LOOP into TARGET loop, placing
734 newly created loops into loop tree stored in LOOPS. */
735 static void
736 duplicate_subloops (struct loops *loops, struct loop *loop, struct loop *target)
737 {
738 struct loop *aloop, *cloop;
739
740 for (aloop = loop->inner; aloop; aloop = aloop->next)
741 {
742 cloop = duplicate_loop (loops, aloop, target);
743 duplicate_subloops (loops, aloop, cloop);
744 }
745 }
746
747 /* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
748 into TARGET loop, placing newly created loops into loop tree LOOPS. */
749 static void
750 copy_loops_to (struct loops *loops, struct loop **copied_loops, int n, struct loop *target)
751 {
752 struct loop *aloop;
753 int i;
754
755 for (i = 0; i < n; i++)
756 {
757 aloop = duplicate_loop (loops, copied_loops[i], target);
758 duplicate_subloops (loops, copied_loops[i], aloop);
759 }
760 }
761
762 /* Redirects edge E to basic block DEST. */
763 static void
764 loop_redirect_edge (edge e, basic_block dest)
765 {
766 if (e->dest == dest)
767 return;
768
769 redirect_edge_and_branch_force (e, dest);
770 }
771
772 /* Deletes edge E from a branch if possible. Unless REALLY_DELETE is set,
773 just test whether it is possible to remove the edge. */
774 static bool
775 loop_delete_branch_edge (edge e, int really_delete)
776 {
777 basic_block src = e->src;
778 basic_block newdest;
779 int irr;
780 edge snd;
781
782 gcc_assert (EDGE_COUNT (src->succs) > 1);
783
784 /* Cannot handle more than two exit edges. */
785 if (EDGE_COUNT (src->succs) > 2)
786 return false;
787 /* And it must be just a simple branch. */
788 if (!any_condjump_p (BB_END (src)))
789 return false;
790
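  /* SND is the other edge going out of SRC; E will be redirected to its
     destination.  */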
791 snd = e == EDGE_SUCC (src, 0) ? EDGE_SUCC (src, 1) : EDGE_SUCC (src, 0);
792 newdest = snd->dest;
793 if (newdest == EXIT_BLOCK_PTR)
794 return false;
795
796 /* Hopefully the above conditions should suffice. */
797 if (!really_delete)
798 return true;
799
 800 /* Redirecting behaves wrongly with respect to this flag. */
801 irr = snd->flags & EDGE_IRREDUCIBLE_LOOP;
802
803 if (!redirect_edge_and_branch (e, newdest))
804 return false;
805 EDGE_SUCC (src, 0)->flags &= ~EDGE_IRREDUCIBLE_LOOP;
806 EDGE_SUCC (src, 0)->flags |= irr;
807
808 return true;
809 }
810
811 /* Check whether LOOP's body can be duplicated. */
812 bool
813 can_duplicate_loop_p (struct loop *loop)
814 {
815 int ret;
816 basic_block *bbs = get_loop_body (loop);
817
818 ret = can_copy_bbs_p (bbs, loop->num_nodes);
819 free (bbs);
820
821 return ret;
822 }
823
 824 /* The NBBS blocks in BBS will get duplicated and the copies will be placed
 825 into LOOP. Update the single_exit information in the superloops of LOOP. */
826
827 static void
828 update_single_exits_after_duplication (basic_block *bbs, unsigned nbbs,
829 struct loop *loop)
830 {
831 unsigned i;
832
833 for (i = 0; i < nbbs; i++)
834 bbs[i]->rbi->duplicated = 1;
835
836 for (; loop->outer; loop = loop->outer)
837 {
838 if (!loop->single_exit)
839 continue;
840
841 if (loop->single_exit->src->rbi->duplicated)
842 loop->single_exit = NULL;
843 }
844
845 for (i = 0; i < nbbs; i++)
846 bbs[i]->rbi->duplicated = 0;
847 }
848
 849 /* Duplicates the body of LOOP to the given edge E NDUPL times. Takes care of
 850 updating the LOOPS structure and dominators. E's destination must be the
 851 LOOP header for this to work, i.e. it must be the entry or latch edge of
 852 this loop; these are unique, as the loops must have preheaders for this
 853 function to work correctly (if E is the latch edge, the function unrolls
 854 the loop; if E is the entry edge, it peels the loop). Store edges created
 855 by copying the ORIG edge from the copies corresponding to set bits in the
 856 WONT_EXIT bitmap (bit 0 corresponds to the original LOOP body, the other
 857 copies are numbered in the order given by the control flow through them)
 858 into the TO_REMOVE array. Returns false if duplication is impossible. */
859 int
860 duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
861 unsigned int ndupl, sbitmap wont_exit,
862 edge orig, edge *to_remove,
863 unsigned int *n_to_remove, int flags)
864 {
865 struct loop *target, *aloop;
866 struct loop **orig_loops;
867 unsigned n_orig_loops;
868 basic_block header = loop->header, latch = loop->latch;
869 basic_block *new_bbs, *bbs, *first_active;
870 basic_block new_bb, bb, first_active_latch = NULL;
871 edge ae, latch_edge;
872 edge spec_edges[2], new_spec_edges[2];
873 #define SE_LATCH 0
874 #define SE_ORIG 1
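/* SPEC_EDGES/NEW_SPEC_EDGES track the latch edge and the ORIG exit edge
   through copy_bbs, so that their copies can be found in each duplicate.  */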
875 unsigned i, j, n;
876 int is_latch = (latch == e->src);
877 int scale_act = 0, *scale_step = NULL, scale_main = 0;
878 int p, freq_in, freq_le, freq_out_orig;
879 int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
880 int add_irreducible_flag;
881
882 gcc_assert (e->dest == loop->header);
883 gcc_assert (ndupl > 0);
884
885 if (orig)
886 {
887 /* Orig must be edge out of the loop. */
888 gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
889 gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
890 }
891
892 bbs = get_loop_body (loop);
893
894 /* Check whether duplication is possible. */
895 if (!can_copy_bbs_p (bbs, loop->num_nodes))
896 {
897 free (bbs);
898 return false;
899 }
900 new_bbs = xmalloc (sizeof (basic_block) * loop->num_nodes);
901
 902 /* In case we are doing loop peeling and the loop is in the middle of an
 903 irreducible region, the peeled copies will be inside it too. */
904 add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
905 gcc_assert (!is_latch || !add_irreducible_flag);
906
907 /* Find edge from latch. */
908 latch_edge = loop_latch_edge (loop);
909
910 if (flags & DLTHE_FLAG_UPDATE_FREQ)
911 {
 912 /* Calculate the coefficients by which we have to scale the
 913 frequencies of the duplicated loop bodies. */
914 freq_in = header->frequency;
915 freq_le = EDGE_FREQUENCY (latch_edge);
916 if (freq_in == 0)
917 freq_in = 1;
918 if (freq_in < freq_le)
919 freq_in = freq_le;
920 freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
921 if (freq_out_orig > freq_in - freq_le)
922 freq_out_orig = freq_in - freq_le;
923 prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
924 prob_pass_wont_exit =
925 RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);
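      /* PROB_PASS_THRU estimates the probability that an iteration continues
         through the latch to the next copy; PROB_PASS_WONT_EXIT also includes
         the flow through ORIG, used for the copies in which WONT_EXIT says the
         ORIG exit is not taken.  */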
926
927 scale_step = xmalloc (ndupl * sizeof (int));
928
929 for (i = 1; i <= ndupl; i++)
930 scale_step[i - 1] = TEST_BIT (wont_exit, i)
931 ? prob_pass_wont_exit
932 : prob_pass_thru;
933
934 if (is_latch)
935 {
936 prob_pass_main = TEST_BIT (wont_exit, 0)
937 ? prob_pass_wont_exit
938 : prob_pass_thru;
939 p = prob_pass_main;
940 scale_main = REG_BR_PROB_BASE;
941 for (i = 0; i < ndupl; i++)
942 {
943 scale_main += p;
944 p = RDIV (p * scale_step[i], REG_BR_PROB_BASE);
945 }
946 scale_main = RDIV (REG_BR_PROB_BASE * REG_BR_PROB_BASE, scale_main);
947 scale_act = RDIV (scale_main * prob_pass_main, REG_BR_PROB_BASE);
948 }
949 else
950 {
951 scale_main = REG_BR_PROB_BASE;
952 for (i = 0; i < ndupl; i++)
953 scale_main = RDIV (scale_main * scale_step[i], REG_BR_PROB_BASE);
954 scale_act = REG_BR_PROB_BASE - prob_pass_thru;
955 }
956 for (i = 0; i < ndupl; i++)
957 gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
958 gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
959 && scale_act >= 0 && scale_act <= REG_BR_PROB_BASE);
960 }
961
962 /* Loop the new bbs will belong to. */
963 target = e->src->loop_father;
964
965 /* Original loops. */
966 n_orig_loops = 0;
967 for (aloop = loop->inner; aloop; aloop = aloop->next)
968 n_orig_loops++;
969 orig_loops = xcalloc (n_orig_loops, sizeof (struct loop *));
970 for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
971 orig_loops[i] = aloop;
972
973 loop->copy = target;
974
975 n = loop->num_nodes;
976
977 first_active = xmalloc (n * sizeof (basic_block));
978 if (is_latch)
979 {
980 memcpy (first_active, bbs, n * sizeof (basic_block));
981 first_active_latch = latch;
982 }
983
984 /* Update the information about single exits. */
985 if (loops->state & LOOPS_HAVE_MARKED_SINGLE_EXITS)
986 update_single_exits_after_duplication (bbs, n, target);
987
988 /* Record exit edge in original loop body. */
989 if (orig && TEST_BIT (wont_exit, 0))
990 to_remove[(*n_to_remove)++] = orig;
991
992 spec_edges[SE_ORIG] = orig;
993 spec_edges[SE_LATCH] = latch_edge;
994
995 for (j = 0; j < ndupl; j++)
996 {
997 /* Copy loops. */
998 copy_loops_to (loops, orig_loops, n_orig_loops, target);
999
1000 /* Copy bbs. */
1001 copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop);
1002
1003 for (i = 0; i < n; i++)
1004 new_bbs[i]->rbi->copy_number = j + 1;
1005
1006 /* Note whether the blocks and edges belong to an irreducible loop. */
1007 if (add_irreducible_flag)
1008 {
1009 for (i = 0; i < n; i++)
1010 new_bbs[i]->rbi->duplicated = 1;
1011 for (i = 0; i < n; i++)
1012 {
1013 edge_iterator ei;
1014 new_bb = new_bbs[i];
1015 if (new_bb->loop_father == target)
1016 new_bb->flags |= BB_IRREDUCIBLE_LOOP;
1017
1018 FOR_EACH_EDGE (ae, ei, new_bb->succs)
1019 if (ae->dest->rbi->duplicated
1020 && (ae->src->loop_father == target
1021 || ae->dest->loop_father == target))
1022 ae->flags |= EDGE_IRREDUCIBLE_LOOP;
1023 }
1024 for (i = 0; i < n; i++)
1025 new_bbs[i]->rbi->duplicated = 0;
1026 }
1027
1028 /* Redirect the special edges. */
1029 if (is_latch)
1030 {
1031 redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
1032 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1033 loop->header);
1034 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
1035 latch = loop->latch = new_bbs[1];
1036 e = latch_edge = new_spec_edges[SE_LATCH];
1037 }
1038 else
1039 {
1040 redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1041 loop->header);
1042 redirect_edge_and_branch_force (e, new_bbs[0]);
1043 set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
1044 e = new_spec_edges[SE_LATCH];
1045 }
1046
1047 /* Record exit edge in this copy. */
1048 if (orig && TEST_BIT (wont_exit, j + 1))
1049 to_remove[(*n_to_remove)++] = new_spec_edges[SE_ORIG];
1050
1051 /* Record the first copy in the control flow order if it is not
1052 the original loop (i.e. in case of peeling). */
1053 if (!first_active_latch)
1054 {
1055 memcpy (first_active, new_bbs, n * sizeof (basic_block));
1056 first_active_latch = new_bbs[1];
1057 }
1058
1059 /* Set counts and frequencies. */
1060 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1061 {
1062 scale_bbs_frequencies (new_bbs, n, scale_act, REG_BR_PROB_BASE);
1063 scale_act = RDIV (scale_act * scale_step[j], REG_BR_PROB_BASE);
1064 }
1065 }
1066 free (new_bbs);
1067 free (orig_loops);
1068
1069 /* Update the original loop. */
1070 if (!is_latch)
1071 set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
1072 if (flags & DLTHE_FLAG_UPDATE_FREQ)
1073 {
1074 scale_bbs_frequencies (bbs, n, scale_main, REG_BR_PROB_BASE);
1075 free (scale_step);
1076 }
1077
1078 /* Update dominators of outer blocks if affected. */
1079 for (i = 0; i < n; i++)
1080 {
1081 basic_block dominated, dom_bb, *dom_bbs;
1082 int n_dom_bbs,j;
1083
1084 bb = bbs[i];
1085 bb->rbi->copy_number = 0;
1086
1087 n_dom_bbs = get_dominated_by (CDI_DOMINATORS, bb, &dom_bbs);
1088 for (j = 0; j < n_dom_bbs; j++)
1089 {
1090 dominated = dom_bbs[j];
1091 if (flow_bb_inside_loop_p (loop, dominated))
1092 continue;
1093 dom_bb = nearest_common_dominator (
1094 CDI_DOMINATORS, first_active[i], first_active_latch);
1095 set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
1096 }
1097 free (dom_bbs);
1098 }
1099 free (first_active);
1100
1101 free (bbs);
1102
1103 return true;
1104 }
1105
 1106 /* A callback for make_forwarder_block, to redirect all edges except for
 1107 MFB_KJ_EDGE to the entry part. E is the edge for which we should decide
 1108 whether to redirect it. */
1109
1110 static edge mfb_kj_edge;
1111 static bool
1112 mfb_keep_just (edge e)
1113 {
1114 return e != mfb_kj_edge;
1115 }
1116
 1117 /* A callback for make_forwarder_block, to update data structures for a basic
 1118 block JUMP created by redirecting an edge (only the latch edge is being
 1119 redirected). */
1120
1121 static void
1122 mfb_update_loops (basic_block jump)
1123 {
1124 struct loop *loop = EDGE_SUCC (jump, 0)->dest->loop_father;
1125
1126 if (dom_computed[CDI_DOMINATORS])
1127 set_immediate_dominator (CDI_DOMINATORS, jump, EDGE_PRED (jump, 0)->src);
1128 add_bb_to_loop (jump, loop);
1129 loop->latch = jump;
1130 }
1131
 1132 /* Creates a pre-header for LOOP. Returns the newly created block. Unless
 1133 CP_SIMPLE_PREHEADERS is set in FLAGS, we only force LOOP to have a single
 1134 entry; otherwise we also force the preheader block to have only one
 1135 successor. The function also updates dominators. */
1136
1137 static basic_block
1138 create_preheader (struct loop *loop, int flags)
1139 {
1140 edge e, fallthru;
1141 basic_block dummy;
1142 struct loop *cloop, *ploop;
1143 int nentry = 0;
1144 bool irred = false;
1145 bool latch_edge_was_fallthru;
1146 edge one_succ_pred = 0;
1147 edge_iterator ei;
1148
1149 cloop = loop->outer;
1150
1151 FOR_EACH_EDGE (e, ei, loop->header->preds)
1152 {
1153 if (e->src == loop->latch)
1154 continue;
1155 irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
1156 nentry++;
1157 if (EDGE_COUNT (e->src->succs) == 1)
1158 one_succ_pred = e;
1159 }
1160 gcc_assert (nentry);
1161 if (nentry == 1)
1162 {
1163 /* Get an edge that is different from the one from loop->latch
1164 to loop->header. */
1165 e = EDGE_PRED (loop->header,
1166 EDGE_PRED (loop->header, 0)->src == loop->latch);
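      /* The comparison yields 0 or 1 and thus selects whichever predecessor
         edge is not the latch edge.  */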
1167
1168 if (!(flags & CP_SIMPLE_PREHEADERS) || EDGE_COUNT (e->src->succs) == 1)
1169 return NULL;
1170 }
1171
1172 mfb_kj_edge = loop_latch_edge (loop);
1173 latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
1174 fallthru = make_forwarder_block (loop->header, mfb_keep_just,
1175 mfb_update_loops);
1176 dummy = fallthru->src;
1177 loop->header = fallthru->dest;
1178
1179 /* The header could be a latch of some superloop(s); due to design of
1180 split_block, it would now move to fallthru->dest. */
1181 for (ploop = loop; ploop; ploop = ploop->outer)
1182 if (ploop->latch == dummy)
1183 ploop->latch = fallthru->dest;
1184
1185 /* Try to be clever in placing the newly created preheader. The idea is to
1186 avoid breaking any "fallthruness" relationship between blocks.
1187
1188 The preheader was created just before the header and all incoming edges
1189 to the header were redirected to the preheader, except the latch edge.
1190 So the only problematic case is when this latch edge was a fallthru
1191 edge: it is not anymore after the preheader creation so we have broken
1192 the fallthruness. We're therefore going to look for a better place. */
1193 if (latch_edge_was_fallthru)
1194 {
1195 if (one_succ_pred)
1196 e = one_succ_pred;
1197 else
1198 e = EDGE_PRED (dummy, 0);
1199
1200 move_block_after (dummy, e->src);
1201 }
1202
1203 loop->header->loop_father = loop;
1204 add_bb_to_loop (dummy, cloop);
1205
1206 if (irred)
1207 {
1208 dummy->flags |= BB_IRREDUCIBLE_LOOP;
1209 EDGE_SUCC (dummy, 0)->flags |= EDGE_IRREDUCIBLE_LOOP;
1210 }
1211
1212 if (dump_file)
1213 fprintf (dump_file, "Created preheader block for loop %i\n",
1214 loop->num);
1215
1216 return dummy;
1217 }
1218
1219 /* Create preheaders for each loop from loop tree stored in LOOPS; for meaning
1220 of FLAGS see create_preheader. */
1221 void
1222 create_preheaders (struct loops *loops, int flags)
1223 {
1224 unsigned i;
1225 for (i = 1; i < loops->num; i++)
1226 create_preheader (loops->parray[i], flags);
1227 loops->state |= LOOPS_HAVE_PREHEADERS;
1228 }
1229
 1230 /* Forces all loop latches of loops from the loop tree LOOPS to have only a
 1231 single successor. */
1232 void
1233 force_single_succ_latches (struct loops *loops)
1234 {
1235 unsigned i;
1236 struct loop *loop;
1237 edge e;
1238
1239 for (i = 1; i < loops->num; i++)
1240 {
1241 loop = loops->parray[i];
1242 if (loop->latch != loop->header && EDGE_COUNT (loop->latch->succs) == 1)
1243 continue;
1244
1245 e = find_edge (loop->latch, loop->header);
1246
1247 loop_split_edge_with (e, NULL_RTX);
1248 }
1249 loops->state |= LOOPS_HAVE_SIMPLE_LATCHES;
1250 }
1251
 1252 /* A quite stupid function to put INSNS on edge E. They are supposed to form
 1253 just one basic block. Jumps in INSNS are not handled, so the CFG does not
 1254 have to be valid after this function. The created block is placed at the
 1255 correct place in the LOOPS structure and its dominator is set. */
1256 basic_block
1257 loop_split_edge_with (edge e, rtx insns)
1258 {
1259 basic_block src, dest, new_bb;
1260 struct loop *loop_c;
1261
1262 src = e->src;
1263 dest = e->dest;
1264
1265 loop_c = find_common_loop (src->loop_father, dest->loop_father);
1266
1267 /* Create basic block for it. */
1268
1269 new_bb = split_edge (e);
1270 add_bb_to_loop (new_bb, loop_c);
1271 new_bb->flags |= (insns ? BB_SUPERBLOCK : 0);
1272
1273 if (insns)
1274 emit_insn_after (insns, BB_END (new_bb));
1275
1276 if (dest->loop_father->latch == src)
1277 dest->loop_father->latch = new_bb;
1278
1279 return new_bb;
1280 }
1281
1282 /* Uses the natural loop discovery to recreate loop notes. */
1283 void
1284 create_loop_notes (void)
1285 {
1286 rtx insn, head, end;
1287 struct loops loops;
1288 struct loop *loop;
1289 basic_block *first, *last, bb, pbb;
1290 struct loop **stack, **top;
1291
1292 #ifdef ENABLE_CHECKING
1293 /* Verify that there really are no loop notes. */
1294 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1295 gcc_assert (!NOTE_P (insn) ||
1296 NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG);
1297 #endif
1298
1299 flow_loops_find (&loops);
1300 free_dominance_info (CDI_DOMINATORS);
1301 if (loops.num > 1)
1302 {
1303 last = xcalloc (loops.num, sizeof (basic_block));
1304
1305 FOR_EACH_BB (bb)
1306 {
1307 for (loop = bb->loop_father; loop->outer; loop = loop->outer)
1308 last[loop->num] = bb;
1309 }
1310
1311 first = xcalloc (loops.num, sizeof (basic_block));
1312 stack = xcalloc (loops.num, sizeof (struct loop *));
1313 top = stack;
1314
1315 FOR_EACH_BB (bb)
1316 {
1317 for (loop = bb->loop_father; loop->outer; loop = loop->outer)
1318 {
1319 if (!first[loop->num])
1320 {
1321 *top++ = loop;
1322 first[loop->num] = bb;
1323 }
1324
1325 if (bb == last[loop->num])
1326 {
1327 /* Prevent loops from overlapping. */
1328 while (*--top != loop)
1329 last[(*top)->num] = EXIT_BLOCK_PTR;
1330
1331 /* If loop starts with jump into it, place the note in
1332 front of the jump. */
1333 insn = PREV_INSN (BB_HEAD (first[loop->num]));
1334 if (insn
1335 && BARRIER_P (insn))
1336 insn = PREV_INSN (insn);
1337
1338 if (insn
1339 && JUMP_P (insn)
1340 && any_uncondjump_p (insn)
1341 && onlyjump_p (insn))
1342 {
1343 pbb = BLOCK_FOR_INSN (insn);
1344 gcc_assert (pbb && EDGE_COUNT (pbb->succs) == 1);
1345
1346 if (!flow_bb_inside_loop_p (loop, EDGE_SUCC (pbb, 0)->dest))
1347 insn = BB_HEAD (first[loop->num]);
1348 }
1349 else
1350 insn = BB_HEAD (first[loop->num]);
1351
1352 head = BB_HEAD (first[loop->num]);
1353 emit_note_before (NOTE_INSN_LOOP_BEG, insn);
1354 BB_HEAD (first[loop->num]) = head;
1355
 1356 /* Position the note correctly with respect to the barrier. */
1357 insn = BB_END (last[loop->num]);
1358 if (NEXT_INSN (insn)
1359 && BARRIER_P (NEXT_INSN (insn)))
1360 insn = NEXT_INSN (insn);
1361
1362 end = BB_END (last[loop->num]);
1363 emit_note_after (NOTE_INSN_LOOP_END, insn);
1364 BB_END (last[loop->num]) = end;
1365 }
1366 }
1367 }
1368
1369 free (first);
1370 free (last);
1371 free (stack);
1372 }
1373 flow_loops_free (&loops);
1374 }
1375
 1376 /* The structure of LOOPS might have changed. Some loops might have been
 1377 removed (and their headers and latches were set to NULL), loop exits might
 1378 have been removed (thus the loop nesting may be wrong), and some blocks and
 1379 edges were changed (so the information about the bb --> loop mapping may no
 1380 longer be correct). But still, for the remaining loops the header dominates
 1381 the latch, and loops did not get new subloops (new loops might possibly get
 1382 created, but we are not interested in them). Fix up the mess.
1383
1384 If CHANGED_BBS is not NULL, basic blocks whose loop has changed are
1385 marked in it. */
1386
1387 void
1388 fix_loop_structure (struct loops *loops, bitmap changed_bbs)
1389 {
1390 basic_block bb;
1391 struct loop *loop, *ploop;
1392 unsigned i;
1393
1394 /* Remove the old bb -> loop mapping. */
1395 FOR_EACH_BB (bb)
1396 {
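      /* Stash the old loop depth in BB->aux so that blocks whose loop changed
         can be reported through CHANGED_BBS at the end.  */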
1397 bb->aux = (void *) (size_t) bb->loop_father->depth;
1398 bb->loop_father = loops->tree_root;
1399 }
1400
1401 /* Remove the dead loops from structures. */
1402 loops->tree_root->num_nodes = n_basic_blocks + 2;
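  /* The "+ 2" accounts for the entry and exit blocks, which always belong
     to the root of the loop tree.  */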
1403 for (i = 1; i < loops->num; i++)
1404 {
1405 loop = loops->parray[i];
1406 if (!loop)
1407 continue;
1408
1409 loop->num_nodes = 0;
1410 if (loop->header)
1411 continue;
1412
1413 while (loop->inner)
1414 {
1415 ploop = loop->inner;
1416 flow_loop_tree_node_remove (ploop);
1417 flow_loop_tree_node_add (loop->outer, ploop);
1418 }
1419
1420 /* Remove the loop and free its data. */
1421 flow_loop_tree_node_remove (loop);
1422 loops->parray[loop->num] = NULL;
1423 flow_loop_free (loop);
1424 }
1425
1426 /* Rescan the bodies of loops, starting from the outermost. */
1427 loop = loops->tree_root;
1428 while (1)
1429 {
1430 if (loop->inner)
1431 loop = loop->inner;
1432 else
1433 {
1434 while (!loop->next
1435 && loop != loops->tree_root)
1436 loop = loop->outer;
1437 if (loop == loops->tree_root)
1438 break;
1439
1440 loop = loop->next;
1441 }
1442
1443 loop->num_nodes = flow_loop_nodes_find (loop->header, loop);
1444 }
1445
1446 /* Now fix the loop nesting. */
1447 for (i = 1; i < loops->num; i++)
1448 {
1449 loop = loops->parray[i];
1450 if (!loop)
1451 continue;
1452
1453 bb = loop_preheader_edge (loop)->src;
1454 if (bb->loop_father != loop->outer)
1455 {
1456 flow_loop_tree_node_remove (loop);
1457 flow_loop_tree_node_add (bb->loop_father, loop);
1458 }
1459 }
1460
1461 /* Mark the blocks whose loop has changed. */
1462 FOR_EACH_BB (bb)
1463 {
1464 if (changed_bbs
1465 && (void *) (size_t) bb->loop_father->depth != bb->aux)
1466 bitmap_set_bit (changed_bbs, bb->index);
1467
1468 bb->aux = NULL;
1469 }
1470
1471 mark_single_exit_loops (loops);
1472 mark_irreducible_loops (loops);
1473 }