+/* Process loops from innermost to outer, stopping at the innermost
+ loop we unrolled. */
+
+/* Recursive worker for complete loop unrolling.  First recurses into
+ every loop nested inside LOOP, then attempts to unroll LOOP itself.
+ MAY_INCREASE_SIZE permits unrolling that grows code size;
+ UNROLL_OUTER additionally permits that growth for outermost loops.
+ Fathers of unrolled loops that need follow-up constant propagation
+ are pushed onto FATHER_STACK (each at most once, tracked via the
+ father's AUX field -- the caller is expected to drain the stack and
+ clear AUX).  Returns true iff this loop or some inner loop was
+ unrolled. */
+
+static bool
+tree_unroll_loops_completely_1 (bool may_increase_size, bool unroll_outer,
+ vec<loop_p, va_stack>& father_stack,
+ struct loop *loop)
+{
+ struct loop *loop_father;
+ bool changed = false;
+ struct loop *inner;
+ enum unroll_level ul;
+
+ /* Process inner loops first.  Note we iterate siblings via
+ inner->next, recursing into each subtree. */
+ for (inner = loop->inner; inner != NULL; inner = inner->next)
+ changed |= tree_unroll_loops_completely_1 (may_increase_size,
+ unroll_outer, father_stack,
+ inner);
+
+ /* If we changed an inner loop we cannot process outer loops in this
+ iteration because SSA form is not up-to-date. Continue with
+ siblings of outer loops instead. */
+ if (changed)
+ return true;
+
+ /* Try to unroll this loop. */
+ loop_father = loop_outer (loop);
+ /* No father presumably means LOOP is the root of the loop tree
+ (the function body pseudo-loop) -- nothing to unroll there. */
+ if (!loop_father)
+ return false;
+
+ /* Pick how aggressive unrolling may be: UL_ALL allows code growth,
+ UL_NO_GROWTH restricts to growth-free unrolling. */
+ if (may_increase_size && optimize_loop_nest_for_speed_p (loop)
+ /* Unroll outermost loops only if asked to do so or they do
+ not cause code growth. */
+ && (unroll_outer || loop_outer (loop_father)))
+ ul = UL_ALL;
+ else
+ ul = UL_NO_GROWTH;
+
+ if (canonicalize_loop_induction_variables
+ (loop, false, ul, !flag_tree_loop_ivcanon))
+ {
+ /* If we'll continue unrolling, we need to propagate constants
+ within the new basic blocks to fold away induction variable
+ computations; otherwise, the size might blow up before the
+ iteration is complete and the IR eventually cleaned up. */
+ if (loop_outer (loop_father) && !loop_father->aux)
+ {
+ father_stack.safe_push (loop_father);
+ /* Non-NULL AUX marks the father as already queued so it is
+ pushed at most once. */
+ loop_father->aux = loop_father;
+ }
+
+ return true;
+ }
+
+ return false;
+}
+