}
if (changed_p)
- scev_reset ();
- return changed_p ? (TODO_update_ssa_only_virtuals) : 0;
+ {
+ unsigned todo = TODO_update_ssa_only_virtuals;
+ todo |= loop_invariant_motion_in_fun (cfun, false);
+ scev_reset ();
+ return todo;
+ }
+ return 0;
}
} // anon namespace
}
/* Moves invariants from loops. Only "expensive" invariants are moved out --
- i.e. those that are likely to be win regardless of the register pressure. */
+ i.e. those that are likely to be a win regardless of the register pressure.
+ Only perform store motion if STORE_MOTION is true. */
-static unsigned int
-tree_ssa_lim (function *fun)
+unsigned int
+loop_invariant_motion_in_fun (function *fun, bool store_motion)
{
unsigned int todo = 0;
/* Execute store motion. Force the necessary invariants to be moved
out of the loops as well. */
- do_store_motion ();
+ if (store_motion)
+ do_store_motion ();
free (rpo);
rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
if (number_of_loops (fun) <= 1)
return 0;
- unsigned int todo = tree_ssa_lim (fun);
+ unsigned int todo = loop_invariant_motion_in_fun (fun, true);
if (!in_loop_pipeline)
loop_optimizer_finalize ();
extern void tree_unroll_loop (class loop *, unsigned,
edge, class tree_niter_desc *);
extern tree canonicalize_loop_ivs (class loop *, tree *, bool);
-
+extern unsigned int loop_invariant_motion_in_fun (function *, bool);
#endif /* GCC_TREE_SSA_LOOP_MANIP_H */