gcc/tree-ssa-loop-im.c
1 /* Loop invariant motion.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "output.h"
29 #include "tree-pretty-print.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "timevar.h"
34 #include "cfgloop.h"
35 #include "domwalk.h"
36 #include "params.h"
37 #include "tree-pass.h"
38 #include "flags.h"
39 #include "hashtab.h"
40 #include "tree-affine.h"
41 #include "pointer-set.h"
42 #include "tree-ssa-propagate.h"
43
44 /* TODO: Support for predicated code motion. I.e.
45
46 while (1)
47 {
48 if (cond)
49 {
50 a = inv;
51 something;
52 }
53 }
54
55 Where COND and INV are invariants, but evaluating INV may trap or be
56 invalid for some other reason if !COND. This may be transformed to
57
58 if (cond)
59 a = inv;
60 while (1)
61 {
62 if (cond)
63 something;
64 } */
65
66 /* A type for the list of statements that have to be moved in order to be able
67 to hoist an invariant computation. */
68
69 struct depend
70 {
71 gimple stmt;
72 struct depend *next;
73 };
74
75 /* The auxiliary data kept for each statement. */
76
77 struct lim_aux_data
78 {
79 struct loop *max_loop; /* The outermost loop in which the statement
80 is invariant. */
81
82 struct loop *tgt_loop; /* The loop out of which we want to move the
83 invariant. */
84
85 struct loop *always_executed_in;
86 /* The outermost loop for which we are sure
87 the statement is executed if the loop
88 is entered. */
89
90 unsigned cost; /* Cost of the computation performed by the
91 statement. */
92
93 struct depend *depends; /* List of statements that must also be hoisted
94 out of the loop when this statement is
95 hoisted; i.e. those that define the operands
96 of the statement and are inside the
97 MAX_LOOP loop. */
98 };
99
100 /* Maps statements to their lim_aux_data. */
101
102 static struct pointer_map_t *lim_aux_data_map;
103
104 /* Description of a memory reference location. */
105
106 typedef struct mem_ref_loc
107 {
108 tree *ref; /* The reference itself. */
109 gimple stmt; /* The statement in which it occurs. */
110 } *mem_ref_loc_p;
111
112 DEF_VEC_P(mem_ref_loc_p);
113 DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);
114
115 /* The list of memory reference locations in a loop. */
116
117 typedef struct mem_ref_locs
118 {
119 VEC (mem_ref_loc_p, heap) *locs;
120 } *mem_ref_locs_p;
121
122 DEF_VEC_P(mem_ref_locs_p);
123 DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);
124
125 /* Description of a memory reference. */
126
127 typedef struct mem_ref
128 {
129 tree mem; /* The memory itself. */
130 unsigned id; /* ID assigned to the memory reference
131 (its index in memory_accesses.refs_list) */
132 hashval_t hash; /* Its hash value. */
133 bitmap stored; /* The set of loops in which this memory location
134 is stored to. */
135 VEC (mem_ref_locs_p, heap) *accesses_in_loop;
136 /* The locations of the accesses. Vector
137 indexed by the loop number. */
138
139 /* The following sets are computed on demand. We keep both the set and
140 its complement, so that we know whether the information was
141 already computed or not. */
142 bitmap indep_loop; /* The set of loops in which the memory
143 reference is independent, meaning:
144 If it is stored in the loop, this store
145 is independent of all other loads and
146 stores.
147 If it is only loaded, then it is independent
148 of all stores in the loop. */
149 bitmap dep_loop; /* The complement of INDEP_LOOP. */
150
151 bitmap indep_ref; /* The set of memory references of which
152 this reference is independent. */
153 bitmap dep_ref; /* The complement of INDEP_REF. */
154 } *mem_ref_p;
155
156 DEF_VEC_P(mem_ref_p);
157 DEF_VEC_ALLOC_P(mem_ref_p, heap);
158
159 DEF_VEC_P(bitmap);
160 DEF_VEC_ALLOC_P(bitmap, heap);
161
162 DEF_VEC_P(htab_t);
163 DEF_VEC_ALLOC_P(htab_t, heap);
164
165 /* Description of memory accesses in loops. */
166
167 static struct
168 {
169 /* The hash table of memory references accessed in loops. */
170 htab_t refs;
171
172 /* The list of memory references. */
173 VEC (mem_ref_p, heap) *refs_list;
174
175 /* The set of memory references accessed in each loop. */
176 VEC (bitmap, heap) *refs_in_loop;
177
178 /* The set of memory references accessed in each loop, including
179 subloops. */
180 VEC (bitmap, heap) *all_refs_in_loop;
181
182 /* The set of memory references stored in each loop, including
183 subloops. */
184 VEC (bitmap, heap) *all_refs_stored_in_loop;
185
186 /* Cache for expanding memory addresses. */
187 struct pointer_map_t *ttae_cache;
188 } memory_accesses;
189
190 static bool ref_indep_loop_p (struct loop *, mem_ref_p);
191
192 /* Minimum cost of an expensive expression. */
193 #define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))
194
195 /* The outermost loop for which execution of the header guarantees that the
196 block will be executed. */
197 #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
198 #define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL))
199
200 /* Whether the reference was analyzable. */
201 #define MEM_ANALYZABLE(REF) ((REF)->mem != error_mark_node)
202
203 static struct lim_aux_data *
204 init_lim_data (gimple stmt)
205 {
206 void **p = pointer_map_insert (lim_aux_data_map, stmt);
207
208 *p = XCNEW (struct lim_aux_data);
209 return (struct lim_aux_data *) *p;
210 }
211
212 static struct lim_aux_data *
213 get_lim_data (gimple stmt)
214 {
215 void **p = pointer_map_contains (lim_aux_data_map, stmt);
216 if (!p)
217 return NULL;
218
219 return (struct lim_aux_data *) *p;
220 }
221
222 /* Releases the memory occupied by DATA. */
223
224 static void
225 free_lim_aux_data (struct lim_aux_data *data)
226 {
227 struct depend *dep, *next;
228
229 for (dep = data->depends; dep; dep = next)
230 {
231 next = dep->next;
232 free (dep);
233 }
234 free (data);
235 }
236
237 static void
238 clear_lim_data (gimple stmt)
239 {
240 void **p = pointer_map_contains (lim_aux_data_map, stmt);
241 if (!p)
242 return;
243
244 free_lim_aux_data ((struct lim_aux_data *) *p);
245 *p = NULL;
246 }
247
248 /* Calls CBCK for each index in memory reference ADDR_P. There are two
249 kinds of situations handled; in each of these cases, the memory reference
250 and DATA are passed to the callback:
251
252 Access to an array: ARRAY_{RANGE_}REF (base, index). In this case we also
253 pass the pointer to the index to the callback.
254
255 Pointer dereference: INDIRECT_REF (addr). In this case we also pass the
256 pointer to addr to the callback.
257
258 If the callback returns false, the whole search stops and false is returned.
259 Otherwise the function returns true after traversing the whole
260 reference *ADDR_P. */
261
262 bool
263 for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
264 {
265 tree *nxt, *idx;
266
267 for (; ; addr_p = nxt)
268 {
269 switch (TREE_CODE (*addr_p))
270 {
271 case SSA_NAME:
272 return cbck (*addr_p, addr_p, data);
273
274 case MEM_REF:
275 nxt = &TREE_OPERAND (*addr_p, 0);
276 return cbck (*addr_p, nxt, data);
277
278 case BIT_FIELD_REF:
279 case VIEW_CONVERT_EXPR:
280 case REALPART_EXPR:
281 case IMAGPART_EXPR:
282 nxt = &TREE_OPERAND (*addr_p, 0);
283 break;
284
285 case COMPONENT_REF:
286 /* If the component has varying offset, it behaves like an index
287 as well. */
288 idx = &TREE_OPERAND (*addr_p, 2);
289 if (*idx
290 && !cbck (*addr_p, idx, data))
291 return false;
292
293 nxt = &TREE_OPERAND (*addr_p, 0);
294 break;
295
296 case ARRAY_REF:
297 case ARRAY_RANGE_REF:
298 nxt = &TREE_OPERAND (*addr_p, 0);
299 if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
300 return false;
301 break;
302
303 case VAR_DECL:
304 case PARM_DECL:
305 case STRING_CST:
306 case RESULT_DECL:
307 case VECTOR_CST:
308 case COMPLEX_CST:
309 case INTEGER_CST:
310 case REAL_CST:
311 case FIXED_CST:
312 case CONSTRUCTOR:
313 return true;
314
315 case ADDR_EXPR:
316 gcc_assert (is_gimple_min_invariant (*addr_p));
317 return true;
318
319 case TARGET_MEM_REF:
320 idx = &TMR_BASE (*addr_p);
321 if (*idx
322 && !cbck (*addr_p, idx, data))
323 return false;
324 idx = &TMR_INDEX (*addr_p);
325 if (*idx
326 && !cbck (*addr_p, idx, data))
327 return false;
328 idx = &TMR_INDEX2 (*addr_p);
329 if (*idx
330 && !cbck (*addr_p, idx, data))
331 return false;
332 return true;
333
334 default:
335 gcc_unreachable ();
336 }
337 }
338 }
339
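/* For illustration: given a reference such as a[i].f[j], the walk above
   invokes CBCK once for &j, once for the component's varying offset (if
   any), and once for &i.  A minimal callback that merely tests invariance
   could look like this sketch (hypothetical name; compare may_move_till
   below):

     static bool
     idx_invariant_in_loop_p (tree ref ATTRIBUTE_UNUSED, tree *index,
                              void *data)
     {
       struct loop *loop = (struct loop *) data;
       return outermost_invariant_loop (*index, loop) != NULL;
     }

     ...
     for_each_index (&mem, idx_invariant_in_loop_p, loop);  */
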
340 /* If it is possible to hoist the statement STMT unconditionally,
341 returns MOVE_POSSIBLE.
342 If it is possible to hoist the statement STMT, but we must avoid making
343 it executed if it would not be executed in the original program (e.g.
344 because it may trap), return MOVE_PRESERVE_EXECUTION.
345 Otherwise return MOVE_IMPOSSIBLE. */
346
347 enum move_pos
348 movement_possibility (gimple stmt)
349 {
350 tree lhs;
351 enum move_pos ret = MOVE_POSSIBLE;
352
353 if (flag_unswitch_loops
354 && gimple_code (stmt) == GIMPLE_COND)
355 {
356 /* If we perform unswitching, force the operands of the invariant
357 condition to be moved out of the loop. */
358 return MOVE_POSSIBLE;
359 }
360
361 if (gimple_code (stmt) == GIMPLE_PHI
362 && gimple_phi_num_args (stmt) <= 2
363 && is_gimple_reg (gimple_phi_result (stmt))
364 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt)))
365 return MOVE_POSSIBLE;
366
367 if (gimple_get_lhs (stmt) == NULL_TREE)
368 return MOVE_IMPOSSIBLE;
369
370 if (gimple_vdef (stmt))
371 return MOVE_IMPOSSIBLE;
372
373 if (stmt_ends_bb_p (stmt)
374 || gimple_has_volatile_ops (stmt)
375 || gimple_has_side_effects (stmt)
376 || stmt_could_throw_p (stmt))
377 return MOVE_IMPOSSIBLE;
378
379 if (is_gimple_call (stmt))
380 {
381 /* While a pure or const call is guaranteed to have no side effects, we
382 cannot move it arbitrarily. Consider code like
383
384 char *s = something ();
385
386 while (1)
387 {
388 if (s)
389 t = strlen (s);
390 else
391 t = 0;
392 }
393
394 Here the strlen call cannot be moved out of the loop, even though
395 s is invariant. In addition to possibly creating a call with
396 invalid arguments, moving out a function call that is not executed
397 may cause performance regressions in case the call is costly and
398 not executed at all. */
399 ret = MOVE_PRESERVE_EXECUTION;
400 lhs = gimple_call_lhs (stmt);
401 }
402 else if (is_gimple_assign (stmt))
403 lhs = gimple_assign_lhs (stmt);
404 else
405 return MOVE_IMPOSSIBLE;
406
407 if (TREE_CODE (lhs) == SSA_NAME
408 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
409 return MOVE_IMPOSSIBLE;
410
411 if (TREE_CODE (lhs) != SSA_NAME
412 || gimple_could_trap_p (stmt))
413 return MOVE_PRESERVE_EXECUTION;
414
415 return ret;
416 }
417
418 /* Suppose that operand DEF is used inside the LOOP. Returns the outermost
419 loop to which we could move the expression using DEF if it did not have
420 other operands, i.e. the outermost loop enclosing LOOP in which the value
421 of DEF is invariant. */
422
423 static struct loop *
424 outermost_invariant_loop (tree def, struct loop *loop)
425 {
426 gimple def_stmt;
427 basic_block def_bb;
428 struct loop *max_loop;
429 struct lim_aux_data *lim_data;
430
431 if (!def)
432 return superloop_at_depth (loop, 1);
433
434 if (TREE_CODE (def) != SSA_NAME)
435 {
436 gcc_assert (is_gimple_min_invariant (def));
437 return superloop_at_depth (loop, 1);
438 }
439
440 def_stmt = SSA_NAME_DEF_STMT (def);
441 def_bb = gimple_bb (def_stmt);
442 if (!def_bb)
443 return superloop_at_depth (loop, 1);
444
445 max_loop = find_common_loop (loop, def_bb->loop_father);
446
447 lim_data = get_lim_data (def_stmt);
448 if (lim_data != NULL && lim_data->max_loop != NULL)
449 max_loop = find_common_loop (max_loop,
450 loop_outer (lim_data->max_loop));
451 if (max_loop == loop)
452 return NULL;
453 max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
454
455 return max_loop;
456 }
457
458 /* DATA is a structure containing information associated with a statement
459 inside LOOP. DEF is one of the operands of this statement.
460
461 Find the outermost loop enclosing LOOP in which the value of DEF is
462 invariant and record this in the DATA->max_loop field. If DEF itself
463 is defined inside this loop as well (i.e. we need to hoist it out of the
464 loop if we want to hoist the statement represented by DATA), record the
465 statement that defines DEF in the DATA->depends list. Additionally, if
466 ADD_COST is true, add the cost of the computation of DEF to DATA->cost.
467
468 If DEF is not invariant in LOOP, return false. Otherwise return true. */
469
470 static bool
471 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
472 bool add_cost)
473 {
474 gimple def_stmt = SSA_NAME_DEF_STMT (def);
475 basic_block def_bb = gimple_bb (def_stmt);
476 struct loop *max_loop;
477 struct depend *dep;
478 struct lim_aux_data *def_data;
479
480 if (!def_bb)
481 return true;
482
483 max_loop = outermost_invariant_loop (def, loop);
484 if (!max_loop)
485 return false;
486
487 if (flow_loop_nested_p (data->max_loop, max_loop))
488 data->max_loop = max_loop;
489
490 def_data = get_lim_data (def_stmt);
491 if (!def_data)
492 return true;
493
494 if (add_cost
495 /* Only add the cost if the statement defining DEF is inside LOOP,
496 i.e. if it is likely that by moving the invariants dependent
497 on it, we will be able to avoid creating a new register for
498 it (since it will be only used in these dependent invariants). */
499 && def_bb->loop_father == loop)
500 data->cost += def_data->cost;
501
502 dep = XNEW (struct depend);
503 dep->stmt = def_stmt;
504 dep->next = data->depends;
505 data->depends = dep;
506
507 return true;
508 }
509
510 /* Returns an estimate of the cost of statement STMT. The values here
511 are just ad-hoc constants, similar to costs for inlining. */
512
513 static unsigned
514 stmt_cost (gimple stmt)
515 {
516 /* Always try to create possibilities for unswitching. */
517 if (gimple_code (stmt) == GIMPLE_COND
518 || gimple_code (stmt) == GIMPLE_PHI)
519 return LIM_EXPENSIVE;
520
521 /* We should be hoisting calls if possible. */
522 if (is_gimple_call (stmt))
523 {
524 tree fndecl;
525
526 /* Unless the call is to builtin_constant_p; this always folds to a
527 constant, so moving it is useless. */
528 fndecl = gimple_call_fndecl (stmt);
529 if (fndecl
530 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
531 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
532 return 0;
533
534 return LIM_EXPENSIVE;
535 }
536
537 /* Hoisting memory references out should almost surely be a win. */
538 if (gimple_references_memory_p (stmt))
539 return LIM_EXPENSIVE;
540
541 if (gimple_code (stmt) != GIMPLE_ASSIGN)
542 return 1;
543
544 switch (gimple_assign_rhs_code (stmt))
545 {
546 case MULT_EXPR:
547 case WIDEN_MULT_EXPR:
548 case WIDEN_MULT_PLUS_EXPR:
549 case WIDEN_MULT_MINUS_EXPR:
550 case DOT_PROD_EXPR:
551 case FMA_EXPR:
552 case TRUNC_DIV_EXPR:
553 case CEIL_DIV_EXPR:
554 case FLOOR_DIV_EXPR:
555 case ROUND_DIV_EXPR:
556 case EXACT_DIV_EXPR:
557 case CEIL_MOD_EXPR:
558 case FLOOR_MOD_EXPR:
559 case ROUND_MOD_EXPR:
560 case TRUNC_MOD_EXPR:
561 case RDIV_EXPR:
562 /* Division and multiplication are usually expensive. */
563 return LIM_EXPENSIVE;
564
565 case LSHIFT_EXPR:
566 case RSHIFT_EXPR:
567 case WIDEN_LSHIFT_EXPR:
568 case LROTATE_EXPR:
569 case RROTATE_EXPR:
570 /* Shifts and rotates are usually expensive. */
571 return LIM_EXPENSIVE;
572
573 case CONSTRUCTOR:
574 /* Make vector construction cost proportional to the number
575 of elements. */
576 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
577
578 case SSA_NAME:
579 case PAREN_EXPR:
580 /* Whether or not something is wrapped inside a PAREN_EXPR
581 should not change move cost. Nor should an intermediate
582 unpropagated SSA name copy. */
583 return 0;
584
585 default:
586 return 1;
587 }
588 }
589
590 /* Finds the outermost loop between OUTER and LOOP in which the memory
591 reference REF is independent. If REF is not independent in LOOP, NULL
592 is returned instead. */
593
594 static struct loop *
595 outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
596 {
597 struct loop *aloop;
598
599 if (bitmap_bit_p (ref->stored, loop->num))
600 return NULL;
601
602 for (aloop = outer;
603 aloop != loop;
604 aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
605 if (!bitmap_bit_p (ref->stored, aloop->num)
606 && ref_indep_loop_p (aloop, ref))
607 return aloop;
608
609 if (ref_indep_loop_p (loop, ref))
610 return loop;
611 else
612 return NULL;
613 }
614
615 /* If there is a simple load or store to a memory reference in STMT, returns
616 the location of the memory reference, and sets IS_STORE according to whether
617 it is a store or load. Otherwise, returns NULL. */
618
619 static tree *
620 simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
621 {
622 tree *lhs;
623 enum tree_code code;
624
625 /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
626 if (gimple_code (stmt) != GIMPLE_ASSIGN)
627 return NULL;
628
629 code = gimple_assign_rhs_code (stmt);
630
631 lhs = gimple_assign_lhs_ptr (stmt);
632
633 if (TREE_CODE (*lhs) == SSA_NAME)
634 {
635 if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
636 || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
637 return NULL;
638
639 *is_store = false;
640 return gimple_assign_rhs1_ptr (stmt);
641 }
642 else if (code == SSA_NAME
643 || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
644 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
645 {
646 *is_store = true;
647 return lhs;
648 }
649 else
650 return NULL;
651 }
652
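/* For illustration (hypothetical GIMPLE): for  a.x = b_1  the function
   above returns the address of the LHS a.x with *IS_STORE set to true;
   for  c_2 = p_3->f  it returns the address of the RHS p_3->f with
   *IS_STORE set to false; for an aggregate copy  a.x = b.y  (neither
   side a register or invariant) it returns NULL.  */
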
653 /* Returns the memory reference contained in STMT. */
654
655 static mem_ref_p
656 mem_ref_in_stmt (gimple stmt)
657 {
658 bool store;
659 tree *mem = simple_mem_ref_in_stmt (stmt, &store);
660 hashval_t hash;
661 mem_ref_p ref;
662
663 if (!mem)
664 return NULL;
665 gcc_assert (!store);
666
667 hash = iterative_hash_expr (*mem, 0);
668 ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);
669
670 gcc_assert (ref != NULL);
671 return ref;
672 }
673
674 /* From a controlling predicate in DOM determine the arguments from
675 the PHI node PHI that are chosen if the predicate evaluates to
676 true or false, and store them to *TRUE_ARG_P and *FALSE_ARG_P if
677 those are non-NULL. Returns true if the arguments can be
678 determined, else returns false. */
679
680 static bool
681 extract_true_false_args_from_phi (basic_block dom, gimple phi,
682 tree *true_arg_p, tree *false_arg_p)
683 {
684 basic_block bb = gimple_bb (phi);
685 edge true_edge, false_edge, tem;
686 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
687
688 /* We have to verify that one edge into the PHI node is dominated
689 by the true edge of the predicate block and the other edge
690 dominated by the false edge. This ensures that the PHI argument
691 we are going to take is completely determined by the path we
692 take from the predicate block.
693 We can only use BB dominance checks below if the destination of
694 the true/false edges are dominated by their edge, thus only
695 have a single predecessor. */
696 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
697 tem = EDGE_PRED (bb, 0);
698 if (tem == true_edge
699 || (single_pred_p (true_edge->dest)
700 && (tem->src == true_edge->dest
701 || dominated_by_p (CDI_DOMINATORS,
702 tem->src, true_edge->dest))))
703 arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
704 else if (tem == false_edge
705 || (single_pred_p (false_edge->dest)
706 && (tem->src == false_edge->dest
707 || dominated_by_p (CDI_DOMINATORS,
708 tem->src, false_edge->dest))))
709 arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
710 else
711 return false;
712 tem = EDGE_PRED (bb, 1);
713 if (tem == true_edge
714 || (single_pred_p (true_edge->dest)
715 && (tem->src == true_edge->dest
716 || dominated_by_p (CDI_DOMINATORS,
717 tem->src, true_edge->dest))))
718 arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
719 else if (tem == false_edge
720 || (single_pred_p (false_edge->dest)
721 && (tem->src == false_edge->dest
722 || dominated_by_p (CDI_DOMINATORS,
723 tem->src, false_edge->dest))))
724 arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
725 else
726 return false;
727 if (!arg0 || !arg1)
728 return false;
729
730 if (true_arg_p)
731 *true_arg_p = arg0;
732 if (false_arg_p)
733 *false_arg_p = arg1;
734
735 return true;
736 }
737
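/* The shape recognized above, for illustration:

        DOM:  if (cond)
              /        \
        true edge   false edge
             |           |
            bb1         bb2
              \         /
         BB:  x = PHI <a (bb1), b (bb2)>

   Here *TRUE_ARG_P is set to a and *FALSE_ARG_P to b; bb1 and bb2 may
   be missing (the edges entering BB directly) or be replaced by chains
   of single-predecessor blocks.  */
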
738 /* Determine the outermost loop to which it is possible to hoist a statement
739 STMT and store it to LIM_DATA (STMT)->max_loop. To do this we determine
740 the outermost loop in which the value computed by STMT is invariant.
741 If MUST_PRESERVE_EXEC is true, additionally choose the loop so that
742 we preserve whether STMT is executed. It also fills in other related
743 information in LIM_DATA (STMT).
744
745 The function returns false if STMT cannot be hoisted outside of the loop it
746 is defined in, and true otherwise. */
747
748 static bool
749 determine_max_movement (gimple stmt, bool must_preserve_exec)
750 {
751 basic_block bb = gimple_bb (stmt);
752 struct loop *loop = bb->loop_father;
753 struct loop *level;
754 struct lim_aux_data *lim_data = get_lim_data (stmt);
755 tree val;
756 ssa_op_iter iter;
757
758 if (must_preserve_exec)
759 level = ALWAYS_EXECUTED_IN (bb);
760 else
761 level = superloop_at_depth (loop, 1);
762 lim_data->max_loop = level;
763
764 if (gimple_code (stmt) == GIMPLE_PHI)
765 {
766 use_operand_p use_p;
767 unsigned min_cost = UINT_MAX;
768 unsigned total_cost = 0;
769 struct lim_aux_data *def_data;
770
771 /* We will end up promoting dependencies to be unconditionally
772 evaluated. For this reason the PHI cost (and thus the
773 cost we remove from the loop by doing the invariant motion)
774 is that of the cheapest PHI argument dependency chain. */
775 FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE)
776 {
777 val = USE_FROM_PTR (use_p);
778 if (TREE_CODE (val) != SSA_NAME)
779 continue;
780 if (!add_dependency (val, lim_data, loop, false))
781 return false;
782 def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
783 if (def_data)
784 {
785 min_cost = MIN (min_cost, def_data->cost);
786 total_cost += def_data->cost;
787 }
788 }
789
790 lim_data->cost += min_cost;
791
792 if (gimple_phi_num_args (stmt) > 1)
793 {
794 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
795 gimple cond;
796 if (gsi_end_p (gsi_last_bb (dom)))
797 return false;
798 cond = gsi_stmt (gsi_last_bb (dom));
799 if (gimple_code (cond) != GIMPLE_COND)
800 return false;
801 /* Verify that this is an extended form of a diamond and
802 the PHI arguments are completely controlled by the
803 predicate in DOM. */
804 if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL))
805 return false;
806
807 /* Fold in dependencies and cost of the condition. */
808 FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE)
809 {
810 if (!add_dependency (val, lim_data, loop, false))
811 return false;
812 def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
813 if (def_data)
814 total_cost += def_data->cost;
815 }
816
817 /* We want to avoid unconditionally executing very expensive
818 operations. As costs for our dependencies cannot be
819 negative, just claim we are not invariant in this case.
820 We are also not sure whether the control flow inside the
821 loop will vanish. */
822 if (total_cost - min_cost >= 2 * LIM_EXPENSIVE
823 && !(min_cost != 0
824 && total_cost / min_cost <= 2))
825 return false;
826
827 /* Assume that the control-flow in the loop will vanish.
828 ??? We should verify this and not artificially increase
829 the cost if that is not the case. */
830 lim_data->cost += stmt_cost (stmt);
831 }
832
833 return true;
834 }
835 else
836 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
837 if (!add_dependency (val, lim_data, loop, true))
838 return false;
839
840 if (gimple_vuse (stmt))
841 {
842 mem_ref_p ref = mem_ref_in_stmt (stmt);
843
844 if (ref)
845 {
846 lim_data->max_loop
847 = outermost_indep_loop (lim_data->max_loop, loop, ref);
848 if (!lim_data->max_loop)
849 return false;
850 }
851 else
852 {
853 if ((val = gimple_vuse (stmt)) != NULL_TREE)
854 {
855 if (!add_dependency (val, lim_data, loop, false))
856 return false;
857 }
858 }
859 }
860
861 lim_data->cost += stmt_cost (stmt);
862
863 return true;
864 }
865
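/* A worked example of the PHI cost check above (made-up numbers, with
   LIM_EXPENSIVE == 20): argument dependency chains costing 2 and 50
   give min_cost == 2 and total_cost == 52; since 52 - 2 >= 40 and
   52 / 2 > 2, the PHI is rejected.  Chains costing 30 and 40 give
   70 - 30 == 40, which also triggers the first test, but the ratio
   70 / 30 == 2 (integer division) passes, so the PHI is still
   considered invariant.  */
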
866 /* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
867 and that one of the operands of this statement is computed by STMT.
868 Ensure that STMT (together with all the statements that define its
869 operands) is hoisted at least out of the loop LEVEL. */
870
871 static void
872 set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
873 {
874 struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
875 struct depend *dep;
876 struct lim_aux_data *lim_data;
877
878 stmt_loop = find_common_loop (orig_loop, stmt_loop);
879 lim_data = get_lim_data (stmt);
880 if (lim_data != NULL && lim_data->tgt_loop != NULL)
881 stmt_loop = find_common_loop (stmt_loop,
882 loop_outer (lim_data->tgt_loop));
883 if (flow_loop_nested_p (stmt_loop, level))
884 return;
885
886 gcc_assert (level == lim_data->max_loop
887 || flow_loop_nested_p (lim_data->max_loop, level));
888
889 lim_data->tgt_loop = level;
890 for (dep = lim_data->depends; dep; dep = dep->next)
891 set_level (dep->stmt, orig_loop, level);
892 }
893
894 /* Determines the outermost loop from which we want to hoist the statement
895 STMT. For now we choose the outermost possible loop. TODO -- use profiling
896 information to set it more sanely. */
897
898 static void
899 set_profitable_level (gimple stmt)
900 {
901 set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
902 }
903
904 /* Returns true if STMT is a call that has side effects. */
905
906 static bool
907 nonpure_call_p (gimple stmt)
908 {
909 if (gimple_code (stmt) != GIMPLE_CALL)
910 return false;
911
912 return gimple_has_side_effects (stmt);
913 }
914
915 /* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
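/* For example (illustrative GIMPLE, with b_3 loop-invariant):

     x_1 = a_2 / b_3;

   becomes

     reciptmp_4 = 1.0e+0 / b_3;   <-- invariant, may be hoisted
     x_1 = a_2 * reciptmp_4;      <-- remains in the loop  */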
916
917 static gimple
918 rewrite_reciprocal (gimple_stmt_iterator *bsi)
919 {
920 gimple stmt, stmt1, stmt2;
921 tree var, name, lhs, type;
922 tree real_one;
923 gimple_stmt_iterator gsi;
924
925 stmt = gsi_stmt (*bsi);
926 lhs = gimple_assign_lhs (stmt);
927 type = TREE_TYPE (lhs);
928
929 var = create_tmp_var (type, "reciptmp");
930 add_referenced_var (var);
931 DECL_GIMPLE_REG_P (var) = 1;
932
933 real_one = build_one_cst (type);
934
935 stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
936 var, real_one, gimple_assign_rhs2 (stmt));
937 name = make_ssa_name (var, stmt1);
938 gimple_assign_set_lhs (stmt1, name);
939
940 stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
941 gimple_assign_rhs1 (stmt));
942
943 /* Replace division stmt with reciprocal and multiply stmts.
944 The multiply stmt is not invariant, so update iterator
945 and avoid rescanning. */
946 gsi = *bsi;
947 gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
948 gsi_replace (&gsi, stmt2, true);
949
950 /* Continue processing with invariant reciprocal statement. */
951 return stmt1;
952 }
953
954 /* Check if the pattern at *BSI is a bittest of the form
955 (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
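/* For example (illustrative, with B loop-invariant but A not):

     tmp1 = A >> B;
     tmp2 = tmp1 & 1;
     if (tmp2 != 0) ...

   becomes

     shifttmp = 1 << B;   <-- invariant, may be hoisted
     tmp = A & shifttmp;
     if (tmp != 0) ...  */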
956
957 static gimple
958 rewrite_bittest (gimple_stmt_iterator *bsi)
959 {
960 gimple stmt, use_stmt, stmt1, stmt2;
961 tree lhs, var, name, t, a, b;
962 use_operand_p use;
963
964 stmt = gsi_stmt (*bsi);
965 lhs = gimple_assign_lhs (stmt);
966
967 /* Verify that the single use of lhs is a comparison against zero. */
968 if (TREE_CODE (lhs) != SSA_NAME
969 || !single_imm_use (lhs, &use, &use_stmt)
970 || gimple_code (use_stmt) != GIMPLE_COND)
971 return stmt;
972 if (gimple_cond_lhs (use_stmt) != lhs
973 || (gimple_cond_code (use_stmt) != NE_EXPR
974 && gimple_cond_code (use_stmt) != EQ_EXPR)
975 || !integer_zerop (gimple_cond_rhs (use_stmt)))
976 return stmt;
977
978 /* Get at the operands of the shift. The rhs is TMP1 & 1. */
979 stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
980 if (gimple_code (stmt1) != GIMPLE_ASSIGN)
981 return stmt;
982
983 /* There is a conversion in between possibly inserted by fold. */
984 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
985 {
986 t = gimple_assign_rhs1 (stmt1);
987 if (TREE_CODE (t) != SSA_NAME
988 || !has_single_use (t))
989 return stmt;
990 stmt1 = SSA_NAME_DEF_STMT (t);
991 if (gimple_code (stmt1) != GIMPLE_ASSIGN)
992 return stmt;
993 }
994
995 /* Verify that B is loop invariant but A is not. Verify that with
996 all the stmt walking we are still in the same loop. */
997 if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
998 || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
999 return stmt;
1000
1001 a = gimple_assign_rhs1 (stmt1);
1002 b = gimple_assign_rhs2 (stmt1);
1003
1004 if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
1005 && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
1006 {
1007 gimple_stmt_iterator rsi;
1008
1009 /* 1 << B */
1010 var = create_tmp_var (TREE_TYPE (a), "shifttmp");
1011 add_referenced_var (var);
1012 t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
1013 build_int_cst (TREE_TYPE (a), 1), b);
1014 stmt1 = gimple_build_assign (var, t);
1015 name = make_ssa_name (var, stmt1);
1016 gimple_assign_set_lhs (stmt1, name);
1017
1018 /* A & (1 << B) */
1019 t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
1020 stmt2 = gimple_build_assign (var, t);
1021 name = make_ssa_name (var, stmt2);
1022 gimple_assign_set_lhs (stmt2, name);
1023
1024 /* Replace the SSA_NAME we compare against zero. Adjust
1025 the type of zero accordingly. */
1026 SET_USE (use, name);
1027 gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));
1028
1029 /* Don't use gsi_replace here, none of the new assignments sets
1030 the variable originally set in stmt. Move bsi to stmt1, and
1031 then remove the original stmt, so that we get a chance to
1032 retain debug info for it. */
1033 rsi = *bsi;
1034 gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
1035 gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
1036 gsi_remove (&rsi, true);
1037
1038 return stmt1;
1039 }
1040
1041 return stmt;
1042 }
1043
1044
1045 /* Determine the outermost loops in which the statements in basic block BB are
1046 invariant, and record them in the LIM_DATA associated with the statements.
1047 Callback for walk_dominator_tree. */
1048
1049 static void
1050 determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
1051 basic_block bb)
1052 {
1053 enum move_pos pos;
1054 gimple_stmt_iterator bsi;
1055 gimple stmt;
1056 bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
1057 struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
1058 struct lim_aux_data *lim_data;
1059
1060 if (!loop_outer (bb->loop_father))
1061 return;
1062
1063 if (dump_file && (dump_flags & TDF_DETAILS))
1064 fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
1065 bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
1066
1067 /* Look at PHI nodes, but only if there are at most two.
1068 ??? We could relax this further by post-processing the inserted
1069 code and transforming adjacent cond-exprs with the same predicate
1070 to control flow again. */
1071 bsi = gsi_start_phis (bb);
1072 if (!gsi_end_p (bsi)
1073 && ((gsi_next (&bsi), gsi_end_p (bsi))
1074 || (gsi_next (&bsi), gsi_end_p (bsi))))
1075 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1076 {
1077 stmt = gsi_stmt (bsi);
1078
1079 pos = movement_possibility (stmt);
1080 if (pos == MOVE_IMPOSSIBLE)
1081 continue;
1082
1083 lim_data = init_lim_data (stmt);
1084 lim_data->always_executed_in = outermost;
1085
1086 if (!determine_max_movement (stmt, false))
1087 {
1088 lim_data->max_loop = NULL;
1089 continue;
1090 }
1091
1092 if (dump_file && (dump_flags & TDF_DETAILS))
1093 {
1094 print_gimple_stmt (dump_file, stmt, 2, 0);
1095 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
1096 loop_depth (lim_data->max_loop),
1097 lim_data->cost);
1098 }
1099
1100 if (lim_data->cost >= LIM_EXPENSIVE)
1101 set_profitable_level (stmt);
1102 }
1103
1104 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1105 {
1106 stmt = gsi_stmt (bsi);
1107
1108 pos = movement_possibility (stmt);
1109 if (pos == MOVE_IMPOSSIBLE)
1110 {
1111 if (nonpure_call_p (stmt))
1112 {
1113 maybe_never = true;
1114 outermost = NULL;
1115 }
1116 /* Make sure to note always_executed_in for stores to make
1117 store-motion work. */
1118 else if (stmt_makes_single_store (stmt))
1119 {
1120 struct lim_aux_data *lim_data = init_lim_data (stmt);
1121 lim_data->always_executed_in = outermost;
1122 }
1123 continue;
1124 }
1125
1126 if (is_gimple_assign (stmt)
1127 && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
1128 == GIMPLE_BINARY_RHS))
1129 {
1130 tree op0 = gimple_assign_rhs1 (stmt);
1131 tree op1 = gimple_assign_rhs2 (stmt);
1132 struct loop *ol1 = outermost_invariant_loop (op1,
1133 loop_containing_stmt (stmt));
1134
1135 /* If divisor is invariant, convert a/b to a*(1/b), allowing the
1136 reciprocal to be hoisted out of the loop, saving an expensive divide. */
1137 if (pos == MOVE_POSSIBLE
1138 && gimple_assign_rhs_code (stmt) == RDIV_EXPR
1139 && flag_unsafe_math_optimizations
1140 && !flag_trapping_math
1141 && ol1 != NULL
1142 && outermost_invariant_loop (op0, ol1) == NULL)
1143 stmt = rewrite_reciprocal (&bsi);
1144
1145 /* If the shift count is invariant, convert (A >> B) & 1 to
1146 A & (1 << B) allowing the bit mask to be hoisted out of the loop
1147 saving an expensive shift. */
1148 if (pos == MOVE_POSSIBLE
1149 && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
1150 && integer_onep (op1)
1151 && TREE_CODE (op0) == SSA_NAME
1152 && has_single_use (op0))
1153 stmt = rewrite_bittest (&bsi);
1154 }
1155
1156 lim_data = init_lim_data (stmt);
1157 lim_data->always_executed_in = outermost;
1158
1159 if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
1160 continue;
1161
1162 if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
1163 {
1164 lim_data->max_loop = NULL;
1165 continue;
1166 }
1167
1168 if (dump_file && (dump_flags & TDF_DETAILS))
1169 {
1170 print_gimple_stmt (dump_file, stmt, 2, 0);
1171 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
1172 loop_depth (lim_data->max_loop),
1173 lim_data->cost);
1174 }
1175
1176 if (lim_data->cost >= LIM_EXPENSIVE)
1177 set_profitable_level (stmt);
1178 }
1179 }
1180
1181 /* For each statement, determine the outermost loop in which it is invariant,
1182 the statements on whose motion it depends, and the cost of the computation.
1183 This information is stored in the LIM_DATA structure associated with
1184 each statement. */
1185
1186 static void
1187 determine_invariantness (void)
1188 {
1189 struct dom_walk_data walk_data;
1190
1191 memset (&walk_data, 0, sizeof (struct dom_walk_data));
1192 walk_data.dom_direction = CDI_DOMINATORS;
1193 walk_data.before_dom_children = determine_invariantness_stmt;
1194
1195 init_walk_dominator_tree (&walk_data);
1196 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
1197 fini_walk_dominator_tree (&walk_data);
1198 }
1199
1200 /* Hoist the statements in basic block BB out of the loops prescribed by
1201 data stored in LIM_DATA structures associated with each statement. Callback
1202 for walk_dominator_tree. */
1203
1204 static void
1205 move_computations_stmt (struct dom_walk_data *dw_data,
1206 basic_block bb)
1207 {
1208 struct loop *level;
1209 gimple_stmt_iterator bsi;
1210 gimple stmt;
1211 unsigned cost = 0;
1212 struct lim_aux_data *lim_data;
1213
1214 if (!loop_outer (bb->loop_father))
1215 return;
1216
1217 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
1218 {
1219 gimple new_stmt;
1220 stmt = gsi_stmt (bsi);
1221
1222 lim_data = get_lim_data (stmt);
1223 if (lim_data == NULL)
1224 {
1225 gsi_next (&bsi);
1226 continue;
1227 }
1228
1229 cost = lim_data->cost;
1230 level = lim_data->tgt_loop;
1231 clear_lim_data (stmt);
1232
1233 if (!level)
1234 {
1235 gsi_next (&bsi);
1236 continue;
1237 }
1238
1239 if (dump_file && (dump_flags & TDF_DETAILS))
1240 {
1241 fprintf (dump_file, "Moving PHI node\n");
1242 print_gimple_stmt (dump_file, stmt, 0, 0);
1243 fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
1244 cost, level->num);
1245 }
1246
1247 if (gimple_phi_num_args (stmt) == 1)
1248 {
1249 tree arg = PHI_ARG_DEF (stmt, 0);
1250 new_stmt = gimple_build_assign_with_ops (TREE_CODE (arg),
1251 gimple_phi_result (stmt),
1252 arg, NULL_TREE);
1253 SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
1254 }
1255 else
1256 {
1257 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
1258 gimple cond = gsi_stmt (gsi_last_bb (dom));
1259 tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
1260 /* Get the PHI arguments corresponding to the true and false
1261 edges of COND. */
1262 extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1);
1263 gcc_assert (arg0 && arg1);
1264 t = build2 (gimple_cond_code (cond), boolean_type_node,
1265 gimple_cond_lhs (cond), gimple_cond_rhs (cond));
1266 new_stmt = gimple_build_assign_with_ops3 (COND_EXPR,
1267 gimple_phi_result (stmt),
1268 t, arg0, arg1);
1269 SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
1270 *((unsigned int *)(dw_data->global_data)) |= TODO_cleanup_cfg;
1271 }
1272 gsi_insert_on_edge (loop_preheader_edge (level), new_stmt);
1273 remove_phi_node (&bsi, false);
1274 }
1275
1276 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
1277 {
1278 stmt = gsi_stmt (bsi);
1279
1280 lim_data = get_lim_data (stmt);
1281 if (lim_data == NULL)
1282 {
1283 gsi_next (&bsi);
1284 continue;
1285 }
1286
1287 cost = lim_data->cost;
1288 level = lim_data->tgt_loop;
1289 clear_lim_data (stmt);
1290
1291 if (!level)
1292 {
1293 gsi_next (&bsi);
1294 continue;
1295 }
1296
1297 /* We do not really want to move conditionals out of the loop; we just
1298 placed them here to force their operands to be moved if necessary. */
1299 if (gimple_code (stmt) == GIMPLE_COND)
1300 continue;
1301
1302 if (dump_file && (dump_flags & TDF_DETAILS))
1303 {
1304 fprintf (dump_file, "Moving statement\n");
1305 print_gimple_stmt (dump_file, stmt, 0, 0);
1306 fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
1307 cost, level->num);
1308 }
1309
1310 mark_virtual_ops_for_renaming (stmt);
1311 gsi_insert_on_edge (loop_preheader_edge (level), stmt);
1312 gsi_remove (&bsi, false);
1313 }
1314 }
1315
1316 /* Hoist the statements out of the loops prescribed by data stored in
1317 LIM_DATA structures associated with each statement. */
1318
1319 static unsigned int
1320 move_computations (void)
1321 {
1322 struct dom_walk_data walk_data;
1323 unsigned int todo = 0;
1324
1325 memset (&walk_data, 0, sizeof (struct dom_walk_data));
1326 walk_data.global_data = &todo;
1327 walk_data.dom_direction = CDI_DOMINATORS;
1328 walk_data.before_dom_children = move_computations_stmt;
1329
1330 init_walk_dominator_tree (&walk_data);
1331 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
1332 fini_walk_dominator_tree (&walk_data);
1333
1334 gsi_commit_edge_inserts ();
1335 if (need_ssa_update_p (cfun))
1336 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1337
1338 return todo;
1339 }
1340
1341 /* Checks whether the statement defining variable *INDEX can be hoisted
1342 out of the loop passed in DATA. Callback for for_each_index. */
1343
1344 static bool
1345 may_move_till (tree ref, tree *index, void *data)
1346 {
1347 struct loop *loop = (struct loop *) data, *max_loop;
1348
1349 /* If REF is an array reference, check also that the step and the lower
1350 bound are invariant in LOOP. */
1351 if (TREE_CODE (ref) == ARRAY_REF)
1352 {
1353 tree step = TREE_OPERAND (ref, 3);
1354 tree lbound = TREE_OPERAND (ref, 2);
1355
1356 max_loop = outermost_invariant_loop (step, loop);
1357 if (!max_loop)
1358 return false;
1359
1360 max_loop = outermost_invariant_loop (lbound, loop);
1361 if (!max_loop)
1362 return false;
1363 }
1364
1365 max_loop = outermost_invariant_loop (*index, loop);
1366 if (!max_loop)
1367 return false;
1368
1369 return true;
1370 }
1371
1372 /* If OP is an SSA name, force the statement that defines it to be
1373 moved out of the LOOP. ORIG_LOOP is the loop in which OP is used. */
1374
1375 static void
1376 force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
1377 {
1378 gimple stmt;
1379
1380 if (!op
1381 || is_gimple_min_invariant (op))
1382 return;
1383
1384 gcc_assert (TREE_CODE (op) == SSA_NAME);
1385
1386 stmt = SSA_NAME_DEF_STMT (op);
1387 if (gimple_nop_p (stmt))
1388 return;
1389
1390 set_level (stmt, orig_loop, loop);
1391 }
1392
1393 /* Forces statements defining invariants in REF (and *INDEX) to be moved out of
1394 the LOOP. The reference REF is used in the loop ORIG_LOOP. Callback for
1395 for_each_index. */
1396
1397 struct fmt_data
1398 {
1399 struct loop *loop;
1400 struct loop *orig_loop;
1401 };
1402
1403 static bool
1404 force_move_till (tree ref, tree *index, void *data)
1405 {
1406 struct fmt_data *fmt_data = (struct fmt_data *) data;
1407
1408 if (TREE_CODE (ref) == ARRAY_REF)
1409 {
1410 tree step = TREE_OPERAND (ref, 3);
1411 tree lbound = TREE_OPERAND (ref, 2);
1412
1413 force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
1414 force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
1415 }
1416
1417 force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);
1418
1419 return true;
1420 }
1421
1422 /* A hash function for struct mem_ref object OBJ. */
1423
1424 static hashval_t
1425 memref_hash (const void *obj)
1426 {
1427 const struct mem_ref *const mem = (const struct mem_ref *) obj;
1428
1429 return mem->hash;
1430 }
1431
1432 /* An equality function for struct mem_ref object OBJ1 with
1433 memory reference OBJ2. */
1434
1435 static int
1436 memref_eq (const void *obj1, const void *obj2)
1437 {
1438 const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;
1439
1440 return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
1441 }
1442
1443 /* Releases list of memory reference locations ACCS. */
1444
1445 static void
1446 free_mem_ref_locs (mem_ref_locs_p accs)
1447 {
1448 unsigned i;
1449 mem_ref_loc_p loc;
1450
1451 if (!accs)
1452 return;
1453
1454 FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
1455 free (loc);
1456 VEC_free (mem_ref_loc_p, heap, accs->locs);
1457 free (accs);
1458 }
1459
1460 /* A function to free the mem_ref object OBJ. */
1461
1462 static void
1463 memref_free (void *obj)
1464 {
1465 struct mem_ref *const mem = (struct mem_ref *) obj;
1466 unsigned i;
1467 mem_ref_locs_p accs;
1468
1469 BITMAP_FREE (mem->stored);
1470 BITMAP_FREE (mem->indep_loop);
1471 BITMAP_FREE (mem->dep_loop);
1472 BITMAP_FREE (mem->indep_ref);
1473 BITMAP_FREE (mem->dep_ref);
1474
1475 FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs)
1476 free_mem_ref_locs (accs);
1477 VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);
1478
1479 free (mem);
1480 }
1481
1482 /* Allocates and returns a memory reference description for MEM whose hash
1483 value is HASH and id is ID. */
1484
1485 static mem_ref_p
1486 mem_ref_alloc (tree mem, unsigned hash, unsigned id)
1487 {
1488 mem_ref_p ref = XNEW (struct mem_ref);
1489 ref->mem = mem;
1490 ref->id = id;
1491 ref->hash = hash;
1492 ref->stored = BITMAP_ALLOC (NULL);
1493 ref->indep_loop = BITMAP_ALLOC (NULL);
1494 ref->dep_loop = BITMAP_ALLOC (NULL);
1495 ref->indep_ref = BITMAP_ALLOC (NULL);
1496 ref->dep_ref = BITMAP_ALLOC (NULL);
1497 ref->accesses_in_loop = NULL;
1498
1499 return ref;
1500 }
1501
1502 /* Allocates and returns the new list of locations. */
1503
1504 static mem_ref_locs_p
1505 mem_ref_locs_alloc (void)
1506 {
1507 mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
1508 accs->locs = NULL;
1509 return accs;
1510 }
1511
1512 /* Records memory reference location *LOC in LOOP to the memory reference
1513 description REF. The reference occurs in statement STMT. */
1514
1515 static void
1516 record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
1517 {
1518 mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
1519 mem_ref_locs_p accs;
1520 bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1521
1522 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
1523 <= (unsigned) loop->num)
1524 VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
1525 loop->num + 1);
1526 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
1527 if (!accs)
1528 {
1529 accs = mem_ref_locs_alloc ();
1530 VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
1531 }
1532
1533 aref->stmt = stmt;
1534 aref->ref = loc;
1535
1536 VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
1537 bitmap_set_bit (ril, ref->id);
1538 }
1539
1540 /* Marks reference REF as stored in LOOP. */
1541
1542 static void
1543 mark_ref_stored (mem_ref_p ref, struct loop *loop)
1544 {
1545 for (;
1546 loop != current_loops->tree_root
1547 && !bitmap_bit_p (ref->stored, loop->num);
1548 loop = loop_outer (loop))
1549 bitmap_set_bit (ref->stored, loop->num);
1550 }
1551
1552 /* Gathers memory references in statement STMT in LOOP, storing the
1553 information about them in the memory_accesses structure. Marks
1554 the vops accessed through unrecognized statements there as
1555 well. */
1556
1557 static void
1558 gather_mem_refs_stmt (struct loop *loop, gimple stmt)
1559 {
1560 tree *mem = NULL;
1561 hashval_t hash;
1562 PTR *slot;
1563 mem_ref_p ref;
1564 bool is_stored;
1565 unsigned id;
1566
1567 if (!gimple_vuse (stmt))
1568 return;
1569
1570 mem = simple_mem_ref_in_stmt (stmt, &is_stored);
1571 if (!mem)
1572 {
1573 id = VEC_length (mem_ref_p, memory_accesses.refs_list);
1574 ref = mem_ref_alloc (error_mark_node, 0, id);
1575 VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
1576 if (dump_file && (dump_flags & TDF_DETAILS))
1577 {
1578 fprintf (dump_file, "Unanalyzed memory reference %u: ", id);
1579 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1580 }
1581 if (gimple_vdef (stmt))
1582 mark_ref_stored (ref, loop);
1583 record_mem_ref_loc (ref, loop, stmt, mem);
1584 return;
1585 }
1586
1587 hash = iterative_hash_expr (*mem, 0);
1588 slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);
1589
1590 if (*slot)
1591 {
1592 ref = (mem_ref_p) *slot;
1593 id = ref->id;
1594 }
1595 else
1596 {
1597 id = VEC_length (mem_ref_p, memory_accesses.refs_list);
1598 ref = mem_ref_alloc (*mem, hash, id);
1599 VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
1600 *slot = ref;
1601
1602 if (dump_file && (dump_flags & TDF_DETAILS))
1603 {
1604 fprintf (dump_file, "Memory reference %u: ", id);
1605 print_generic_expr (dump_file, ref->mem, TDF_SLIM);
1606 fprintf (dump_file, "\n");
1607 }
1608 }
1609 if (is_stored)
1610 mark_ref_stored (ref, loop);
1611
1612 record_mem_ref_loc (ref, loop, stmt, mem);
1613 return;
1614 }
1615
1616 /* Gathers memory references in loops. */
1617
1618 static void
1619 gather_mem_refs_in_loops (void)
1620 {
1621 gimple_stmt_iterator bsi;
1622 basic_block bb;
1623 struct loop *loop;
1624 loop_iterator li;
1625 bitmap lrefs, alrefs, alrefso;
1626
1627 FOR_EACH_BB (bb)
1628 {
1629 loop = bb->loop_father;
1630 if (loop == current_loops->tree_root)
1631 continue;
1632
1633 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1634 gather_mem_refs_stmt (loop, gsi_stmt (bsi));
1635 }
1636
1637 /* Propagate the information about accessed memory references up
1638 the loop hierarchy. */
1639 FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
1640 {
1641 lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1642 alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
1643 bitmap_ior_into (alrefs, lrefs);
1644
1645 if (loop_outer (loop) == current_loops->tree_root)
1646 continue;
1647
1648 alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
1649 loop_outer (loop)->num);
1650 bitmap_ior_into (alrefso, alrefs);
1651 }
1652 }
1653
1654 /* Create a mapping from virtual operands to references that touch them
1655 in LOOP. */
1656
1657 static void
1658 create_vop_ref_mapping_loop (struct loop *loop)
1659 {
1660 bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
1661 struct loop *sloop;
1662 bitmap_iterator bi;
1663 unsigned i;
1664 mem_ref_p ref;
1665
1666 EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
1667 {
1668 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
1669 for (sloop = loop; sloop != current_loops->tree_root;
1670 sloop = loop_outer (sloop))
1671 if (bitmap_bit_p (ref->stored, loop->num))
1672 {
1673 bitmap refs_stored
1674 = VEC_index (bitmap, memory_accesses.all_refs_stored_in_loop,
1675 sloop->num);
1676 bitmap_set_bit (refs_stored, ref->id);
1677 }
1678 }
1679 }
1680
1681 /* For each non-clobbered virtual operand and each loop, record the memory
1682 references in this loop that touch the operand. */
1683
1684 static void
1685 create_vop_ref_mapping (void)
1686 {
1687 loop_iterator li;
1688 struct loop *loop;
1689
1690 FOR_EACH_LOOP (li, loop, 0)
1691 {
1692 create_vop_ref_mapping_loop (loop);
1693 }
1694 }
1695
1696 /* Gathers information about memory accesses in the loops. */
1697
1698 static void
1699 analyze_memory_references (void)
1700 {
1701 unsigned i;
1702 bitmap empty;
1703
1704 memory_accesses.refs
1705 = htab_create (100, memref_hash, memref_eq, memref_free);
1706 memory_accesses.refs_list = NULL;
1707 memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
1708 number_of_loops ());
1709 memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
1710 number_of_loops ());
1711 memory_accesses.all_refs_stored_in_loop = VEC_alloc (bitmap, heap,
1712 number_of_loops ());
1713
1714 for (i = 0; i < number_of_loops (); i++)
1715 {
1716 empty = BITMAP_ALLOC (NULL);
1717 VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
1718 empty = BITMAP_ALLOC (NULL);
1719 VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
1720 empty = BITMAP_ALLOC (NULL);
1721 VEC_quick_push (bitmap, memory_accesses.all_refs_stored_in_loop, empty);
1722 }
1723
1724 memory_accesses.ttae_cache = NULL;
1725
1726 gather_mem_refs_in_loops ();
1727 create_vop_ref_mapping ();
1728 }
1729
1730 /* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in
1731 tree_to_aff_combination_expand. */
1732
1733 static bool
1734 mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
1735 {
1736 /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
1737 object and their offsets differ in such a way that the locations cannot
1738 overlap, then they cannot alias. */
1739 double_int size1, size2;
1740 aff_tree off1, off2;
1741
1742 /* Perform basic offset and type-based disambiguation. */
1743 if (!refs_may_alias_p (mem1, mem2))
1744 return false;
1745
1746 /* The expansion of addresses may be a bit expensive, thus we only do
1747 the check at -O2 and higher optimization levels. */
1748 if (optimize < 2)
1749 return true;
1750
1751 get_inner_reference_aff (mem1, &off1, &size1);
1752 get_inner_reference_aff (mem2, &off2, &size2);
1753 aff_combination_expand (&off1, ttae_cache);
1754 aff_combination_expand (&off2, ttae_cache);
1755 aff_combination_scale (&off1, double_int_minus_one);
1756 aff_combination_add (&off2, &off1);
1757
1758 if (aff_comb_cannot_overlap_p (&off2, size1, size2))
1759 return false;
1760
1761 return true;
1762 }
1763
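/* For illustration (made-up accesses): MEM[p_1 + 4] and MEM[p_1 + 8],
   each 4 bytes wide, expand to affine offsets p_1 + 4 and p_1 + 8;
   the difference between the offsets is no smaller than either access
   size, so aff_comb_cannot_overlap_p proves the references independent
   even where type-based disambiguation alone cannot.  */
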
1764 /* Replaces the memory reference at location LOC by TMP_VAR. */
1765
1766 static void
1767 rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
1768 {
1769 mark_virtual_ops_for_renaming (loc->stmt);
1770 *loc->ref = tmp_var;
1771 update_stmt (loc->stmt);
1772 }
1773
1774 /* Adds all locations of REF in LOOP and its subloops to LOCS. */
1775
1776 static void
1777 get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
1778 VEC (mem_ref_loc_p, heap) **locs)
1779 {
1780 mem_ref_locs_p accs;
1781 unsigned i;
1782 mem_ref_loc_p loc;
1783 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
1784 loop->num);
1785 struct loop *subloop;
1786
1787 if (!bitmap_bit_p (refs, ref->id))
1788 return;
1789
1790 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
1791 > (unsigned) loop->num)
1792 {
1793 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
1794 if (accs)
1795 {
1796 FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
1797 VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
1798 }
1799 }
1800
1801 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
1802 get_all_locs_in_loop (subloop, ref, locs);
1803 }
1804
1805 /* Rewrites all references to REF in LOOP by variable TMP_VAR. */
1806
1807 static void
1808 rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
1809 {
1810 unsigned i;
1811 mem_ref_loc_p loc;
1812 VEC (mem_ref_loc_p, heap) *locs = NULL;
1813
1814 get_all_locs_in_loop (loop, ref, &locs);
1815 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
1816 rewrite_mem_ref_loc (loc, tmp_var);
1817 VEC_free (mem_ref_loc_p, heap, locs);
1818 }
1819
1820 /* The name and the length of the currently generated variable
1821 for lsm. */
1822 #define MAX_LSM_NAME_LENGTH 40
1823 static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
1824 static int lsm_tmp_name_length;
1825
1826 /* Adds S to lsm_tmp_name. */
1827
1828 static void
1829 lsm_tmp_name_add (const char *s)
1830 {
1831 int l = strlen (s) + lsm_tmp_name_length;
1832 if (l > MAX_LSM_NAME_LENGTH)
1833 return;
1834
1835 strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
1836 lsm_tmp_name_length = l;
1837 }
1838
1839 /* Stores the name for the temporary variable that replaces REF to
1840 lsm_tmp_name. */
1841
1842 static void
1843 gen_lsm_tmp_name (tree ref)
1844 {
1845 const char *name;
1846
1847 switch (TREE_CODE (ref))
1848 {
1849 case MEM_REF:
1850 case TARGET_MEM_REF:
1851 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1852 lsm_tmp_name_add ("_");
1853 break;
1854
1855 case ADDR_EXPR:
1856 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1857 break;
1858
1859 case BIT_FIELD_REF:
1860 case VIEW_CONVERT_EXPR:
1861 case ARRAY_RANGE_REF:
1862 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1863 break;
1864
1865 case REALPART_EXPR:
1866 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1867 lsm_tmp_name_add ("_RE");
1868 break;
1869
1870 case IMAGPART_EXPR:
1871 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1872 lsm_tmp_name_add ("_IM");
1873 break;
1874
1875 case COMPONENT_REF:
1876 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1877 lsm_tmp_name_add ("_");
1878 name = get_name (TREE_OPERAND (ref, 1));
1879 if (!name)
1880 name = "F";
1881 lsm_tmp_name_add (name);
1882 break;
1883
1884 case ARRAY_REF:
1885 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1886 lsm_tmp_name_add ("_I");
1887 break;
1888
1889 case SSA_NAME:
1890 ref = SSA_NAME_VAR (ref);
1891 /* Fallthru. */
1892
1893 case VAR_DECL:
1894 case PARM_DECL:
1895 name = get_name (ref);
1896 if (!name)
1897 name = "D";
1898 lsm_tmp_name_add (name);
1899 break;
1900
1901 case STRING_CST:
1902 lsm_tmp_name_add ("S");
1903 break;
1904
1905 case RESULT_DECL:
1906 lsm_tmp_name_add ("R");
1907 break;
1908
1909 case INTEGER_CST:
1910 /* Nothing. */
1911 break;
1912
1913 default:
1914 gcc_unreachable ();
1915 }
1916 }
1917
1918 /* Determines the name for the temporary variable that replaces REF.
1919 The name is accumulated into the lsm_tmp_name variable.
1920 N is added to the name of the temporary. */
1921
1922 char *
1923 get_lsm_tmp_name (tree ref, unsigned n)
1924 {
1925 char ns[2];
1926
1927 lsm_tmp_name_length = 0;
1928 gen_lsm_tmp_name (ref);
1929 lsm_tmp_name_add ("_lsm");
1930 if (n < 10)
1931 {
1932 ns[0] = '0' + n;
1933 ns[1] = 0;
1934 lsm_tmp_name_add (ns);
1935 }
1936 return lsm_tmp_name;
1937 }
1938
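/* For example (hypothetical): for the reference  a[i].f  and N == 1 the
   functions above produce "a_I_f_lsm1"; for  *p  they produce
   "p__lsm1" (the MEM_REF case adds a trailing "_").  */
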
1939 /* Executes store motion of memory reference REF from LOOP.
1940 Exits from the LOOP are stored in EXITS. The initialization of the
1941 temporary variable is put in the preheader of the loop, and assignments
1942 to the reference from the temporary variable are emitted to exits. */
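/* For illustration, promoting the location MEM out of the loop:

     while (...)
       {
         ... = MEM;
         MEM = ...;
       }

   becomes

     tmp = MEM;
     while (...)
       {
         ... = tmp;
         tmp = ...;
       }
     MEM = tmp;   <-- emitted on each exit edge  */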
1943
1944 static void
1945 execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
1946 {
1947 tree tmp_var;
1948 unsigned i;
1949 gimple load, store;
1950 struct fmt_data fmt_data;
1951 edge ex;
1952 struct lim_aux_data *lim_data;
1953
1954 if (dump_file && (dump_flags & TDF_DETAILS))
1955 {
1956 fprintf (dump_file, "Executing store motion of ");
1957 print_generic_expr (dump_file, ref->mem, 0);
1958 fprintf (dump_file, " from loop %d\n", loop->num);
1959 }
1960
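  /* Pass ~0 so that get_lsm_tmp_name appends no numeric suffix
     (only values below 10 are appended).  */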
1961 tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
1962 get_lsm_tmp_name (ref->mem, ~0));
1963
1964 fmt_data.loop = loop;
1965 fmt_data.orig_loop = loop;
1966 for_each_index (&ref->mem, force_move_till, &fmt_data);
1967
1968 rewrite_mem_refs (loop, ref, tmp_var);
1969
1970 /* Emit the load & stores. */
1971 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
1972 lim_data = init_lim_data (load);
1973 lim_data->max_loop = loop;
1974 lim_data->tgt_loop = loop;
1975
1976 /* Put this into the latch, so that we are sure it will be processed after
1977 all dependencies. */
1978 gsi_insert_on_edge (loop_latch_edge (loop), load);
1979
1980 FOR_EACH_VEC_ELT (edge, exits, i, ex)
1981 {
1982 store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
1983 gsi_insert_on_edge (ex, store);
1984 }
1985 }
1986
1987 /* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit
1988 edges of the LOOP. */
1989
1990 static void
1991 hoist_memory_references (struct loop *loop, bitmap mem_refs,
1992 VEC (edge, heap) *exits)
1993 {
1994 mem_ref_p ref;
1995 unsigned i;
1996 bitmap_iterator bi;
1997
1998 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
1999 {
2000 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2001 execute_sm (loop, exits, ref);
2002 }
2003 }
2004
2005 /* Returns true if REF is always accessed in LOOP. If STORED_P is true,
2006 check that REF is always stored to in LOOP. */
2007
2008 static bool
2009 ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
2010 {
2011 VEC (mem_ref_loc_p, heap) *locs = NULL;
2012 unsigned i;
2013 mem_ref_loc_p loc;
2014 bool ret = false;
2015 struct loop *must_exec;
2016 tree base;
2017
2018 base = get_base_address (ref->mem);
2019 if (INDIRECT_REF_P (base)
2020 || TREE_CODE (base) == MEM_REF)
2021 base = TREE_OPERAND (base, 0);
2022
2023 get_all_locs_in_loop (loop, ref, &locs);
2024 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
2025 {
2026 if (!get_lim_data (loc->stmt))
2027 continue;
2028
2029 /* If we require an always executed store, make sure the statement
2030 stores to the reference. */
2031 if (stored_p)
2032 {
2033 tree lhs;
2034 if (!gimple_get_lhs (loc->stmt))
2035 continue;
2036 lhs = get_base_address (gimple_get_lhs (loc->stmt));
2037 if (!lhs)
2038 continue;
2039 if (INDIRECT_REF_P (lhs)
2040 || TREE_CODE (lhs) == MEM_REF)
2041 lhs = TREE_OPERAND (lhs, 0);
2042 if (lhs != base)
2043 continue;
2044 }
2045
2046 must_exec = get_lim_data (loc->stmt)->always_executed_in;
2047 if (!must_exec)
2048 continue;
2049
2050 if (must_exec == loop
2051 || flow_loop_nested_p (must_exec, loop))
2052 {
2053 ret = true;
2054 break;
2055 }
2056 }
2057 VEC_free (mem_ref_loc_p, heap, locs);
2058
2059 return ret;
2060 }
2061
2062 /* Returns true if REF1 and REF2 are independent. */
2063
2064 static bool
2065 refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
2066 {
2067 if (ref1 == ref2
2068 || bitmap_bit_p (ref1->indep_ref, ref2->id))
2069 return true;
2070 if (bitmap_bit_p (ref1->dep_ref, ref2->id))
2071 return false;
2072 if (!MEM_ANALYZABLE (ref1)
2073 || !MEM_ANALYZABLE (ref2))
2074 return false;
2075
2076 if (dump_file && (dump_flags & TDF_DETAILS))
2077 fprintf (dump_file, "Querying dependency of refs %u and %u: ",
2078 ref1->id, ref2->id);
2079
2080 if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
2081 &memory_accesses.ttae_cache))
2082 {
2083 bitmap_set_bit (ref1->dep_ref, ref2->id);
2084 bitmap_set_bit (ref2->dep_ref, ref1->id);
2085 if (dump_file && (dump_flags & TDF_DETAILS))
2086 fprintf (dump_file, "dependent.\n");
2087 return false;
2088 }
2089 else
2090 {
2091 bitmap_set_bit (ref1->indep_ref, ref2->id);
2092 bitmap_set_bit (ref2->indep_ref, ref1->id);
2093 if (dump_file && (dump_flags & TDF_DETAILS))
2094 fprintf (dump_file, "independent.\n");
2095 return true;
2096 }
2097 }
2098
2099 /* Records whether REF is independent of all other memory references
2100 in LOOP (according to INDEP). */
2101
2102 static void
2103 record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
2104 {
2105 if (indep)
2106 bitmap_set_bit (ref->indep_loop, loop->num);
2107 else
2108 bitmap_set_bit (ref->dep_loop, loop->num);
2109 }
2110
2111 /* Returns true if REF is independent of all other memory references in
2112 LOOP. */
2113
2114 static bool
2115 ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
2116 {
2117 bitmap refs_to_check;
2118 unsigned i;
2119 bitmap_iterator bi;
2120 bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
2121 mem_ref_p aref;
2122
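  /* If REF is stored in LOOP, it must be checked against all references
     in the loop; if it is only loaded, it cannot conflict with other
     loads, so checking it against the stored references suffices.  */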
2123 if (stored)
2124 refs_to_check = VEC_index (bitmap,
2125 memory_accesses.all_refs_in_loop, loop->num);
2126 else
2127 refs_to_check = VEC_index (bitmap,
2128 memory_accesses.all_refs_stored_in_loop,
2129 loop->num);
2130
2131 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
2132 {
2133 aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2134 if (!MEM_ANALYZABLE (aref)
2135 || !refs_independent_p (ref, aref))
2136 {
2137 ret = false;
2138 record_indep_loop (loop, aref, false);
2139 break;
2140 }
2141 }
2142
2143 return ret;
2144 }
2145
2146 /* Returns true if REF is independent of all other memory references in
2147 LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */
2148
2149 static bool
2150 ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
2151 {
2152 bool ret;
2153
2154 if (bitmap_bit_p (ref->indep_loop, loop->num))
2155 return true;
2156 if (bitmap_bit_p (ref->dep_loop, loop->num))
2157 return false;
2158
2159 ret = ref_indep_loop_p_1 (loop, ref);
2160
2161 if (dump_file && (dump_flags & TDF_DETAILS))
2162 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
2163 ref->id, loop->num, ret ? "independent" : "dependent");
2164
2165 record_indep_loop (loop, ref, ret);
2166
2167 return ret;
2168 }
2169
2170 /* Returns true if we can perform store motion of REF from LOOP. */
2171
2172 static bool
2173 can_sm_ref_p (struct loop *loop, mem_ref_p ref)
2174 {
2175 tree base;
2176
2177 /* Can't hoist unanalyzable refs. */
2178 if (!MEM_ANALYZABLE (ref))
2179 return false;
2180
2181 /* Unless the reference is stored in the loop, there is nothing to do. */
2182 if (!bitmap_bit_p (ref->stored, loop->num))
2183 return false;
2184
2185 /* It should be movable. */
2186 if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
2187 || TREE_THIS_VOLATILE (ref->mem)
2188 || !for_each_index (&ref->mem, may_move_till, loop))
2189 return false;
2190
2191 /* If it can throw, fail; we do not properly update EH info. */
2192 if (tree_could_throw_p (ref->mem))
2193 return false;
2194
2195 /* If it can trap, it must be always executed in LOOP.
2196 Readonly memory locations may trap when storing to them, but
2197 tree_could_trap_p is a predicate for rvalues, so check that
2198 explicitly. */
2199 base = get_base_address (ref->mem);
2200 if ((tree_could_trap_p (ref->mem)
2201 || (DECL_P (base) && TREE_READONLY (base)))
2202 && !ref_always_accessed_p (loop, ref, true))
2203 return false;
2204
2205 /* And it must be independent of all other memory references
2206 in LOOP. */
2207 if (!ref_indep_loop_p (loop, ref))
2208 return false;
2209
2210 return true;
2211 }
2212
2213 /* Marks in REFS_TO_SM the references in LOOP for which store motion
2214 should be performed. SM_EXECUTED is the set of references for which
2215 store motion was already performed in one of the outer loops. */
2216
2217 static void
2218 find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
2219 {
2220 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
2221 loop->num);
2222 unsigned i;
2223 bitmap_iterator bi;
2224 mem_ref_p ref;
2225
2226 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
2227 {
2228 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2229 if (can_sm_ref_p (loop, ref))
2230 bitmap_set_bit (refs_to_sm, i);
2231 }
2232 }
2233
2234 /* Checks whether LOOP (with exits stored in the EXITS array) is suitable
2235 for a store motion optimization (i.e. whether we can insert statements
2236 on its exits). */
2237
2238 static bool
2239 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
2240 VEC (edge, heap) *exits)
2241 {
2242 unsigned i;
2243 edge ex;
2244
2245 FOR_EACH_VEC_ELT (edge, exits, i, ex)
2246 if (ex->flags & (EDGE_ABNORMAL | EDGE_EH))
2247 return false;
2248
2249 return true;
2250 }
2251
2252 /* Try to perform store motion for all memory references modified inside
2253 LOOP. SM_EXECUTED is the bitmap of the memory references for which
2254 store motion was already executed in one of the outer loops. */
2255
2256 static void
2257 store_motion_loop (struct loop *loop, bitmap sm_executed)
2258 {
2259 VEC (edge, heap) *exits = get_loop_exit_edges (loop);
2260 struct loop *subloop;
2261 bitmap sm_in_loop = BITMAP_ALLOC (NULL);
2262
2263 if (loop_suitable_for_sm (loop, exits))
2264 {
2265 find_refs_for_sm (loop, sm_executed, sm_in_loop);
2266 hoist_memory_references (loop, sm_in_loop, exits);
2267 }
2268 VEC_free (edge, heap, exits);
2269
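  /* Make the refs moved out of this loop visible to the subloops, and
     restore SM_EXECUTED afterwards so that sibling loops are unaffected.  */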
2270 bitmap_ior_into (sm_executed, sm_in_loop);
2271 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
2272 store_motion_loop (subloop, sm_executed);
2273 bitmap_and_compl_into (sm_executed, sm_in_loop);
2274 BITMAP_FREE (sm_in_loop);
2275 }
2276
2277 /* Try to perform store motion for all memory references modified inside
2278 loops. */
2279
2280 static void
2281 store_motion (void)
2282 {
2283 struct loop *loop;
2284 bitmap sm_executed = BITMAP_ALLOC (NULL);
2285
2286 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
2287 store_motion_loop (loop, sm_executed);
2288
2289 BITMAP_FREE (sm_executed);
2290 gsi_commit_edge_inserts ();
2291 }
2292
2293 /* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
2294 for each such basic block bb records the outermost loop for which execution
2295 of its header implies execution of bb. CONTAINS_CALL is the bitmap of
2296 blocks that contain a nonpure call. */
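
/* As a rough illustration (simple assignments only, so that no block
   contains a call):

     while (1)
       {
         x = 1;     <- always executed in the loop
         if (c)
           y = 2;   <- not always executed
         if (d)
           break;
         z = 3;     <- not always executed; the loop may exit before it
       }

   Roughly, only the blocks dominating the first block from which the
   loop may be left (or that contains a call) are marked. */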
2297
2298 static void
2299 fill_always_executed_in (struct loop *loop, sbitmap contains_call)
2300 {
2301 basic_block bb = NULL, *bbs, last = NULL;
2302 unsigned i;
2303 edge e;
2304 struct loop *inn_loop = loop;
2305
2306 if (ALWAYS_EXECUTED_IN (loop->header) == NULL)
2307 {
2308 bbs = get_loop_body_in_dom_order (loop);
2309
2310 for (i = 0; i < loop->num_nodes; i++)
2311 {
2312 edge_iterator ei;
2313 bb = bbs[i];
2314
2315 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2316 last = bb;
2317
2318 if (TEST_BIT (contains_call, bb->index))
2319 break;
2320
2321 FOR_EACH_EDGE (e, ei, bb->succs)
2322 if (!flow_bb_inside_loop_p (loop, e->dest))
2323 break;
2324 if (e)
2325 break;
2326
2327 /* A loop might be infinite (TODO use simple loop analysis
2328 to disprove this if possible). */
2329 if (bb->flags & BB_IRREDUCIBLE_LOOP)
2330 break;
2331
2332 if (!flow_bb_inside_loop_p (inn_loop, bb))
2333 break;
2334
2335 if (bb->loop_father->header == bb)
2336 {
2337 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2338 break;
2339
2340 /* In a loop that is always entered we may proceed anyway.
2341 But record that we entered it and stop once we leave it. */
2342 inn_loop = bb->loop_father;
2343 }
2344 }
2345
2346 while (1)
2347 {
2348 SET_ALWAYS_EXECUTED_IN (last, loop);
2349 if (last == loop->header)
2350 break;
2351 last = get_immediate_dominator (CDI_DOMINATORS, last);
2352 }
2353
2354 free (bbs);
2355 }
2356
2357 for (loop = loop->inner; loop; loop = loop->next)
2358 fill_always_executed_in (loop, contains_call);
2359 }
2360
2361 /* Compute the global information needed by the loop invariant motion pass. */
2362
2363 static void
2364 tree_ssa_lim_initialize (void)
2365 {
2366 sbitmap contains_call = sbitmap_alloc (last_basic_block);
2367 gimple_stmt_iterator bsi;
2368 struct loop *loop;
2369 basic_block bb;
2370
2371 sbitmap_zero (contains_call);
2372 FOR_EACH_BB (bb)
2373 {
2374 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
2375 {
2376 if (nonpure_call_p (gsi_stmt (bsi)))
2377 break;
2378 }
2379
2380 if (!gsi_end_p (bsi))
2381 SET_BIT (contains_call, bb->index);
2382 }
2383
2384 for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
2385 fill_always_executed_in (loop, contains_call);
2386
2387 sbitmap_free (contains_call);
2388
2389 lim_aux_data_map = pointer_map_create ();
2390 }
2391
2392 /* Cleans up after the invariant motion pass. */
2393
2394 static void
2395 tree_ssa_lim_finalize (void)
2396 {
2397 basic_block bb;
2398 unsigned i;
2399 bitmap b;
2400
2401 FOR_EACH_BB (bb)
2402 SET_ALWAYS_EXECUTED_IN (bb, NULL);
2403
2404 pointer_map_destroy (lim_aux_data_map);
2405
2406 VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
2407 htab_delete (memory_accesses.refs);
2408
2409 FOR_EACH_VEC_ELT (bitmap, memory_accesses.refs_in_loop, i, b)
2410 BITMAP_FREE (b);
2411 VEC_free (bitmap, heap, memory_accesses.refs_in_loop);
2412
2413 FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_in_loop, i, b)
2414 BITMAP_FREE (b);
2415 VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);
2416
2417 FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_stored_in_loop, i, b)
2418 BITMAP_FREE (b);
2419 VEC_free (bitmap, heap, memory_accesses.all_refs_stored_in_loop);
2420
2421 if (memory_accesses.ttae_cache)
2422 pointer_map_destroy (memory_accesses.ttae_cache);
2423 }
2424
2425 /* Moves invariants from loops. Only "expensive" invariants are moved out --
2426 i.e. those that are likely to be a win regardless of the register pressure. */
2427
2428 unsigned int
2429 tree_ssa_lim (void)
2430 {
2431 unsigned int todo;
2432
2433 tree_ssa_lim_initialize ();
2434
2435 /* Gather information about memory accesses in the loops. */
2436 analyze_memory_references ();
2437
2438 /* For each statement determine the outermost loop in which it is
2439 invariant and the cost of computing the invariant. */
2440 determine_invariantness ();
2441
2442 /* Execute store motion. Force the necessary invariants to be moved
2443 out of the loops as well. */
2444 store_motion ();
2445
2446 /* Move the expressions that are expensive enough. */
2447 todo = move_computations ();
2448
2449 tree_ssa_lim_finalize ();
2450
2451 return todo;
2452 }