/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "real.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"

/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
	 {
	   a = inv;
	   something;
	 }
     }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
	 something;
     }  */

/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  gimple stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static struct pointer_map_t *lim_aux_data_map;

/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  gimple stmt;			/* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);

/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;			/* The memory itself.  */
  unsigned id;			/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list).  */
  hashval_t hash;		/* Its hash value.  */
  bitmap stored;		/* The set of loops in which this memory
				   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */
  bitmap vops;			/* Vops corresponding to this memory
				   location.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;		/* The set of loops in which the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				   is independent of all other loads and
				   stores.
				   If it is only loaded, then it is independent
				   of all stores in the loop.  */
  bitmap dep_loop;		/* The complement of INDEP_LOOP.  */

  bitmap indep_ref;		/* The set of memory references of which
				   this reference is independent.  */
  bitmap dep_ref;		/* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of virtual operands clobbered in a given loop.  */
  VEC (bitmap, heap) *clobbered_vops;

  /* Map from the pair (loop, virtual operand) to the set of refs that
     touch the virtual operand in the loop.  */
  VEC (htab_t, heap) *vop_ref_map;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)

/* Allocates fresh lim_aux_data for STMT, records it in the
   lim_aux_data_map, and returns it.  */

static struct lim_aux_data *
init_lim_data (gimple stmt)
{
  void **p = pointer_map_insert (lim_aux_data_map, stmt);

  *p = XCNEW (struct lim_aux_data);
  return (struct lim_aux_data *) *p;
}

/* Returns the lim_aux_data associated with STMT, or NULL if there is
   none.  */

static struct lim_aux_data *
get_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return NULL;

  return (struct lim_aux_data *) *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

/* Releases and clears the lim_aux_data of STMT, if any.  */

static void
clear_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return;

  free_lim_aux_data ((struct lim_aux_data *) *p);
  *p = NULL;
}

/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */
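/* For instance, a hypothetical callback (a sketch, not used in this file)
   that checks whether some index of a reference is a given SSA name NAME
   could be written as

     static bool
     idx_is_not_name (tree base ATTRIBUTE_UNUSED, tree *idx, void *data)
     {
       return *idx != (tree) data;
     }

   and invoked as !for_each_index (&ref, idx_is_not_name, name); the
   traversal stops, returning false, as soon as NAME is found.  */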

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has a varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return true;

	case ADDR_EXPR:
	  gcc_assert (is_gimple_min_invariant (*addr_p));
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (gimple_vdef (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  return ret;
}

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */
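/* For instance (illustrative, not an exhaustive description of the
   contract): if DEF is an SSA name defined outside of all loops, the whole
   nest around LOOP (its depth-1 superloop) is returned; if DEF is defined
   directly in LOOP and cannot itself be hoisted, the result is NULL, since
   the use cannot be moved out of any loop.  */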

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  gimple def_stmt;
  basic_block def_bb;
  struct loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
				 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement that defines DEF in the DATA->depends list.  Additionally, if
   ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *max_loop;
  struct depend *dep;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will only be used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}

/* Returns an estimate for the cost of statement STMT.  TODO -- the values
   here are just ad-hoc constants.  The estimates should be based on
   target-specific values.  */

static unsigned
stmt_cost (gimple stmt)
{
  tree fndecl;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND)
    return LIM_EXPENSIVE;

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    cost += 20;

  if (is_gimple_call (stmt))
    {
      /* We should be hoisting calls if possible.  */

      /* Unless the call is to builtin_constant_p; that always folds to a
	 constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
	return 0;

      return cost + 20;
    }

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return cost;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}

/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL
   is returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
  tree *lhs;
  enum tree_code code;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return NULL;

  code = gimple_assign_rhs_code (stmt);

  lhs = gimple_assign_lhs_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	  || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
	return NULL;

      *is_store = false;
      return gimple_assign_rhs1_ptr (stmt);
    }
  else if (code == SSA_NAME
	   || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
	       && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}

/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (gimple stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  Other related
   information is also filled in to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  if (gimple_vuse (stmt))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
	{
	  lim_data->max_loop
	    = outermost_indep_loop (lim_data->max_loop, loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else
	{
	  if ((val = gimple_vuse (stmt)) != NULL_TREE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	    }
	}
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct depend *dep;
  struct lim_aux_data *lim_data;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
	      || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  for (dep = lim_data->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

/* Determines an outermost loop from which we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (gimple stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}

/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */
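/* For instance (a sketch, with y invariant in the loop):

     x = a / y;     is turned into     t = 1 / y;
                                       x = a * t;

   so that the reciprocal t = 1 / y can then be hoisted out of the loop,
   while only the cheap multiplication remains inside.  */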

static gimple
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
  gimple stmt, stmt1, stmt2;
  tree var, name, lhs, type;
  tree real_one;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);

  var = create_tmp_var (type, "reciptmp");
  add_referenced_var (var);
  DECL_GIMPLE_REG_P (var) = 1;

  /* For vectors, create a VECTOR_CST full of 1's.  */
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      int i, len;
      tree list = NULL_TREE;
      real_one = build_real (TREE_TYPE (type), dconst1);
      len = TYPE_VECTOR_SUBPARTS (type);
      for (i = 0; i < len; i++)
	list = tree_cons (NULL, real_one, list);
      real_one = build_vector (type, list);
    }
  else
    real_one = build_real (type, dconst1);

  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
					var, real_one,
					gimple_assign_rhs2 (stmt));
  name = make_ssa_name (var, stmt1);
  gimple_assign_set_lhs (stmt1, name);

  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
					gimple_assign_rhs1 (stmt));

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  gsi_replace (bsi, stmt1, true);
  gsi_insert_after (bsi, stmt2, GSI_NEW_STMT);

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}

/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */
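/* For instance (a sketch, with b invariant in the loop but a not):

     if ((a >> b) & 1)     is turned into     t = 1 << b;
       ...                                    if (a & t)
                                                ...

   so that the mask computation t = 1 << b can be hoisted out of the loop,
   saving the shift on every iteration.  */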

static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gimple stmt, use_stmt, stmt1, stmt2;
  tree lhs, var, name, t, a, b;
  use_operand_p use;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || gimple_code (use_stmt) != GIMPLE_COND)
    return stmt;
  if (gimple_cond_lhs (use_stmt) != lhs
      || (gimple_cond_code (use_stmt) != NE_EXPR
	  && gimple_cond_code (use_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
	return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt1);
      gimple_assign_set_lhs (stmt1, name);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt2);
      gimple_assign_set_lhs (stmt2, name);

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));

      gsi_insert_before (bsi, stmt1, GSI_SAME_STMT);
      gsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}


/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      struct lim_aux_data *lim_data = init_lim_data (stmt);
	      lim_data->always_executed_in = outermost;
	    }
	  continue;
	}

      if (is_gimple_assign (stmt)
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	      == GIMPLE_BINARY_RHS))
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree op1 = gimple_assign_rhs2 (stmt);
	  struct loop *ol1
	    = outermost_invariant_loop (op1, loop_containing_stmt (stmt));

	  /* If the divisor is invariant, convert a/b to a*(1/b), allowing the
	     reciprocal to be hoisted out of the loop, saving an expensive
	     divide.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && ol1 != NULL
	      && outermost_invariant_loop (op0, ol1) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the loop
	     and saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
	      && integer_onep (op1)
	      && TREE_CODE (op0) == SSA_NAME
	      && has_single_use (op0))
	    stmt = rewrite_bittest (&bsi);
	}

      lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  lim_data->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_gimple_stmt (dump_file, stmt, 2, 0);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (lim_data->max_loop),
		   lim_data->cost);
	}

      if (lim_data->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends, and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			basic_block bb)
{
  struct loop *level;
  gimple_stmt_iterator bsi;
  gimple stmt;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed them here to force their operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      mark_virtual_ops_for_renaming (stmt);
      gsi_insert_on_edge (loop_preheader_edge (level), stmt);
      gsi_remove (&bsi, false);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p (cfun))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      max_loop = outermost_invariant_loop (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* If OP is an SSA_NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */

static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
  gimple stmt;

  if (!op
      || is_gimple_min_invariant (op))
    return;

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  stmt = SSA_NAME_DEF_STMT (op);
  if (gimple_nop_p (stmt))
    return;

  set_level (stmt, orig_loop, loop);
}

/* Forces the statements defining invariants in REF (and *INDEX) to be moved
   out of the LOOP.  The reference REF is used in the loop ORIG_LOOP.
   Callback for for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}

/* Releases list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}

/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  for (i = 0; VEC_iterate (mem_ref_locs_p, mem->accesses_in_loop, i, accs); i++)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  BITMAP_FREE (mem->vops);
  free (mem);
}

/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);
  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;
  ref->vops = BITMAP_ALLOC (NULL);

  return ref;
}

/* Allocates and returns the new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}

/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
			   loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}

/* Marks reference REF as stored in LOOP.  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}

/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  */

static void
gather_mem_refs_stmt (struct loop *loop, gimple stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  tree vname;
  bool is_stored;
  bitmap clvops;
  unsigned id;

  if (!gimple_vuse (stmt))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    {
      ref = (mem_ref_p) *slot;
      id = ref->id;
    }
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Memory reference %u: ", id);
	  print_generic_expr (dump_file, ref->mem, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  if (is_stored)
    mark_ref_stored (ref, loop);

  if ((vname = gimple_vuse (stmt)) != NULL_TREE)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (ref, loop, stmt, mem);
  return;

fail:
  clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  if ((vname = gimple_vuse (stmt)) != NULL_TREE)
    bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
}

/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap clvo, clvi;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
	continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	gather_mem_refs_stmt (loop, gsi_stmt (bsi));
    }

  /* Propagate the information about clobbered vops and accessed memory
     references up the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
	continue;

      clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
      clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
			loop_outer (loop)->num);
      bitmap_ior_into (clvo, clvi);

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}

/* Element of the hash table that maps vops to memory references.  */

struct vop_to_refs_elt
{
  /* DECL_UID of the vop.  */
  unsigned uid;

  /* List of all the references.  */
  bitmap refs_all;

  /* List of stored references.  */
  bitmap refs_stored;
};

/* A hash function for struct vop_to_refs_elt object OBJ.  */

static hashval_t
vtoe_hash (const void *obj)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj;

  return vtoe->uid;
}

/* An equality function for struct vop_to_refs_elt object OBJ1 with
   the uid of a vop OBJ2.  */

static int
vtoe_eq (const void *obj1, const void *obj2)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj1;
  const unsigned *const uid = (const unsigned *) obj2;

  return vtoe->uid == *uid;
}

/* A function to free the struct vop_to_refs_elt object.  */

static void
vtoe_free (void *obj)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) obj;

  BITMAP_FREE (vtoe->refs_all);
  BITMAP_FREE (vtoe->refs_stored);
  free (vtoe);
}

/* Records REF in the hashtable VOP_TO_REFS for the index VOP.  STORED is true
   if the reference REF is stored.  */

static void
record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
{
  void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
  struct vop_to_refs_elt *vtoe;

  if (!*slot)
    {
      vtoe = XNEW (struct vop_to_refs_elt);
      vtoe->uid = vop;
      vtoe->refs_all = BITMAP_ALLOC (NULL);
      vtoe->refs_stored = BITMAP_ALLOC (NULL);
      *slot = vtoe;
    }
  else
    vtoe = (struct vop_to_refs_elt *) *slot;

  bitmap_set_bit (vtoe->refs_all, ref);
  if (stored)
    bitmap_set_bit (vtoe->refs_stored, ref);
}

/* Returns the set of references that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_accesses (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_all;
}

/* Returns the set of stores that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_stores (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_stored;
}

/* Adds REF to the mapping from virtual operands to references in LOOP.  */

static void
add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
{
  htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  bool stored = bitmap_bit_p (ref->stored, loop->num);
  bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
			       loop->num);
  bitmap_iterator bi;
  unsigned vop;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
    {
      record_vop_access (map, vop, ref->id, stored);
    }
}

/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop;
	   sloop != current_loops->tree_root;
	   sloop = loop_outer (sloop))
	add_vop_ref_mapping (sloop, ref);
    }
}

/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}

/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;
  htab_t hempty;

  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
					    number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
						number_of_loops ());
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
					      number_of_loops ());
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
					   number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */
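/* A worked example: with SIZE1 = 4, SIZE2 = 2 and a constant DIFF = -2,
   the second region occupies [-2, -1] and the first one [0, 3], so they
   cannot overlap; with DIFF = 2 the regions [0, 3] and [2, 3] do overlap.
   If DIFF is not a constant, overlap cannot be ruled out and the function
   conservatively returns false.  */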

static bool
cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}

/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  */

static bool
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
{
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offsets differ in such a way that the locations cannot
     overlap, then they cannot alias.  */
  double_int size1, size2;
  aff_tree off1, off2;

  /* Perform basic offset and type-based disambiguation.  */
  if (!refs_may_alias_p (mem1, mem2))
    return false;

  /* The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  */
  if (optimize < 2)
    return true;

  get_inner_reference_aff (mem1, &off1, &size1);
  get_inner_reference_aff (mem2, &off2, &size2);
  aff_combination_expand (&off1, ttae_cache);
  aff_combination_expand (&off2, ttae_cache);
  aff_combination_scale (&off1, double_int_minus_one);
  aff_combination_add (&off2, &off1);

  if (cannot_overlap_p (&off2, size1, size2))
    return false;

  return true;
}

/* Rewrites the location LOC to use the variable TMP_VAR instead.  */

static void
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
{
  mark_virtual_ops_for_renaming (loc->stmt);
  *loc->ref = tmp_var;
  update_stmt (loc->stmt);
}

/* Adds all locations of REF in LOOP and its subloops to LOCS.  */

static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
		      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
	{
	  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
	    VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
	}
    }

  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}

/* Rewrites all references to REF in LOOP by variable TMP_VAR.  */

static void
rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
  unsigned i;
  mem_ref_loc_p loc;
  VEC (mem_ref_loc_p, heap) *locs = NULL;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    rewrite_mem_ref_loc (loc, tmp_var);
  VEC_free (mem_ref_loc_p, heap, locs);
}

/* The name and the length of the variable currently being generated for
   load/store motion (lsm).  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name, unless the result would exceed
   MAX_LSM_NAME_LENGTH, in which case S is silently dropped.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}

/* Stores the name for the temporary variable that replaces REF into
   lsm_tmp_name.  */
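/* For instance (roughly, assuming the involved declarations have names),
   a reference like p->f yields a base name like "p__f" (the dereference
   contributes one separator, the field access another), and a[i].x yields
   "a_I_x"; get_lsm_tmp_name below then appends the "_lsm" suffix.  */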

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
	name = "F";
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
	name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    case INTEGER_CST:
      /* Nothing.  */
      break;

    default:
      gcc_unreachable ();
    }
}

/* Determines the name for the temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is appended to the name of the temporary if it is a single digit;
   otherwise it is omitted.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}

/* Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put in the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted on the exits.  */
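/* A sketch of the transformation:

     while (...)
       if (cond)
	 MEM = x;

   becomes

     tmp = MEM;
     while (...)
       if (cond)
	 tmp = x;
     MEM = tmp;   on each exit edge

   which is valid here because can_sm_ref_p has checked that MEM either
   cannot trap or is always accessed in the loop.  */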

static void
execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
{
  tree tmp_var;
  unsigned i;
  gimple load, store;
  struct fmt_data fmt_data;
  edge ex;
  struct lim_aux_data *lim_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref->mem, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
			      get_lsm_tmp_name (ref->mem, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref->mem, force_move_till, &fmt_data);

  rewrite_mem_refs (loop, ref, tmp_var);

  /* Emit the load & stores.  */
  load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
  lim_data = init_lim_data (load);
  lim_data->max_loop = loop;
  lim_data->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  gsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
      gsi_insert_on_edge (ex, store);
    }
}

/* Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
			 VEC (edge, heap) *exits)
{
  mem_ref_p ref;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      execute_sm (loop, exits, ref);
    }
}

/* Returns true if REF is always accessed in LOOP.  */

static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref)
{
  VEC (mem_ref_loc_p, heap) *locs = NULL;
  unsigned i;
  mem_ref_loc_p loc;
  bool ret = false;
  struct loop *must_exec;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    {
      if (!get_lim_data (loc->stmt))
	continue;

      must_exec = get_lim_data (loc->stmt)->always_executed_in;
      if (!must_exec)
	continue;

      if (must_exec == loop
	  || flow_loop_nested_p (must_exec, loop))
	{
	  ret = true;
	  break;
	}
    }
  VEC_free (mem_ref_loc_p, heap, locs);

  return ret;
}

/* Returns true if REF1 and REF2 are independent.  */

static bool
refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
{
  if (ref1 == ref2
      || bitmap_bit_p (ref1->indep_ref, ref2->id))
    return true;
  if (bitmap_bit_p (ref1->dep_ref, ref2->id))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependency of refs %u and %u: ",
	     ref1->id, ref2->id);

  if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
			    &memory_accesses.ttae_cache))
    {
      bitmap_set_bit (ref1->dep_ref, ref2->id);
      bitmap_set_bit (ref2->dep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "dependent.\n");
      return false;
    }
  else
    {
      bitmap_set_bit (ref1->indep_ref, ref2->id);
      bitmap_set_bit (ref2->indep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "independent.\n");
      return true;
    }
}

/* Records the information whether REF is independent in LOOP (according
   to INDEP).  */

static void
record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
{
  if (indep)
    bitmap_set_bit (ref->indep_loop, loop->num);
  else
    bitmap_set_bit (ref->dep_loop, loop->num);
}
1971
1972 /* Returns true if REF is independent on all other memory references in
1973 LOOP. */
1974
1975 static bool
1976 ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
1977 {
1978 bitmap clobbers, refs_to_check, refs;
1979 unsigned i;
1980 bitmap_iterator bi;
1981 bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
1982 htab_t map;
1983 mem_ref_p aref;
1984
1985 /* If the reference is clobbered, it is not independent. */
1986 clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
1987 if (bitmap_intersect_p (ref->vops, clobbers))
1988 return false;
1989
1990 refs_to_check = BITMAP_ALLOC (NULL);
1991
1992 map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
1993 EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi)
1994 {
1995 if (stored)
1996 refs = get_vop_accesses (map, i);
1997 else
1998 refs = get_vop_stores (map, i);
1999
2000 bitmap_ior_into (refs_to_check, refs);
2001 }
2002
2003 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
2004 {
2005 aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2006 if (!refs_independent_p (ref, aref))
2007 {
2008 ret = false;
2009 record_indep_loop (loop, aref, false);
2010 break;
2011 }
2012 }
2013
2014 BITMAP_FREE (refs_to_check);
2015 return ret;
2016 }
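
/* E.g. (an example invented for this comment) in

     while (cond)
       {
         x += *p;
         *q = x;
       }

   the reference *p is only loaded, so it is checked for independence only
   against the stores of the loop (here *q), since two loads never conflict
   with each other; *q, being stored, must be independent of all the
   accesses.  */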

/* Returns true if REF is independent of all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  */

static bool
ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
{
  bool ret;

  if (bitmap_bit_p (ref->indep_loop, loop->num))
    return true;
  if (bitmap_bit_p (ref->dep_loop, loop->num))
    return false;

  ret = ref_indep_loop_p_1 (loop, ref);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
             ref->id, loop->num, ret ? "independent" : "dependent");

  record_indep_loop (loop, ref, ret);

  return ret;
}

/* Returns true if we can perform store motion of REF from LOOP.  */

static bool
can_sm_ref_p (struct loop *loop, mem_ref_p ref)
{
  /* Unless the reference is stored in the loop, there is nothing to do.  */
  if (!bitmap_bit_p (ref->stored, loop->num))
    return false;

  /* It should be movable.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
      || TREE_THIS_VOLATILE (ref->mem)
      || !for_each_index (&ref->mem, may_move_till, loop))
    return false;

  /* If it can trap, it must be always executed in LOOP; otherwise the
     transformation might introduce a trap that the original program did
     not contain.  */
  if (tree_could_trap_p (ref->mem)
      && !ref_always_accessed_p (loop, ref))
    return false;

  /* And it must be independent of all other memory references
     in LOOP.  */
  if (!ref_indep_loop_p (loop, ref))
    return false;

  return true;
}
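
/* E.g. (an example invented for this comment) store motion of the global G
   is possible in

     while (cond)
       g++;

   but not if G is declared volatile, if the store to G is only conditional
   and the access may trap, or if the loop contains another access that may
   alias G.  */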

/* Marks the references in LOOP for which store motion should be performed
   in REFS_TO_SM.  SM_EXECUTED is the set of references for which store
   motion was already performed in one of the outer loops.  */

static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop->num);
  unsigned i;
  bitmap_iterator bi;
  mem_ref_p ref;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (can_sm_ref_p (loop, ref))
        bitmap_set_bit (refs_to_sm, i);
    }
}

/* Checks whether LOOP (with exits stored in the EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
                      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}

/* Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for which
   store motion was already executed in one of the outer loops.  */

static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *subloop;
  bitmap sm_in_loop = BITMAP_ALLOC (NULL);

  if (loop_suitable_for_sm (loop, exits))
    {
      find_refs_for_sm (loop, sm_executed, sm_in_loop);
      hoist_memory_references (loop, sm_in_loop, exits);
    }
  VEC_free (edge, heap, exits);

  /* Make the references moved out of LOOP visible while the subloops are
     processed (so that they are not moved redundantly again), and remove
     them from SM_EXECUTED afterwards.  */
  bitmap_ior_into (sm_executed, sm_in_loop);
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    store_motion_loop (subloop, sm_executed);
  bitmap_and_compl_into (sm_executed, sm_in_loop);
  BITMAP_FREE (sm_in_loop);
}
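
/* E.g. (an example invented for this comment) in

     while (cond1)
       {
         g = g + 1;
         while (cond2)
           g = g + 2;
       }

   the accesses to G are moved out of the outer loop directly; because G is
   then recorded in SM_EXECUTED while the inner loop is processed, no
   second, redundant store motion is performed there.  */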

/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
store_motion (void)
{
  struct loop *loop;
  bitmap sm_executed = BITMAP_ALLOC (NULL);

  for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
    store_motion_loop (loop, sm_executed);

  BITMAP_FREE (sm_executed);
  gsi_commit_edge_inserts ();
}

/* Fills the ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block BB records the outermost loop for which
   execution of its header implies execution of BB.  CONTAINS_CALL is the
   bitmap of blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
        {
          edge_iterator ei;
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          if (TEST_BIT (contains_call, bb->index))
            break;

          /* If BB has a successor outside of LOOP, the blocks that follow
             it are not guaranteed to be executed.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              break;
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }

      /* Everything on the dominator chain from LAST (the last block found
         above that dominates the latch) up to the header is always executed
         when LOOP is entered.  */
      while (1)
        {
          last->aux = loop;
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
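
/* E.g. (an example invented for this comment) in

     while (cond1)
       {
         a;
         if (cond2)
           b;
       }

   the block containing A dominates the latch and is therefore always
   executed when the loop is entered, so its AUX is set to the loop; the
   block containing B is executed only conditionally, so its AUX stays
   NULL.  */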

/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  gimple_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          if (nonpure_call_p (gsi_stmt (bsi)))
            break;
        }

      if (!gsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);

  lim_aux_data_map = pointer_map_create ();
}

/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;
  unsigned i;
  bitmap b;
  htab_t h;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }

  pointer_map_destroy (lim_aux_data_map);

  VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
  htab_delete (memory_accesses.refs);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.all_refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.clobbered_vops, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.clobbered_vops);

  for (i = 0; VEC_iterate (htab_t, memory_accesses.vop_ref_map, i, h); i++)
    htab_delete (h);
  VEC_free (htab_t, heap, memory_accesses.vop_ref_map);

  if (memory_accesses.ttae_cache)
    pointer_map_destroy (memory_accesses.ttae_cache);
}

/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */
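
/* E.g. (an example invented for this comment)

     for (i = 0; i < n; i++)              t = a * b;
       x[i] = a * b;             -->      for (i = 0; i < n; i++)
                                            x[i] = t;

   The multiplication is expensive enough to be hoisted; a cheap statement
   such as a simple copy may be left in place when moving it would only
   increase register pressure.  */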
2291
2292 void
2293 tree_ssa_lim (void)
2294 {
2295 tree_ssa_lim_initialize ();
2296
2297 /* Gathers information about memory accesses in the loops. */
2298 analyze_memory_references ();
2299
2300 /* For each statement determine the outermost loop in that it is
2301 invariant and cost for computing the invariant. */
2302 determine_invariantness ();
2303
2304 /* Execute store motion. Force the necessary invariants to be moved
2305 out of the loops as well. */
2306 store_motion ();
2307
2308 /* Move the expressions that are expensive enough. */
2309 move_computations ();
2310
2311 tree_ssa_lim_finalize ();
2312 }