1 /* Loop distribution.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Georges-Andre Silber <Georges-Andre.Silber@ensmp.fr>
5 and Sebastian Pop <sebastian.pop@amd.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This pass performs loop distribution: for example, the loop
24
25 |DO I = 2, N
26 | A(I) = B(I) + C
27 | D(I) = A(I-1)*E
28 |ENDDO
29
30 is transformed to
31
32 |DOALL I = 2, N
33 | A(I) = B(I) + C
34 |ENDDO
35 |
36 |DOALL I = 2, N
37 | D(I) = A(I-1)*E
38 |ENDDO
39
40 This pass uses an RDG (Reduced Dependence Graph) built on top of
41 the data dependence relations. The RDG is then topologically sorted to
42 obtain a map of information producers/consumers based on which it
43 generates the new loops. */
44
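/* As a concrete illustration (a sketch, not from the original
   sources; the array names are hypothetical): with
   -ftree-loop-distribute-patterns, a C loop like

   |for (i = 0; i < n; i++)
   |  {
   |    a[i] = 0;
   |    b[i] = c[i] + d[i];
   |  }

   is distributed into a loop computing B and a zeroing loop for A,
   and the latter is then replaced by a single call equivalent to
   memset (a, 0, n * sizeof (*a)).  */
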
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tree-flow.h"
49 #include "cfgloop.h"
50 #include "tree-chrec.h"
51 #include "tree-data-ref.h"
52 #include "tree-scalar-evolution.h"
53 #include "tree-pass.h"
54
55 /* If bit I is not set, node I represents an operation that has
56 already been performed and that should not be performed again.
57 This is the subgraph of remaining important computations that is
58 passed to the DFS algorithm, so that the same stores are not
59 included several times in different loops. */
60 static bitmap remaining_stmts;
61
62 /* A node of the RDG is marked in this bitmap when one of its
63 predecessors is a node that writes to memory. */
64 static bitmap upstream_mem_writes;
65
66 /* Update the PHI nodes of NEW_LOOP. NEW_LOOP is a duplicate of
67 ORIG_LOOP. */
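
/* For illustration (a sketch, not from the original sources): if the
   header of ORIG_LOOP contains the induction variable phi

   |i_1 = PHI <0 (preheader), i_2 (latch)>

   this function completes the matching phi in the header of NEW_LOOP
   with the argument 0 on the new preheader edge, and with
   get_current_def (i_2) on the new latch edge.  */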
68
69 static void
70 update_phis_for_loop_copy (struct loop *orig_loop, struct loop *new_loop)
71 {
72 tree new_ssa_name;
73 gimple_stmt_iterator si_new, si_orig;
74 edge orig_loop_latch = loop_latch_edge (orig_loop);
75 edge orig_entry_e = loop_preheader_edge (orig_loop);
76 edge new_loop_entry_e = loop_preheader_edge (new_loop);
77
78 /* Scan the phis in the headers of the old and new loops
79 (they are organized in exactly the same order). */
80 for (si_new = gsi_start_phis (new_loop->header),
81 si_orig = gsi_start_phis (orig_loop->header);
82 !gsi_end_p (si_new) && !gsi_end_p (si_orig);
83 gsi_next (&si_new), gsi_next (&si_orig))
84 {
85 tree def;
86 source_location locus;
87 gimple phi_new = gsi_stmt (si_new);
88 gimple phi_orig = gsi_stmt (si_orig);
89
90 /* Add the first phi argument for the phi in NEW_LOOP (the one
91 associated with the entry of NEW_LOOP). */
92 def = PHI_ARG_DEF_FROM_EDGE (phi_orig, orig_entry_e);
93 locus = gimple_phi_arg_location_from_edge (phi_orig, orig_entry_e);
94 add_phi_arg (phi_new, def, new_loop_entry_e, locus);
95
96 /* Add the second phi argument for the phi in NEW_LOOP (the one
97 associated with the latch of NEW_LOOP). */
98 def = PHI_ARG_DEF_FROM_EDGE (phi_orig, orig_loop_latch);
99 locus = gimple_phi_arg_location_from_edge (phi_orig, orig_loop_latch);
100
101 if (TREE_CODE (def) == SSA_NAME)
102 {
103 new_ssa_name = get_current_def (def);
104
105 if (!new_ssa_name)
106 /* This only happens if there are no definitions inside the
107 loop. Use the invariant in the new loop as is. */
108 new_ssa_name = def;
109 }
110 else
111 /* Could be an integer. */
112 new_ssa_name = def;
113
114 add_phi_arg (phi_new, new_ssa_name, loop_latch_edge (new_loop), locus);
115 }
116 }
117
118 /* Return a copy of LOOP placed before the original LOOP. */
119
120 static struct loop *
121 copy_loop_before (struct loop *loop)
122 {
123 struct loop *res;
124 edge preheader = loop_preheader_edge (loop);
125
126 if (!single_exit (loop))
127 return NULL;
128
129 initialize_original_copy_tables ();
130 res = slpeel_tree_duplicate_loop_to_edge_cfg (loop, preheader);
131 free_original_copy_tables ();
132
133 if (!res)
134 return NULL;
135
136 update_phis_for_loop_copy (loop, res);
137 rename_variables_in_loop (res);
138
139 return res;
140 }
141
142 /* Creates an empty basic block after LOOP. */
143
144 static void
145 create_bb_after_loop (struct loop *loop)
146 {
147 edge exit = single_exit (loop);
148
149 if (!exit)
150 return;
151
152 split_edge (exit);
153 }
154
155 /* Generate code for PARTITION from the code in LOOP. The loop is
156 copied when COPY_P is true. All the statements not flagged in the
157 PARTITION bitmap are removed from the loop or from its copy. The
158 statements are indexed in sequence inside a basic block, and the
159 basic blocks of a loop are taken in dom order. Returns true when
160 the code generation succeeded. */
161
162 static bool
163 generate_loops_for_partition (struct loop *loop, bitmap partition, bool copy_p)
164 {
165 unsigned i, x;
166 gimple_stmt_iterator bsi;
167 basic_block *bbs;
168
169 if (copy_p)
170 {
171 loop = copy_loop_before (loop);
172 create_preheader (loop, CP_SIMPLE_PREHEADERS);
173 create_bb_after_loop (loop);
174 }
175
176 if (loop == NULL)
177 return false;
178
179 /* Remove stmts not in the PARTITION bitmap. The order in which we
180 visit the phi nodes and the statements is exactly as in
181 stmts_from_loop. */
182 bbs = get_loop_body_in_dom_order (loop);
183
184 for (x = 0, i = 0; i < loop->num_nodes; i++)
185 {
186 basic_block bb = bbs[i];
187
188 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
189 if (!bitmap_bit_p (partition, x++))
190 {
191 gimple phi = gsi_stmt (bsi);
192 if (!is_gimple_reg (gimple_phi_result (phi)))
193 mark_virtual_phi_result_for_renaming (phi);
194 remove_phi_node (&bsi, true);
195 }
196 else
197 gsi_next (&bsi);
198
199 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
200 {
201 gimple stmt = gsi_stmt (bsi);
202 if (gimple_code (gsi_stmt (bsi)) != GIMPLE_LABEL
203 && !bitmap_bit_p (partition, x++))
204 {
205 unlink_stmt_vdef (stmt);
206 gsi_remove (&bsi, true);
207 release_defs (stmt);
208 }
209 else
210 gsi_next (&bsi);
211 }
212 }
213
214 free (bbs);
215 return true;
216 }
217
218 /* Build the size argument for a memset call. */
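/* For instance (illustrative): with NB_ITER == n and OP of a 4-byte
   element type, the returned expression is equivalent to
   (sizetype) n * 4.  */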
219
220 static inline tree
221 build_size_arg_loc (location_t loc, tree nb_iter, tree op,
222 gimple_seq *stmt_list)
223 {
224 gimple_seq stmts;
225 tree x = size_binop_loc (loc, MULT_EXPR,
226 fold_convert_loc (loc, sizetype, nb_iter),
227 TYPE_SIZE_UNIT (TREE_TYPE (op)));
228 x = force_gimple_operand (x, &stmts, true, NULL);
229 gimple_seq_add_seq (stmt_list, stmts);
230
231 return x;
232 }
233
234 /* Generate a call to memset that zeroes the NB_ITER elements written by STMT through OP0; the new statements are inserted after BSI. */
235
236 static void
237 generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
238 gimple_stmt_iterator bsi)
239 {
240 tree addr_base, nb_bytes;
241 bool res = false;
242 gimple_seq stmt_list = NULL, stmts;
243 gimple fn_call;
244 tree mem, fn;
245 struct data_reference *dr = XCNEW (struct data_reference);
246 location_t loc = gimple_location (stmt);
247
248 DR_STMT (dr) = stmt;
249 DR_REF (dr) = op0;
250 res = dr_analyze_innermost (dr);
251 gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));
252
253 nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
254 addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
255 addr_base = fold_convert_loc (loc, sizetype, addr_base);
256
257 /* Test for a negative stride that visits every element, i.e. DR_STEP == -TYPE_SIZE_UNIT. */
258 if (integer_zerop (size_binop (PLUS_EXPR,
259 TYPE_SIZE_UNIT (TREE_TYPE (op0)),
260 fold_convert (sizetype, DR_STEP (dr)))))
261 {
262 addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
263 fold_convert_loc (loc, sizetype, nb_bytes));
264 addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
265 TYPE_SIZE_UNIT (TREE_TYPE (op0)));
266 }
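
/* Worked example for the branch above (illustrative, not from the
   original sources): for a char array zeroed by
   "for (i = n - 1; i >= 0; i--) a[i] = 0;" we have DR_STEP == -1
   and TYPE_SIZE_UNIT == 1, so the test succeeds; NB_BYTES is n and
   the start address becomes &a[n-1] - n + 1, i.e. &a[0], covering
   exactly the n bytes written by the loop.  */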
267
268 addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
269 TREE_TYPE (DR_BASE_ADDRESS (dr)),
270 DR_BASE_ADDRESS (dr), addr_base);
271 mem = force_gimple_operand (addr_base, &stmts, true, NULL);
272 gimple_seq_add_seq (&stmt_list, stmts);
273
274 fn = build_fold_addr_expr (implicit_built_in_decls [BUILT_IN_MEMSET]);
275 fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
276 gimple_seq_add_stmt (&stmt_list, fn_call);
277 gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);
278
279 if (dump_file && (dump_flags & TDF_DETAILS))
280 fprintf (dump_file, "generated memset zero\n");
281
282 free_data_ref (dr);
283 }
284
285 /* Tries to generate a builtin call for the statements of LOOP
286 flagged by the bits set in PARTITION. Returns true when the
287 operation succeeded. */
288
289 static bool
290 generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
291 {
292 bool res = false;
293 unsigned i, x = 0;
294 basic_block *bbs;
295 gimple write = NULL;
296 gimple_stmt_iterator bsi;
297 tree nb_iter = number_of_exit_cond_executions (loop);
298
299 if (!nb_iter || nb_iter == chrec_dont_know)
300 return false;
301
302 bbs = get_loop_body_in_dom_order (loop);
303
304 for (i = 0; i < loop->num_nodes; i++)
305 {
306 basic_block bb = bbs[i];
307
308 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
309 x++;
310
311 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
312 {
313 gimple stmt = gsi_stmt (bsi);
314
315 if (bitmap_bit_p (partition, x++)
316 && is_gimple_assign (stmt)
317 && !is_gimple_reg (gimple_assign_lhs (stmt)))
318 {
319 /* Don't generate the builtins when there is more than
320 one memory write. */
321 if (write != NULL)
322 goto end;
323
324 write = stmt;
325 if (bb == loop->latch)
326 nb_iter = number_of_latch_executions (loop);
327 }
328 }
329 }
330
331 if (!stmt_with_adjacent_zero_store_dr_p (write))
332 goto end;
333
334 /* The new statements will be placed before LOOP. */
335 bsi = gsi_last_bb (loop_preheader_edge (loop)->src);
336 generate_memset_zero (write, gimple_assign_lhs (write), nb_iter, bsi);
337 res = true;
338
339 /* If this is the last partition for which we generate code, we have
340 to destroy the loop. */
341 if (!copy_p)
342 {
343 unsigned nbbs = loop->num_nodes;
344 edge exit = single_exit (loop);
345 basic_block src = loop_preheader_edge (loop)->src, dest = exit->dest;
346 redirect_edge_pred (exit, src);
347 exit->flags &= ~(EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
348 exit->flags |= EDGE_FALLTHRU;
349 cancel_loop_tree (loop);
350 rescan_loop_exit (exit, false, true);
351
352 for (i = 0; i < nbbs; i++)
353 delete_basic_block (bbs[i]);
354
355 set_immediate_dominator (CDI_DOMINATORS, dest,
356 recompute_dominator (CDI_DOMINATORS, dest));
357 }
358
359 end:
360 free (bbs);
361 return res;
362 }
363
364 /* Generates code for PARTITION. For simple loops, this function can
365 generate a builtin. */
366
367 static bool
368 generate_code_for_partition (struct loop *loop, bitmap partition, bool copy_p)
369 {
370 if (generate_builtin (loop, partition, copy_p))
371 return true;
372
373 return generate_loops_for_partition (loop, partition, copy_p);
374 }
375
376
377 /* Returns true if the node V of RDG cannot be recomputed. */
378
379 static bool
380 rdg_cannot_recompute_vertex_p (struct graph *rdg, int v)
381 {
382 if (RDG_MEM_WRITE_STMT (rdg, v))
383 return true;
384
385 return false;
386 }
387
388 /* Returns true when the vertex V has already been generated in the
389 current partition (V is in PROCESSED), or when V belongs to another
390 partition and cannot be recomputed (V is not in REMAINING_STMTS). */
391
392 static inline bool
393 already_processed_vertex_p (bitmap processed, int v)
394 {
395 return (bitmap_bit_p (processed, v)
396 || !bitmap_bit_p (remaining_stmts, v));
397 }
398
399 /* Returns NULL when there is no anti-dependence among the successors
400 of vertex V, otherwise returns the edge with the anti-dep. */
401
402 static struct graph_edge *
403 has_anti_dependence (struct vertex *v)
404 {
405 struct graph_edge *e;
406
407 if (v->succ)
408 for (e = v->succ; e; e = e->succ_next)
409 if (RDGE_TYPE (e) == anti_dd)
410 return e;
411
412 return NULL;
413 }
414
415 /* Returns true when one of the predecessors of vertex V in RDG has an upstream memory write, ignoring flow channels. */
416
417 static bool
418 predecessor_has_mem_write (struct graph *rdg, struct vertex *v)
419 {
420 struct graph_edge *e;
421
422 if (v->pred)
423 for (e = v->pred; e; e = e->pred_next)
424 if (bitmap_bit_p (upstream_mem_writes, e->src)
425 /* Don't consider flow channels: a write to memory followed
426 by a read from memory. These channels allow splitting
427 the RDG into different partitions. */
428 && !RDG_MEM_WRITE_STMT (rdg, e->src))
429 return true;
430
431 return false;
432 }
433
434 /* Initializes the upstream_mem_writes bitmap following the
435 information from RDG. */
436
437 static void
438 mark_nodes_having_upstream_mem_writes (struct graph *rdg)
439 {
440 int v, x;
441 bitmap seen = BITMAP_ALLOC (NULL);
442
443 for (v = rdg->n_vertices - 1; v >= 0; v--)
444 if (!bitmap_bit_p (seen, v))
445 {
446 unsigned i;
447 VEC (int, heap) *nodes = VEC_alloc (int, heap, 3);
448
449 graphds_dfs (rdg, &v, 1, &nodes, false, NULL);
450
451 FOR_EACH_VEC_ELT (int, nodes, i, x)
452 {
453 if (!bitmap_set_bit (seen, x))
454 continue;
455
456 if (RDG_MEM_WRITE_STMT (rdg, x)
457 || predecessor_has_mem_write (rdg, &(rdg->vertices[x]))
458 /* In anti dependences the read should occur before
459 the write; this is why both the read and the write
460 should be placed in the same partition. */
461 || has_anti_dependence (&(rdg->vertices[x])))
462 {
463 bitmap_set_bit (upstream_mem_writes, x);
464 }
465 }
466
467 VEC_free (int, heap, nodes);
468 }
469 }
470
471 /* Returns true when vertex U has a memory write node as a predecessor
472 in RDG. */
473
474 static bool
475 has_upstream_mem_writes (int u)
476 {
477 return bitmap_bit_p (upstream_mem_writes, u);
478 }
479
480 static void rdg_flag_vertex_and_dependent (struct graph *, int, bitmap, bitmap,
481 bitmap, bool *);
482
483 /* Flag the uses of U, stopping the recursion based on the
484 information in upstream_mem_writes. */
485
486 static void
487 rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
488 bitmap processed, bool *part_has_writes)
489 {
490 use_operand_p use_p;
491 struct vertex *x = &(rdg->vertices[u]);
492 gimple stmt = RDGV_STMT (x);
493 struct graph_edge *anti_dep = has_anti_dependence (x);
494
495 /* Keep in the same partition the destination of an anti-dependence,
496 because this is a store to the exact same location. Putting this
497 in another partition is bad for cache locality. */
498 if (anti_dep)
499 {
500 int v = anti_dep->dest;
501
502 if (!already_processed_vertex_p (processed, v))
503 rdg_flag_vertex_and_dependent (rdg, v, partition, loops,
504 processed, part_has_writes);
505 }
506
507 if (gimple_code (stmt) != GIMPLE_PHI)
508 {
509 if ((use_p = gimple_vuse_op (stmt)) != NULL_USE_OPERAND_P)
510 {
511 tree use = USE_FROM_PTR (use_p);
512
513 if (TREE_CODE (use) == SSA_NAME)
514 {
515 gimple def_stmt = SSA_NAME_DEF_STMT (use);
516 int v = rdg_vertex_for_stmt (rdg, def_stmt);
517
518 if (v >= 0
519 && !already_processed_vertex_p (processed, v))
520 rdg_flag_vertex_and_dependent (rdg, v, partition, loops,
521 processed, part_has_writes);
522 }
523 }
524 }
525
526 if (is_gimple_assign (stmt) && has_upstream_mem_writes (u))
527 {
528 tree op0 = gimple_assign_lhs (stmt);
529
530 /* Scalar channels don't have enough space for transmitting data
531 between tasks, unless we add more storage by privatizing. */
532 if (is_gimple_reg (op0))
533 {
534 use_operand_p use_p;
535 imm_use_iterator iter;
536
537 FOR_EACH_IMM_USE_FAST (use_p, iter, op0)
538 {
539 int v = rdg_vertex_for_stmt (rdg, USE_STMT (use_p));
540
541 if (!already_processed_vertex_p (processed, v))
542 rdg_flag_vertex_and_dependent (rdg, v, partition, loops,
543 processed, part_has_writes);
544 }
545 }
546 }
547 }
548
549 /* Flag V from RDG as part of PARTITION, and also flag its loop number
550 in LOOPS. */
551
552 static void
553 rdg_flag_vertex (struct graph *rdg, int v, bitmap partition, bitmap loops,
554 bool *part_has_writes)
555 {
556 struct loop *loop;
557
558 if (!bitmap_set_bit (partition, v))
559 return;
560
561 loop = loop_containing_stmt (RDG_STMT (rdg, v));
562 bitmap_set_bit (loops, loop->num);
563
564 if (rdg_cannot_recompute_vertex_p (rdg, v))
565 {
566 *part_has_writes = true;
567 bitmap_clear_bit (remaining_stmts, v);
568 }
569 }
570
571 /* Flag in the bitmap PARTITION the vertex V and all its predecessors.
572 Also flag their loop number in LOOPS. */
573
574 static void
575 rdg_flag_vertex_and_dependent (struct graph *rdg, int v, bitmap partition,
576 bitmap loops, bitmap processed,
577 bool *part_has_writes)
578 {
579 unsigned i;
580 VEC (int, heap) *nodes = VEC_alloc (int, heap, 3);
581 int x;
582
583 bitmap_set_bit (processed, v);
584 rdg_flag_uses (rdg, v, partition, loops, processed, part_has_writes);
585 graphds_dfs (rdg, &v, 1, &nodes, false, remaining_stmts);
586 rdg_flag_vertex (rdg, v, partition, loops, part_has_writes);
587
588 FOR_EACH_VEC_ELT (int, nodes, i, x)
589 if (!already_processed_vertex_p (processed, x))
590 rdg_flag_vertex_and_dependent (rdg, x, partition, loops, processed,
591 part_has_writes);
592
593 VEC_free (int, heap, nodes);
594 }
595
596 /* Initialize CONDS with the exit condition statements of LOOP,
597 i.e. the last statement of each basic block that exits LOOP. */
598
599 static void
600 collect_condition_stmts (struct loop *loop, VEC (gimple, heap) **conds)
601 {
602 unsigned i;
603 edge e;
604 VEC (edge, heap) *exits = get_loop_exit_edges (loop);
605
606 FOR_EACH_VEC_ELT (edge, exits, i, e)
607 {
608 gimple cond = last_stmt (e->src);
609
610 if (cond)
611 VEC_safe_push (gimple, heap, *conds, cond);
612 }
613
614 VEC_free (edge, heap, exits);
615 }
616
617 /* Add to PARTITION all the exit condition statements for LOOPS
618 together with all their dependent statements determined from
619 RDG. */
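
/* A sketch of why this is needed (illustrative, not from the
   original sources): when

   |for (i = 0; i < n; i++)
   |  { a[i] = ...; b[i] = ...; }

   is split into two loops, each generated loop needs its own copy of
   the exit test "i < n", so the statements computing each exit
   condition, together with everything they depend on, are added to
   every partition.  */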
620
621 static void
622 rdg_flag_loop_exits (struct graph *rdg, bitmap loops, bitmap partition,
623 bitmap processed, bool *part_has_writes)
624 {
625 unsigned i;
626 bitmap_iterator bi;
627 VEC (gimple, heap) *conds = VEC_alloc (gimple, heap, 3);
628
629 EXECUTE_IF_SET_IN_BITMAP (loops, 0, i, bi)
630 collect_condition_stmts (get_loop (i), &conds);
631
632 while (!VEC_empty (gimple, conds))
633 {
634 gimple cond = VEC_pop (gimple, conds);
635 int v = rdg_vertex_for_stmt (rdg, cond);
636 bitmap new_loops = BITMAP_ALLOC (NULL);
637
638 if (!already_processed_vertex_p (processed, v))
639 rdg_flag_vertex_and_dependent (rdg, v, partition, new_loops, processed,
640 part_has_writes);
641
642 EXECUTE_IF_SET_IN_BITMAP (new_loops, 0, i, bi)
643 if (bitmap_set_bit (loops, i))
644 collect_condition_stmts (get_loop (i), &conds);
645
646 BITMAP_FREE (new_loops);
647 }
648
649 VEC_free (gimple, heap, conds);
650 }
651
652 /* Returns a bitmap in which all the statements needed for computing
653 the strongly connected component C of the RDG are flagged, also
654 including the loop exit conditions. */
655
656 static bitmap
657 build_rdg_partition_for_component (struct graph *rdg, rdgc c,
658 bool *part_has_writes)
659 {
660 int i, v;
661 bitmap partition = BITMAP_ALLOC (NULL);
662 bitmap loops = BITMAP_ALLOC (NULL);
663 bitmap processed = BITMAP_ALLOC (NULL);
664
665 FOR_EACH_VEC_ELT (int, c->vertices, i, v)
666 if (!already_processed_vertex_p (processed, v))
667 rdg_flag_vertex_and_dependent (rdg, v, partition, loops, processed,
668 part_has_writes);
669
670 rdg_flag_loop_exits (rdg, loops, partition, processed, part_has_writes);
671
672 BITMAP_FREE (processed);
673 BITMAP_FREE (loops);
674 return partition;
675 }
676
677 /* Free memory for COMPONENTS. */
678
679 static void
680 free_rdg_components (VEC (rdgc, heap) *components)
681 {
682 int i;
683 rdgc x;
684
685 FOR_EACH_VEC_ELT (rdgc, components, i, x)
686 {
687 VEC_free (int, heap, x->vertices);
688 free (x);
689 }
690
691 VEC_free (rdgc, heap, components);
692 }
693
694 /* Build the COMPONENTS vector with the strongly connected components
695 of RDG in which the STARTING_VERTICES occur. */
696
697 static void
698 rdg_build_components (struct graph *rdg, VEC (int, heap) *starting_vertices,
699 VEC (rdgc, heap) **components)
700 {
701 int i, v;
702 bitmap saved_components = BITMAP_ALLOC (NULL);
703 int n_components = graphds_scc (rdg, NULL);
704 VEC (int, heap) **all_components = XNEWVEC (VEC (int, heap) *, n_components);
705
706 for (i = 0; i < n_components; i++)
707 all_components[i] = VEC_alloc (int, heap, 3);
708
709 for (i = 0; i < rdg->n_vertices; i++)
710 VEC_safe_push (int, heap, all_components[rdg->vertices[i].component], i);
711
712 FOR_EACH_VEC_ELT (int, starting_vertices, i, v)
713 {
714 int c = rdg->vertices[v].component;
715
716 if (bitmap_set_bit (saved_components, c))
717 {
718 rdgc x = XCNEW (struct rdg_component);
719 x->num = c;
720 x->vertices = all_components[c];
721
722 VEC_safe_push (rdgc, heap, *components, x);
723 }
724 }
725
726 for (i = 0; i < n_components; i++)
727 if (!bitmap_bit_p (saved_components, i))
728 VEC_free (int, heap, all_components[i]);
729
730 free (all_components);
731 BITMAP_FREE (saved_components);
732 }
733
734 /* Returns true when it is possible to generate a builtin pattern for
735 the PARTITION of RDG. For the moment we detect only the memset
736 zero pattern. */
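
/* For example (illustrative): a partition containing only the store

   |a[i] = 0;

   has nb_reads == 0, nb_writes == 1 and stores_zero == 1, so it
   qualifies; a partition that also contains a load such as
   "x = b[i]" does not, because nb_reads != 0.  */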
737
738 static bool
739 can_generate_builtin (struct graph *rdg, bitmap partition)
740 {
741 unsigned i;
742 bitmap_iterator bi;
743 int nb_reads = 0;
744 int nb_writes = 0;
745 int stores_zero = 0;
746
747 EXECUTE_IF_SET_IN_BITMAP (partition, 0, i, bi)
748 if (RDG_MEM_READS_STMT (rdg, i))
749 nb_reads++;
750 else if (RDG_MEM_WRITE_STMT (rdg, i))
751 {
752 nb_writes++;
753 if (stmt_with_adjacent_zero_store_dr_p (RDG_STMT (rdg, i)))
754 stores_zero++;
755 }
756
757 return stores_zero == 1 && nb_writes == 1 && nb_reads == 0;
758 }
759
760 /* Returns true when PARTITION1 and PARTITION2 have similar memory
761 accesses in RDG. */
762
763 static bool
764 similar_memory_accesses (struct graph *rdg, bitmap partition1,
765 bitmap partition2)
766 {
767 unsigned i, j;
768 bitmap_iterator bi, bj;
769
770 EXECUTE_IF_SET_IN_BITMAP (partition1, 0, i, bi)
771 if (RDG_MEM_WRITE_STMT (rdg, i)
772 || RDG_MEM_READS_STMT (rdg, i))
773 EXECUTE_IF_SET_IN_BITMAP (partition2, 0, j, bj)
774 if (RDG_MEM_WRITE_STMT (rdg, j)
775 || RDG_MEM_READS_STMT (rdg, j))
776 if (rdg_has_similar_memory_accesses (rdg, i, j))
777 return true;
778
779 return false;
780 }
781
782 /* Fuse all the partitions from PARTITIONS that contain similar memory
783 references, i.e., this improves cache locality. This
784 function does not fuse those partitions that contain patterns that
785 can be code generated with builtins. */
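
/* Illustrative example (not from the original sources): if two
   partitions both store into the array A, as in

   |a[2*i] = f (i);
   |a[2*i+1] = g (i);

   they are fused, so that a single generated loop keeps the accesses
   to A together.  */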
786
787 static void
788 fuse_partitions_with_similar_memory_accesses (struct graph *rdg,
789 VEC (bitmap, heap) **partitions)
790 {
791 int p1, p2;
792 bitmap partition1, partition2;
793
794 FOR_EACH_VEC_ELT (bitmap, *partitions, p1, partition1)
795 if (!can_generate_builtin (rdg, partition1))
796 FOR_EACH_VEC_ELT (bitmap, *partitions, p2, partition2)
797 if (p1 != p2
798 && !can_generate_builtin (rdg, partition2)
799 && similar_memory_accesses (rdg, partition1, partition2))
800 {
801 bitmap_ior_into (partition1, partition2);
802 VEC_ordered_remove (bitmap, *partitions, p2);
803 p2--;
804 }
805 }
806
807 /* Returns true when DEF is an SSA_NAME defined in LOOP and used
808 outside of the LOOP. */
809
810 static bool
811 ssa_name_has_uses_outside_loop_p (tree def, loop_p loop)
812 {
813 imm_use_iterator imm_iter;
814 use_operand_p use_p;
815
816 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
817 if (loop != loop_containing_stmt (USE_STMT (use_p)))
818 return true;
819
820 return false;
821 }
822
823 /* Returns true when STMT defines a scalar variable used after the
824 loop. */
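
/* E.g. (a sketch, not from the original sources): in

   |for (i = 0; i < n; i++)
   |  s = s + a[i];
   |use (s);

   the statement updating S inside the loop defines an SSA name whose
   value is used after the loop, so this function returns true
   for it.  */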
825
826 static bool
827 stmt_has_scalar_dependences_outside_loop (gimple stmt)
828 {
829 tree name;
830
831 switch (gimple_code (stmt))
832 {
833 case GIMPLE_ASSIGN:
834 name = gimple_assign_lhs (stmt);
835 break;
836
837 case GIMPLE_PHI:
838 name = gimple_phi_result (stmt);
839 break;
840
841 default:
842 return false;
843 }
844
845 return TREE_CODE (name) == SSA_NAME
846 && ssa_name_has_uses_outside_loop_p (name, loop_containing_stmt (stmt));
847 }
848
849 /* Returns true when STMT will be code generated in a partition of
850 RDG different from PART, one that will not be code generated as
851 a builtin. */
852
853 static bool
854 stmt_generated_in_another_partition (struct graph *rdg, gimple stmt, int part,
855 VEC (bitmap, heap) *partitions)
856 {
857 int p;
858 bitmap pp;
859 unsigned i;
860 bitmap_iterator bi;
861
862 FOR_EACH_VEC_ELT (bitmap, partitions, p, pp)
863 if (p != part
864 && !can_generate_builtin (rdg, pp))
865 EXECUTE_IF_SET_IN_BITMAP (pp, 0, i, bi)
866 if (stmt == RDG_STMT (rdg, i))
867 return true;
868
869 return false;
870 }
871
872 /* For each partition in PARTITIONS that will be code generated using
873 a builtin, add its scalar computations used after the loop to
874 PARTITION. */
875
876 static void
877 add_scalar_computations_to_partition (struct graph *rdg,
878 VEC (bitmap, heap) *partitions,
879 bitmap partition)
880 {
881 int p;
882 bitmap pp;
883 unsigned i;
884 bitmap_iterator bi;
885 bitmap l = BITMAP_ALLOC (NULL);
886 bitmap pr = BITMAP_ALLOC (NULL);
887 bool f = false;
888
889 FOR_EACH_VEC_ELT (bitmap, partitions, p, pp)
890 if (can_generate_builtin (rdg, pp))
891 EXECUTE_IF_SET_IN_BITMAP (pp, 0, i, bi)
892 if (stmt_has_scalar_dependences_outside_loop (RDG_STMT (rdg, i))
893 && !stmt_generated_in_another_partition (rdg, RDG_STMT (rdg, i), p,
894 partitions))
895 rdg_flag_vertex_and_dependent (rdg, i, partition, l, pr, &f);
896
897 rdg_flag_loop_exits (rdg, l, partition, pr, &f);
898
899 BITMAP_FREE (pr);
900 BITMAP_FREE (l);
901 }
902
903 /* Aggregate several components into a useful partition that is
904 registered in the PARTITIONS vector. Partitions will be
905 distributed in different loops. */
906
907 static void
908 rdg_build_partitions (struct graph *rdg, VEC (rdgc, heap) *components,
909 VEC (int, heap) **other_stores,
910 VEC (bitmap, heap) **partitions, bitmap processed)
911 {
912 int i;
913 rdgc x;
914 bitmap partition = BITMAP_ALLOC (NULL);
915
916 FOR_EACH_VEC_ELT (rdgc, components, i, x)
917 {
918 bitmap np;
919 bool part_has_writes = false;
920 int v = VEC_index (int, x->vertices, 0);
921
922 if (bitmap_bit_p (processed, v))
923 continue;
924
925 np = build_rdg_partition_for_component (rdg, x, &part_has_writes);
926 bitmap_ior_into (partition, np);
927 bitmap_ior_into (processed, np);
928 BITMAP_FREE (np);
929
930 if (part_has_writes)
931 {
932 if (dump_file && (dump_flags & TDF_DETAILS))
933 {
934 fprintf (dump_file, "ldist useful partition:\n");
935 dump_bitmap (dump_file, partition);
936 }
937
938 VEC_safe_push (bitmap, heap, *partitions, partition);
939 partition = BITMAP_ALLOC (NULL);
940 }
941 }
942
943 /* Add the nodes from the RDG that were not marked as processed, and
944 that are used outside the current loop. These are scalar
945 computations that are not yet part of previous partitions. */
946 for (i = 0; i < rdg->n_vertices; i++)
947 if (!bitmap_bit_p (processed, i)
948 && rdg_defs_used_in_other_loops_p (rdg, i))
949 VEC_safe_push (int, heap, *other_stores, i);
950
951 /* If there are still statements left in the OTHER_STORES array,
952 create other components and partitions with these stores and
953 their dependences. */
954 if (VEC_length (int, *other_stores) > 0)
955 {
956 VEC (rdgc, heap) *comps = VEC_alloc (rdgc, heap, 3);
957 VEC (int, heap) *foo = VEC_alloc (int, heap, 3);
958
959 rdg_build_components (rdg, *other_stores, &comps);
960 rdg_build_partitions (rdg, comps, &foo, partitions, processed);
961
962 VEC_free (int, heap, foo);
963 free_rdg_components (comps);
964 }
965
966 add_scalar_computations_to_partition (rdg, *partitions, partition);
967
968 /* If there is something left in the last partition, save it. */
969 if (bitmap_count_bits (partition) > 0)
970 VEC_safe_push (bitmap, heap, *partitions, partition);
971 else
972 BITMAP_FREE (partition);
973
974 fuse_partitions_with_similar_memory_accesses (rdg, partitions);
975 }
976
977 /* Dump to FILE the PARTITIONS. */
978
979 static void
980 dump_rdg_partitions (FILE *file, VEC (bitmap, heap) *partitions)
981 {
982 int i;
983 bitmap partition;
984
985 FOR_EACH_VEC_ELT (bitmap, partitions, i, partition)
986 debug_bitmap_file (file, partition);
987 }
988
989 /* Debug PARTITIONS. */
990 extern void debug_rdg_partitions (VEC (bitmap, heap) *);
991
992 DEBUG_FUNCTION void
993 debug_rdg_partitions (VEC (bitmap, heap) *partitions)
994 {
995 dump_rdg_partitions (stderr, partitions);
996 }
997
998 /* Returns the number of read and write operations in the RDG. */
999
1000 static int
1001 number_of_rw_in_rdg (struct graph *rdg)
1002 {
1003 int i, res = 0;
1004
1005 for (i = 0; i < rdg->n_vertices; i++)
1006 {
1007 if (RDG_MEM_WRITE_STMT (rdg, i))
1008 ++res;
1009
1010 if (RDG_MEM_READS_STMT (rdg, i))
1011 ++res;
1012 }
1013
1014 return res;
1015 }
1016
1017 /* Returns the number of read and write operations in a PARTITION of
1018 the RDG. */
1019
1020 static int
1021 number_of_rw_in_partition (struct graph *rdg, bitmap partition)
1022 {
1023 int res = 0;
1024 unsigned i;
1025 bitmap_iterator ii;
1026
1027 EXECUTE_IF_SET_IN_BITMAP (partition, 0, i, ii)
1028 {
1029 if (RDG_MEM_WRITE_STMT (rdg, i))
1030 ++res;
1031
1032 if (RDG_MEM_READS_STMT (rdg, i))
1033 ++res;
1034 }
1035
1036 return res;
1037 }
1038
1039 /* Returns true when one of the PARTITIONS contains all the read or
1040 write operations of RDG. */
1041
1042 static bool
1043 partition_contains_all_rw (struct graph *rdg, VEC (bitmap, heap) *partitions)
1044 {
1045 int i;
1046 bitmap partition;
1047 int nrw = number_of_rw_in_rdg (rdg);
1048
1049 FOR_EACH_VEC_ELT (bitmap, partitions, i, partition)
1050 if (nrw == number_of_rw_in_partition (rdg, partition))
1051 return true;
1052
1053 return false;
1054 }
1055
1056 /* Generate code from STARTING_VERTICES in RDG. Returns the number of
1057 distributed loops. */
1058
1059 static int
1060 ldist_gen (struct loop *loop, struct graph *rdg,
1061 VEC (int, heap) *starting_vertices)
1062 {
1063 int i, nbp;
1064 VEC (rdgc, heap) *components = VEC_alloc (rdgc, heap, 3);
1065 VEC (bitmap, heap) *partitions = VEC_alloc (bitmap, heap, 3);
1066 VEC (int, heap) *other_stores = VEC_alloc (int, heap, 3);
1067 bitmap partition, processed = BITMAP_ALLOC (NULL);
1068
1069 remaining_stmts = BITMAP_ALLOC (NULL);
1070 upstream_mem_writes = BITMAP_ALLOC (NULL);
1071
1072 for (i = 0; i < rdg->n_vertices; i++)
1073 {
1074 bitmap_set_bit (remaining_stmts, i);
1075
1076 /* Save in OTHER_STORES all the memory writes that are not in
1077 STARTING_VERTICES. */
1078 if (RDG_MEM_WRITE_STMT (rdg, i))
1079 {
1080 int v;
1081 unsigned j;
1082 bool found = false;
1083
1084 FOR_EACH_VEC_ELT (int, starting_vertices, j, v)
1085 if (i == v)
1086 {
1087 found = true;
1088 break;
1089 }
1090
1091 if (!found)
1092 VEC_safe_push (int, heap, other_stores, i);
1093 }
1094 }
1095
1096 mark_nodes_having_upstream_mem_writes (rdg);
1097 rdg_build_components (rdg, starting_vertices, &components);
1098 rdg_build_partitions (rdg, components, &other_stores, &partitions,
1099 processed);
1100 BITMAP_FREE (processed);
1101 nbp = VEC_length (bitmap, partitions);
1102
1103 if (nbp <= 1
1104 || partition_contains_all_rw (rdg, partitions))
1105 goto ldist_done;
1106
1107 if (dump_file && (dump_flags & TDF_DETAILS))
1108 dump_rdg_partitions (dump_file, partitions);
1109
1110 FOR_EACH_VEC_ELT (bitmap, partitions, i, partition)
1111 if (!generate_code_for_partition (loop, partition, i < nbp - 1))
1112 goto ldist_done;
1113
1114 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1115 update_ssa (TODO_update_ssa_only_virtuals | TODO_update_ssa);
1116
1117 ldist_done:
1118
1119 BITMAP_FREE (remaining_stmts);
1120 BITMAP_FREE (upstream_mem_writes);
1121
1122 FOR_EACH_VEC_ELT (bitmap, partitions, i, partition)
1123 BITMAP_FREE (partition);
1124
1125 VEC_free (int, heap, other_stores);
1126 VEC_free (bitmap, heap, partitions);
1127 free_rdg_components (components);
1128 return nbp;
1129 }
1130
1131 /* Distributes the code from LOOP in such a way that producer
1132 statements are placed before consumer statements. When STMTS is
1133 NULL, performs the maximal distribution; if STMTS is not NULL,
1134 tries to separate only these statements from the LOOP's body.
1135 Returns the number of distributed loops. */
1136
1137 static int
1138 distribute_loop (struct loop *loop, VEC (gimple, heap) *stmts)
1139 {
1140 int res = 0;
1141 struct graph *rdg;
1142 gimple s;
1143 unsigned i;
1144 VEC (int, heap) *vertices;
1145 VEC (ddr_p, heap) *dependence_relations;
1146 VEC (data_reference_p, heap) *datarefs;
1147 VEC (loop_p, heap) *loop_nest;
1148
1149 if (loop->num_nodes > 2)
1150 {
1151 if (dump_file && (dump_flags & TDF_DETAILS))
1152 fprintf (dump_file,
1153 "FIXME: Loop %d not distributed: it has more than two basic blocks.\n",
1154 loop->num);
1155
1156 return res;
1157 }
1158
1159 datarefs = VEC_alloc (data_reference_p, heap, 10);
1160 dependence_relations = VEC_alloc (ddr_p, heap, 100);
1161 loop_nest = VEC_alloc (loop_p, heap, 3);
1162 rdg = build_rdg (loop, &loop_nest, &dependence_relations, &datarefs);
1163
1164 if (!rdg)
1165 {
1166 if (dump_file && (dump_flags & TDF_DETAILS))
1167 fprintf (dump_file,
1168 "FIXME: Loop %d not distributed: failed to build the RDG.\n",
1169 loop->num);
1170
1171 free_dependence_relations (dependence_relations);
1172 free_data_refs (datarefs);
1173 VEC_free (loop_p, heap, loop_nest);
1174 return res;
1175 }
1176
1177 vertices = VEC_alloc (int, heap, 3);
1178
1179 if (dump_file && (dump_flags & TDF_DETAILS))
1180 dump_rdg (dump_file, rdg);
1181
1182 FOR_EACH_VEC_ELT (gimple, stmts, i, s)
1183 {
1184 int v = rdg_vertex_for_stmt (rdg, s);
1185
1186 if (v >= 0)
1187 {
1188 VEC_safe_push (int, heap, vertices, v);
1189
1190 if (dump_file && (dump_flags & TDF_DETAILS))
1191 fprintf (dump_file,
1192 "ldist asked to generate code for vertex %d\n", v);
1193 }
1194 }
1195
1196 res = ldist_gen (loop, rdg, vertices);
1197 VEC_free (int, heap, vertices);
1198 free_rdg (rdg);
1199 free_dependence_relations (dependence_relations);
1200 free_data_refs (datarefs);
1201 VEC_free (loop_p, heap, loop_nest);
1202 return res;
1203 }
1204
1205 /* Distribute all loops in the current function. */
1206
1207 static unsigned int
1208 tree_loop_distribution (void)
1209 {
1210 struct loop *loop;
1211 loop_iterator li;
1212 int nb_generated_loops = 0;
1213
1214 FOR_EACH_LOOP (li, loop, 0)
1215 {
1216 VEC (gimple, heap) *work_list = NULL;
1217 int num = loop->num;
1218
1219 /* If the loop doesn't have a single exit we will fail anyway,
1220 so bail out early. */
1221 if (!single_exit (loop))
1222 continue;
1223
1224 /* If both flag_tree_loop_distribute_patterns and
1225 flag_tree_loop_distribution are set, then only
1226 distribute_patterns is executed. */
1227 if (flag_tree_loop_distribute_patterns)
1228 {
1229 /* With the following work list, we're asking
1230 distribute_loop to separate the stores of the form
1231 "A[i] = 0" from the rest of the loop. */
1232 stores_zero_from_loop (loop, &work_list);
1233
1234 /* Do nothing if there are no patterns to be distributed. */
1235 if (VEC_length (gimple, work_list) > 0)
1236 nb_generated_loops = distribute_loop (loop, work_list);
1237 }
1238 else if (flag_tree_loop_distribution)
1239 {
1240 /* With the following work list, we're asking
1241 distribute_loop to separate the stores of the loop: when
1242 dependences allow, it will end up with one store per
1243 loop. */
1244 stores_from_loop (loop, &work_list);
1245
1246 /* A simple heuristic for cache locality is to not split
1247 stores to the same array. Without this call, an unrolled
1248 loop would be split into as many loops as the unroll factor,
1249 each loop storing in the same array. */
1250 remove_similar_memory_refs (&work_list);
1251
1252 nb_generated_loops = distribute_loop (loop, work_list);
1253 }
1254
1255 if (dump_file && (dump_flags & TDF_DETAILS))
1256 {
1257 if (nb_generated_loops > 1)
1258 fprintf (dump_file, "Loop %d distributed: split to %d loops.\n",
1259 num, nb_generated_loops);
1260 else
1261 fprintf (dump_file, "Loop %d is the same.\n", num);
1262 }
1263
1264 verify_loop_structure ();
1265
1266 VEC_free (gimple, heap, work_list);
1267 }
1268
1269 return 0;
1270 }
1271
1272 static bool
1273 gate_tree_loop_distribution (void)
1274 {
1275 return flag_tree_loop_distribution
1276 || flag_tree_loop_distribute_patterns;
1277 }
1278
1279 struct gimple_opt_pass pass_loop_distribution =
1280 {
1281 {
1282 GIMPLE_PASS,
1283 "ldist", /* name */
1284 gate_tree_loop_distribution, /* gate */
1285 tree_loop_distribution, /* execute */
1286 NULL, /* sub */
1287 NULL, /* next */
1288 0, /* static_pass_number */
1289 TV_TREE_LOOP_DISTRIBUTION, /* tv_id */
1290 PROP_cfg | PROP_ssa, /* properties_required */
1291 0, /* properties_provided */
1292 0, /* properties_destroyed */
1293 0, /* todo_flags_start */
1294 TODO_dump_func /* todo_flags_finish */
1295 }
1296 };
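
/* Usage note (illustrative, not from the original sources): the pass
   runs when -ftree-loop-distribution or
   -ftree-loop-distribute-patterns is given, e.g.

   |gcc -O2 -ftree-loop-distribution -fdump-tree-ldist-details foo.c

   where the dump flags make the decisions of this pass ("ldist
   useful partition", "Loop N distributed: ...") visible in the
   "ldist" dump file.  */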