gcc/ddg.c
/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "sched-int.h"
#include "target.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "sbitmap.h"
#include "expr.h"
#include "bitmap.h"
#include "ddg.h"

#ifdef INSN_SCHEDULING

/* A flag indicating whether a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static int
mark_mem_use (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (*x))
    mem_ref_p = true;
  return 0;
}

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_mem_use, data);
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use_1, NULL);
  return mem_ref_p;
}

/* Auxiliary function for mem_write_insn_p.  */
static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X accesses memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}
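
/* For illustration (not part of the original code): given an insn whose
   pattern is (set (mem (reg A)) (reg B)), mem_write_insn_p returns true
   and mem_read_insn_p returns false, since note_uses visits only the
   address of a stored-to MEM; for (set (reg B) (mem (reg A))) the two
   answers are swapped.  mem_access_insn_p returns true in both cases.  */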

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                      : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);
  gcc_assert (link);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-dep edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-dep edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).  TODO:
     support the removal of all anti-dep edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves && (t == ANTI_DEP && dt == REG_DEP))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers for which REG_P does not hold, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}
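
/* Note added for exposition: every edge created by the function above has
   distance 0, since it connects two instructions of the same iteration;
   cross-iteration edges (distance 1) are created separately through
   create_ddg_dep_no_link below.  */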

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}


/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
                                  REG_DEP, 1);
        }
      else if (!DEBUG_INSN_P (use_insn))
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.  */
          ddg_node_ptr first_def_node = get_node_of_insn (g,
                                                          DF_REF_INSN (first_def));

          gcc_assert (first_def_node);

          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves)
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }
  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
        return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
                              OUTPUT_DEP, REG_DEP, 1);
    }
}
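
/* Worked example, added for exposition: for a single-block loop whose only
   relevant insn is "r1 = r1 + 1", LAST_DEF is the def of r1 in that insn
   and its use chain contains the use of r1 in the same insn, so
   use_node->cuid == last_def_node->cuid and a TRUE_DEP of distance 1 is
   created from the node to itself.  Since has_use_in_bb_p becomes true,
   no inter-loop output dependence is added.  */
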
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
    {
      df_ref rd = DF_DEFS_GET (rd_num);

      add_cross_iteration_register_deps (g, rd);
    }
}


/* Helper for insns_may_alias_p; called via for_each_rtx.  */
static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (may_alias_p (*x, mem))
        return 1;

      return -1;
    }
  return 0;
}

/* Helper for insns_may_alias_p; called via for_each_rtx.  */
static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
        /* Indicate that dependence was determined and stop traversal.  */
        return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if the two specified instructions have memory expressions whose
   alias sets conflict.  */
static int
insns_may_alias_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
                       &PATTERN (insn2));
}
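
/* Note on the traversal above, added for clarity: for_each_rtx stops and
   returns any positive value returned by its callback, while a negative
   return value means "do not recurse into this sub-rtx" and 0 means
   "continue".  Thus walk_mems_2 reports a possible alias by returning 1,
   and walk_mems_1 makes insns_may_alias_p return nonzero as soon as some
   MEM of INSN1 may alias some MEM of INSN2.  */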

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
            create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
          else
            create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}
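
/* Summary of the edges created above, added for clarity: write->read pairs
   get an inter-loop TRUE_DEP, write->write pairs an OUTPUT_DEP, and
   read->write pairs an ANTI_DEP plus a TRUE_DEP in the opposite direction,
   all with distance 1.  Debug insns only ever receive ANTI_DEPs, matching
   the asserts in create_ddg_dep_no_link.  */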

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx head, tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
        continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          ddg_node_ptr src_node = get_node_of_insn (g, DEP_PRO (dep));

          if (!src_node)
            continue;

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn accesses memory, add an edge to all previous insns
         that access memory.  */
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];
              if (DEBUG_INSN_P (j_node->insn))
                continue;
              if (mem_access_insn_p (j_node->insn))
                /* Don't bother calculating inter-loop dep if an intra-loop dep
                   already exists.  */
                if (! TEST_BIT (dest_node->successors, j))
                  add_inter_loop_mem_dep (g, dest_node, j_node);
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}


/* Given a basic block, create its DDG and return a pointer to the ddg
   structure that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx insn, first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (DEBUG_INSN_P (insn))
        g->num_debug++;
      else
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
        }
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL_RTX;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        {
          if (! first_note && NOTE_P (insn)
              && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
            first_note = insn;
          continue;
        }
      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }
      else if (GET_CODE (PATTERN (insn)) == USE)
        {
          if (! first_note)
            first_note = insn;
          continue;
        }

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL_RTX;
    }

  /* We must have found a branch in the DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}
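
/* Illustrative usage sketch (not taken from the original sources): a caller
   such as the modulo scheduler would typically do

     ddg_ptr g = create_ddg (bb, 0);
     if (g)
       {
         ddg_all_sccs_ptr sccs = create_ddg_all_sccs (g);
         ...
         free_ddg_all_sccs (sccs);
         free_ddg (g);
       }

   where BB is the single basic block of the candidate loop.  */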

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

/* Print the given DDG edge E to FILE.  */
void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP:
      dep_c = 'O';
      break;
    case ANTI_DEP:
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");

          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}

/* Dump the SCCs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_SBITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  SET_BIT (src->successors, dest->cuid);
  SET_BIT (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}


/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
        {
          /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
          continue;
        }
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}
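
/* Example, added for exposition: a backarc of latency 1 and distance 1 that
   closes a cycle whose forward part has a longest simple path of latency 3
   contributes (3 + 1) / 1 = 4 to the recurrence length of its SCC.  */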

/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  sbitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
        if (TEST_BIT (nodes, e->dest->cuid))
          {
            e->aux.count = IN_SCC;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Free the memory allocated for a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}


/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      sbitmap_a_or_b (succ, succ, node_succ);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      sbitmap_a_or_b (preds, preds, node_preds);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (preds, preds, ops);
}


/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *) s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *) s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_zero (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!sbitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!sbitmap_any_common_bits (tmp, sccs->sccs[i]->nodes));
      sbitmap_a_or_b (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
    xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
        continue;

      sbitmap_zero (scc_nodes);
      sbitmap_zero (from);
      sbitmap_zero (to);
      SET_BIT (from, dest->cuid);
      SET_BIT (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes);
          add_scc_to_ddg (sccs, scc);
        }
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}
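
/* Note added for clarity: SCCs are discovered above by taking, for each
   backarc not yet assigned to an SCC, all nodes lying on paths from the
   backarc's destination to its source (find_nodes_on_paths); nodes that do
   not lie on any cycle therefore belong to no SCC.  */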

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs);
}


/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_copy (reachable_from, from);
  sbitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!TEST_BIT (reachable_from, v))
                {
                  SET_BIT (reachable_from, v);
                  SET_BIT (tmp, v);
                  change = 1;
                }
            }
        }
    }

  sbitmap_copy (reach_to, to);
  sbitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!TEST_BIT (reach_to, v))
                {
                  SET_BIT (reach_to, v);
                  SET_BIT (tmp, v);
                  change = 1;
                }
            }
        }
    }

  answer = sbitmap_a_and_b_cg (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}


/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (TEST_BIT (nodes, v)
          && (e->distance == 0)
          && (v_node->aux.count < u_node->aux.count + e->latency))
        {
          v_node->aux.count = u_node->aux.count + e->latency;
          SET_BIT (tmp, v);
          result = 1;
        }
    }
  return result;
}


/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg *g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* aux.count will hold the distance of the longest path found so far from
     SRC to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  sbitmap_zero (tmp);
  SET_BIT (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_node_ptr u_node = &g->nodes[u];

          change |= update_dist_to_successors (u_node, nodes, tmp);
        }
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}
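
/* Note added for clarity: the relaxation above follows only edges of
   distance 0, which in this file always go from a smaller to a larger cuid,
   so the iteration terminates.  The result is -1 when DEST is not reachable
   from SRC through NODES; e.g. with edges SRC->A of latency 1 and A->DEST
   of latency 2 (both of distance 0) the result is 3.  */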

#endif /* INSN_SCHEDULING */