/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "sched-int.h"
#include "target.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "sbitmap.h"
#include "expr.h"
#include "bitmap.h"
#include "ddg.h"

#ifdef INSN_SCHEDULING

/* A flag indicating whether a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);
\f
/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static int
mark_mem_use (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (*x))
    mem_ref_p = true;
  return 0;
}

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_mem_use, data);
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use_1, NULL);
  return mem_ref_p;
}

static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X has access to memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains address being auto-inc or auto-dec
   which is used in USE_INSN.  Otherwise return false.  The result is
   being used to decide whether to remove the edge between def_insn and
   use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg_moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
bool
autoinc_var_is_used_p (rtx def_insn, rtx use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
        && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}
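
/* For example, on a hypothetical target with post-increment addressing,
   DEF_INSN might be a load "reg2 = [reg1++]" carrying a REG_INC note for
   reg1.  If USE_INSN also references reg1 (say "reg3 = [reg1]"), this
   function returns true and the caller keeps the dependence edge, so that
   no reg-move is created for reg1.  */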

/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx insn)
{
  df_ref *def;

  for (def = DF_INSN_DEFS (insn); *def; def++)
    {
      enum machine_mode mode = GET_MODE (DF_REF_REG (*def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                      : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);
  gcc_assert (link);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-deps edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-deps edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc or auto-dec in DEST_NODE
     is used in SRC_NODE then do not remove the edge to make sure
     reg-moves will not be created for this address.
     TODO: support the removal of all anti-deps edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers for which REG_P does not hold, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}
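
/* For example, in a hypothetical single-block loop kernel

       I1:  r2 = r1 + 1       ;; use of r1
       I2:  r1 = [mem]        ;; the only def of r1 in the loop

   the intra-loop anti dep I1 -> I2 on r1 is not created when
   -fmodulo-sched-allow-regmoves is in effect, so I2 may be scheduled
   above I1; the value of r1 that I1 still needs is then carried by a
   reg-move generated from the life-range analysis.  */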

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}


/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
                               DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
                                  REG_DEP, 1);
        }
      else if (!DEBUG_INSN_P (use_insn))
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.  */
          ddg_node_ptr first_def_node = get_node_of_insn (g,
                                                          DF_REF_INSN (first_def));

          gcc_assert (first_def_node);

          /* Always create the edge if the use node is a branch in
             order to prevent the creation of reg-moves.
             If the address that is being auto-inc or auto-dec in LAST_DEF
             is used in USE_INSN then do not remove the edge to make sure
             reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
              || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
              || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }
  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
        return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
                              OUTPUT_DEP, REG_DEP, 1);
    }
}
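
/* For instance, in a hypothetical loop body

       I1:  r3 = r1 + 1       ;; use of r1 before its def (upwards exposed)
       I2:  r1 = [mem]        ;; first and last def of r1 in the block
       I3:  [mem2] = r1       ;; use of r1 after its def

   the def in I2 is downwards exposed, so the routine above adds a
   distance-1 true dep I2 -> I1 (I2 of iteration K feeds I1 of iteration
   K+1) and a distance-1 anti dep I3 -> I2 (I3 of iteration K must read r1
   before I2 of iteration K+1 overwrites it), unless that anti dep is
   intentionally dropped in favor of a reg-move.  A distance-1 output dep
   from the last def to the first def is added only when the register has
   no use in the block.  */
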
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
    {
      df_ref rd = DF_DEFS_GET (rd_num);

      add_cross_iteration_register_deps (g, rd);
    }
}


static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (may_alias_p (*x, mem))
        return 1;

      return -1;
    }
  return 0;
}

static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
        /* Indicate that dependence was determined and stop traversal.  */
        return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if the two specified instructions have memory expressions whose
   alias sets conflict.  */
static int
insns_may_alias_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
                       &PATTERN (insn2));
}

/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets
       or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 0);
      else
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
            create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
          else
            create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}
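
/* For example, if a hypothetical loop body contains

       I1:  [a + i] = r1        ;; store
       I2:  r2 = [a + i - 4]    ;; load that may alias the store

   then the store of one iteration may feed the load of a later iteration,
   and the routines above add distance-1 memory dependence edges between
   the two nodes (true and anti, as appropriate), since the exact
   cross-iteration distance is not analyzed here.  */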

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx head, tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
        continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          ddg_node_ptr src_node = get_node_of_insn (g, DEP_PRO (dep));

          if (!src_node)
            continue;

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn modifies memory, add an edge to all insns that access
         memory.  */
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];
              if (DEBUG_INSN_P (j_node->insn))
                continue;
              if (mem_access_insn_p (j_node->insn))
                {
                  /* Don't bother calculating inter-loop dep if an intra-loop dep
                     already exists.  */
                  if (! TEST_BIT (dest_node->successors, j))
                    add_inter_loop_mem_dep (g, dest_node, j_node);
                  /* If -fmodulo-sched-allow-regmoves
                     is set certain anti-dep edges are not created.
                     It might be that these anti-dep edges are on the
                     path from one memory instruction to another such that
                     removing these edges could cause a violation of the
                     memory dependencies.  Thus we add intra edges between
                     every two memory instructions in this case.  */
                  if (flag_modulo_sched_allow_regmoves
                      && !TEST_BIT (dest_node->predecessors, j))
                    add_intra_loop_mem_dep (g, j_node, dest_node);
                }
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}


/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx insn, first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (DEBUG_INSN_P (insn))
        g->num_debug++;
      else
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
        }
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL_RTX;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        {
          if (! first_note && NOTE_P (insn)
              && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
            first_note = insn;
          continue;
        }
      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }
      else if (GET_CODE (PATTERN (insn)) == USE)
        {
          if (! first_note)
            first_note = insn;
          continue;
        }

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL_RTX;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);


  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}
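
/* A typical caller (the modulo scheduler) uses the DDG roughly as in the
   following sketch; this only illustrates the interface, it is not code
   taken from modulo-sched.c:

       ddg_ptr g = create_ddg (loop_body_bb, 0);
       if (g)
         {
           ddg_all_sccs_ptr sccs = create_ddg_all_sccs (g);
           int rec_mii = sccs->num_sccs > 0
                         ? sccs->sccs[0]->recurrence_length : 0;
           ... schedule using G and REC_MII ...
           free_ddg_all_sccs (sccs);
           free_ddg (g);
         }
  */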

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP :
      dep_c = 'O';
      break;
    case ANTI_DEP :
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");

          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}
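
/* The resulting dump looks roughly like the following (uids, cuids and
   labels are made up for illustration; each node's info1 field holds the
   printed RTL of its insn):

       graph: {
       node: {title: "0_17" info1: "... insn 17 ..."}
       edge: { sourcename: "0_17" targetname: "1_18" label: "1_0"}
       backedge: {color: red sourcename: "1_18" targetname: "0_17" label: "2_1"}
       }
  */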

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_SBITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  SET_BIT (src->successors, dest->cuid);
  SET_BIT (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}


\f
/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
        {
          /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
          continue;
        }
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}
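
/* As a worked example (the numbers are illustrative only): if an SCC has a
   single backarc of latency 1 and distance 2, and the longest path of
   forward (distance-0) edges from the backarc's head to its tail has
   accumulated latency 5, the cycle carries a total latency of 5 + 1 = 6
   over 2 iterations, so recurrence_length = 6 / 2 = 3.  */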

/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  sbitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
        if (TEST_BIT (nodes, e->dest->cuid))
          {
            e->aux.count = IN_SCC;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Free the memory allocated for a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}


/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      sbitmap_a_or_b (succ, succ, node_succ);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      sbitmap_a_or_b (preds, preds, node_preds);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (preds, preds, ops);
}


/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *)s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *)s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_zero (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!sbitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!sbitmap_any_common_bits (tmp, sccs->sccs[i]->nodes));
      sbitmap_a_or_b (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
    xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
        continue;

      sbitmap_zero (scc_nodes);
      sbitmap_zero (from);
      sbitmap_zero (to);
      SET_BIT (from, dest->cuid);
      SET_BIT (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes);
          add_scc_to_ddg (sccs, scc);
        }
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}
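
/* The SCC discovery above relies on the fact that every cycle in the DDG
   contains a backarc (an edge with distance > 0): for each backarc
   SRC -> DEST not yet assigned to an SCC, the nodes lying on paths from
   DEST back to SRC, together with the backarc itself, form a strongly
   connected component.  For example, with forward edges a -> b -> c and a
   backarc c -> a, the component found is {a, b, c}.  */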

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

\f
/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_copy (reachable_from, from);
  sbitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!TEST_BIT (reachable_from, v))
                {
                  SET_BIT (reachable_from, v);
                  SET_BIT (tmp, v);
                  change = 1;
                }
            }
        }
    }

  sbitmap_copy (reach_to, to);
  sbitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!TEST_BIT (reach_to, v))
                {
                  SET_BIT (reach_to, v);
                  SET_BIT (tmp, v);
                  change = 1;
                }
            }
        }
    }

  answer = sbitmap_a_and_b_cg (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}
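
/* In other words, RESULT is the intersection of the forward reachability
   closure of FROM (following out-edges) and the backward reachability
   closure of TO (following in-edges); a node is kept exactly when some
   path from FROM to TO passes through it.  */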


/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at-least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (TEST_BIT (nodes, v)
          && (e->distance == 0)
          && (v_node->aux.count < u_node->aux.count + e->latency))
        {
          v_node->aux.count = u_node->aux.count + e->latency;
          SET_BIT (tmp, v);
          result = 1;
        }
    }
  return result;
}


/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  sbitmap_zero (tmp);
  SET_BIT (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
        {
          ddg_node_ptr u_node = &g->nodes[u];

          change |= update_dist_to_successors (u_node, nodes, tmp);
        }
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}
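
/* The longest path is computed by repeated relaxation over the distance-0
   (intra-iteration) edges only; these edges always go from a lower cuid to
   a higher cuid, so they form a DAG and the iteration terminates.  For
   example, with edges a -> b and b -> c of latencies 2 and 3 and a direct
   edge a -> c of latency 4, the value returned for a longest path from a
   to c is 5.  */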

#endif /* INSN_SCHEDULING */