/* Utilities for ipa analysis.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "bitmap.h"
#include "ipa-reference.h"
#include "flags.h"
#include "diagnostic.h"
#include "lto-streamer.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"

/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
                 const char * note,
                 struct cgraph_node** order,
                 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    order[i]->dump (out);
  fprintf (out, "\n");
  fflush (out);
}


struct searchc_env {
  struct cgraph_node **stack;
  int stack_size;
  struct cgraph_node **result;
  int order_pos;
  splay_tree nodes_marked_new;
  bool reduce;
  bool allow_overwritable;
  int count;
};

/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho, Hopcroft and Ullman's The Design and
   Analysis of Computer Algorithms (1974), pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
         bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->aux;

  /* Mark the node as old.  */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      if (w->aux
          && (avail > AVAIL_INTERPOSABLE
              || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
        {
          w_info = (struct ipa_dfs_info *) w->aux;
          if (w_info->new_node)
            {
              searchc (env, w, ignore_edge);
              v_info->low_link =
                (v_info->low_link < w_info->low_link) ?
                v_info->low_link : w_info->low_link;
            }
          else
            if ((w_info->dfn_number < v_info->dfn_number)
                && (w_info->on_stack))
              v_info->low_link =
                (w_info->dfn_number < v_info->low_link) ?
                w_info->dfn_number : v_info->low_link;
        }
    }

  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
        x = env->stack[--(env->stack_size)];
        x_info = (struct ipa_dfs_info *) x->aux;
        x_info->on_stack = false;
        x_info->scc_no = v_info->dfn_number;

        if (env->reduce)
          {
            x_info->next_cycle = last;
            last = x;
          }
        else
          env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
        env->result[env->order_pos++] = v;
    }
}

/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL, is a hook that may make some edges insignificant
   for the topological sort.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
                       bool reduce, bool allow_overwritable,
                       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = node->get_availability ();

      if (avail > AVAIL_INTERPOSABLE
          || (allow_overwritable
              && (avail == AVAIL_INTERPOSABLE)))
        {
          /* Reuse the info if it is already there.  */
          struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
          if (!info)
            info = XCNEW (struct ipa_dfs_info);
          info->new_node = true;
          info->on_stack = false;
          info->next_cycle = NULL;
          node->aux = info;

          splay_tree_insert (env.nodes_marked_new,
                             (splay_tree_key)node->uid,
                             (splay_tree_value)node);
        }
      else
        node->aux = NULL;
    }
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
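
/* Usage sketch (illustrative only, not part of the original file; the
   function name is hypothetical): a caller is expected to allocate ORDER
   itself, run the reducing walk, and then release both the aux info and
   the array.  */
#if 0
static void
example_reduced_postorder_walk (void)
{
  struct cgraph_node **order
    = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int n = ipa_reduced_postorder (order, true, false, NULL);

  /* order[0] .. order[n - 1] hold one representative per strongly
     connected component, callees before callers.  */
  for (int i = 0; i < n; i++)
    order[i]->dump (stderr);

  ipa_free_postorder_info ();
  free (order);
}
#endif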

/* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of
   call graph nodes.  */

void
ipa_free_postorder_info (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      /* Get rid of the aux information.  */
      if (node->aux)
        {
          free (node->aux);
          node->aux = NULL;
        }
    }
}

/* Get the set of nodes for the cycle in the reduced call graph starting
   from NODE.  */

vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
  vec<cgraph_node *> v = vNULL;
  struct ipa_dfs_info *node_dfs_info;
  while (node)
    {
      v.safe_push (node);
      node_dfs_info = (struct ipa_dfs_info *) node->aux;
      node = node_dfs_info->next_cycle;
    }
  return v;
}
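
/* Usage sketch (illustrative; the function name is hypothetical): after a
   reducing ipa_reduced_postorder walk, the members of the component
   represented by REP can be visited like this.  The returned vector is
   heap allocated and must be released by the caller.  */
#if 0
static void
example_visit_cycle_members (struct cgraph_node *rep)
{
  int i;
  cgraph_node *member;
  vec<cgraph_node *> cycle = ipa_get_nodes_in_cycle (rep);
  FOR_EACH_VEC_ELT (cycle, i, member)
    member->dump (stderr);
  cycle.release ();
}
#endif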

/* Return true iff the CS is an edge within a strongly connected component as
   computed by ipa_reduced_postorder.  */

bool
ipa_edge_within_scc (struct cgraph_edge *cs)
{
  struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
  struct ipa_dfs_info *callee_dfs;
  struct cgraph_node *callee = cs->callee->function_symbol ();

  callee_dfs = (struct ipa_dfs_info *) callee->aux;
  return (caller_dfs
          && callee_dfs
          && caller_dfs->scc_no == callee_dfs->scc_no);
}
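
/* Usage sketch (illustrative): propagation passes typically use this
   predicate to avoid cycling values around an SCC, along the lines of

     for (cs = node->callees; cs; cs = cs->next_callee)
       if (!ipa_edge_within_scc (cs))
         propagate_across_edge (cs);

   where propagate_across_edge stands in for pass-specific work.  */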

struct postorder_stack
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;
  int ref;
};

/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref = NULL;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, symtab->cgraph_count);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->aux
          && (pass
              || (!node->address_taken
                  && !node->global.inlined_to
                  && !node->alias && !node->thunk.thunk_p
                  && !node->only_called_directly_p ())))
        {
          stack_size = 0;
          stack[stack_size].node = node;
          stack[stack_size].edge = node->callers;
          stack[stack_size].ref = 0;
          node->aux = (void *)(size_t)1;
          while (stack_size >= 0)
            {
              while (true)
                {
                  node2 = NULL;
                  while (stack[stack_size].edge && !node2)
                    {
                      edge = stack[stack_size].edge;
                      node2 = edge->caller;
                      stack[stack_size].edge = edge->next_caller;
                      /* Break possible cycles involving always-inline
                         functions by ignoring edges from always-inline
                         functions to non-always-inline functions.  */
                      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
                          && !DECL_DISREGARD_INLINE_LIMITS
                               (edge->callee->function_symbol ()->decl))
                        node2 = NULL;
                    }
                  for (; stack[stack_size].node->iterate_referring
                           (stack[stack_size].ref, ref) && !node2;
                       stack[stack_size].ref++)
                    {
                      if (ref->use == IPA_REF_ALIAS)
                        node2 = dyn_cast <cgraph_node *> (ref->referring);
                    }
                  if (!node2)
                    break;
                  if (!node2->aux)
                    {
                      stack[++stack_size].node = node2;
                      stack[stack_size].edge = node2->callers;
                      stack[stack_size].ref = 0;
                      node2->aux = (void *)(size_t)1;
                    }
                }
              order[order_pos++] = stack[stack_size--].node;
            }
        }
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->aux = NULL;
  return order_pos;
}
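
/* Usage sketch (illustrative; the function name is hypothetical): unlike
   the reduced walk above, this one clears the aux pointers itself, so
   only the ORDER array needs releasing.  */
#if 0
static void
example_reverse_postorder_walk (void)
{
  struct cgraph_node **order
    = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int n = ipa_reverse_postorder (order);
  for (int i = 0; i < n; i++)
    order[i]->dump (stderr);
  free (order);
}
#endif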


/* Given a memory reference T, return the variable at the bottom
   of the access.  Unlike get_base_address, this will recurse through
   INDIRECT_REFs.  */

tree
get_base_var (tree t)
{
  while (!SSA_VAR_P (t)
         && (!CONSTANT_CLASS_P (t))
         && TREE_CODE (t) != LABEL_DECL
         && TREE_CODE (t) != FUNCTION_DECL
         && TREE_CODE (t) != CONST_DECL
         && TREE_CODE (t) != CONSTRUCTOR)
    {
      t = TREE_OPERAND (t, 0);
    }
  return t;
}
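
/* Worked example (illustrative; the helper is hypothetical): for a tree
   representing (*p).f[2], the loop peels the ARRAY_REF, COMPONENT_REF
   and INDIRECT_REF wrappers in turn and returns P itself.  */
#if 0
static tree
example_base_of_deref (tree p)
{
  /* Build *p; assumes P is a pointer-typed SSA name or decl.  */
  tree deref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (p)), p);
  return get_base_var (deref);  /* Returns P.  */
}
#endif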


/* SRC and DST are going to be merged.  Take SRC's profile and merge it into
   DST so it is not going to be lost.  Destroy SRC's body on the way.  */

void
ipa_merge_profiles (struct cgraph_node *dst,
                    struct cgraph_node *src)
{
  tree oldsrcdecl = src->decl;
  struct function *srccfun, *dstcfun;
  bool match = true;

  if (!src->definition
      || !dst->definition)
    return;
  if (src->frequency < dst->frequency)
    src->frequency = dst->frequency;

  /* Time profiles are merged.  */
  if (dst->tp_first_run > src->tp_first_run && src->tp_first_run)
    dst->tp_first_run = src->tp_first_run;

  if (src->profile_id && !dst->profile_id)
    dst->profile_id = src->profile_id;

  if (!dst->count)
    return;
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "Merging profiles of %s/%i to %s/%i\n",
               xstrdup_for_dump (src->name ()), src->order,
               xstrdup_for_dump (dst->name ()), dst->order);
    }
  dst->count += src->count;

  /* This is ugly.  We need to get both function bodies into memory.
     If declaration is merged, we need to duplicate it to be able
     to load body that is being replaced.  This makes symbol table
     temporarily inconsistent.  */
  if (src->decl == dst->decl)
    {
      struct lto_in_decl_state temp;
      struct lto_in_decl_state *state;

      /* We are going to move the decl; we want to remove its file decl
         data and link it with the new decl.  */
      temp.fn_decl = src->decl;
      lto_in_decl_state **slot
        = src->lto_file_data->function_decl_states->find_slot (&temp,
                                                               NO_INSERT);
      state = *slot;
      src->lto_file_data->function_decl_states->clear_slot (slot);
      gcc_assert (state);

      /* Duplicate the decl and be sure it does not link into body of DST.  */
      src->decl = copy_node (src->decl);
      DECL_STRUCT_FUNCTION (src->decl) = NULL;
      DECL_ARGUMENTS (src->decl) = NULL;
      DECL_INITIAL (src->decl) = NULL;
      DECL_RESULT (src->decl) = NULL;

      /* Associate the decl state with the new declaration, so the LTO
         streamer can look it up.  */
      state->fn_decl = src->decl;
      slot
        = src->lto_file_data->function_decl_states->find_slot (state, INSERT);
      gcc_assert (!*slot);
      *slot = state;
    }
  src->get_untransformed_body ();
  dst->get_untransformed_body ();
  srccfun = DECL_STRUCT_FUNCTION (src->decl);
  dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
  if (n_basic_blocks_for_fn (srccfun)
      != n_basic_blocks_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; number of basic blocks mismatch.\n");
      match = false;
    }
  else if (last_basic_block_for_fn (srccfun)
           != last_basic_block_for_fn (dstcfun))
    {
      if (symtab->dump_file)
        fprintf (symtab->dump_file,
                 "Giving up; last block mismatch.\n");
      match = false;
    }
  else
    {
      basic_block srcbb, dstbb;

      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          if (dstbb == NULL)
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "No matching block for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          if (EDGE_COUNT (srcbb->succs) != EDGE_COUNT (dstbb->succs))
            {
              if (symtab->dump_file)
                fprintf (symtab->dump_file,
                         "Edge count mismatch for bb %i.\n",
                         srcbb->index);
              match = false;
              break;
            }
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              if (srce->dest->index != dste->dest->index)
                {
                  if (symtab->dump_file)
                    fprintf (symtab->dump_file,
                             "Succ edge mismatch for bb %i.\n",
                             srce->dest->index);
                  match = false;
                  break;
                }
            }
        }
    }
  if (match)
    {
      struct cgraph_edge *e, *e2;
      basic_block srcbb, dstbb;

      /* TODO: merge also statement histograms.  */
      FOR_ALL_BB_FN (srcbb, srccfun)
        {
          unsigned int i;

          dstbb = BASIC_BLOCK_FOR_FN (dstcfun, srcbb->index);
          dstbb->count += srcbb->count;
          for (i = 0; i < EDGE_COUNT (srcbb->succs); i++)
            {
              edge srce = EDGE_SUCC (srcbb, i);
              edge dste = EDGE_SUCC (dstbb, i);
              dste->count += srce->count;
            }
        }
      push_cfun (dstcfun);
      counts_to_freqs ();
      compute_function_frequency ();
      pop_cfun ();
      for (e = dst->callees; e; e = e->next_callee)
        {
          if (e->speculative)
            continue;
          e->count = gimple_bb (e->call_stmt)->count;
          e->frequency = compute_call_stmt_bb_frequency
                           (dst->decl, gimple_bb (e->call_stmt));
        }
      for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
          gcov_type count = gimple_bb (e->call_stmt)->count;
          int freq = compute_call_stmt_bb_frequency
                       (dst->decl, gimple_bb (e->call_stmt));
          /* When the call is speculative, we need to re-distribute
             probabilities the same way as they were.  This is not really
             correct because in the other copy the speculation may differ;
             but probably it is not really worth the effort.  */
          if (e->speculative)
            {
              cgraph_edge *direct, *indirect;
              cgraph_edge *direct2 = NULL, *indirect2 = NULL;
              ipa_ref *ref;

              e->speculative_call_info (direct, indirect, ref);
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
              if (indirect->count || direct->count)
                {
                  /* We should have mismatched earlier if there is no
                     matching indirect edge.  */
                  if (!e2)
                    {
                      if (dump_file)
                        fprintf (dump_file,
                                 "Mismatch in merging indirect edges\n");
                    }
                  else if (!e2->speculative)
                    indirect->count += e2->count;
                  else if (e2->speculative)
                    {
                      if (DECL_ASSEMBLER_NAME (direct2->callee->decl)
                          != DECL_ASSEMBLER_NAME (direct->callee->decl))
                        {
                          if (direct2->count >= direct->count)
                            {
                              direct->redirect_callee (direct2->callee);
                              indirect->count += indirect2->count
                                                 + direct->count;
                              direct->count = direct2->count;
                            }
                          else
                            indirect->count += indirect2->count
                                               + direct2->count;
                        }
                      else
                        {
                          direct->count += direct2->count;
                          indirect->count += indirect2->count;
                        }
                    }
                  int prob = RDIV (direct->count * REG_BR_PROB_BASE,
                                   direct->count + indirect->count);
                  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
                  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
                                              REG_BR_PROB_BASE);
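                  /* Worked example (illustrative numbers, not from any
                     profile): with direct->count == 90 and
                     indirect->count == 10, prob is 90% of
                     REG_BR_PROB_BASE, so the call-site frequency FREQ is
                     split 90/10 between the direct and the indirect
                     edge.  */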
                }
              else
                /* At the moment we should have only profile feedback based
                   speculations when merging.  */
                gcc_unreachable ();
            }
          else if (e2 && e2->speculative)
            {
              cgraph_edge *direct, *indirect;
              ipa_ref *ref;

              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->frequency = freq;
              int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
              e->make_speculative (direct->callee, direct->count,
                                   RDIV (freq * prob, REG_BR_PROB_BASE));
            }
          else
            {
              e->count = count;
              e->frequency = freq;
            }
        }
      src->release_body ();
      inline_update_overall_summary (dst);
    }
  /* TODO: if there is no match, we can scale up.  */
  src->decl = oldsrcdecl;
}

/* Return true if the call to DEST is known to be a self-recursive call
   within FUNC.  */

bool
recursive_call_p (tree func, tree dest)
{
  struct cgraph_node *dest_node = cgraph_node::get_create (dest);
  struct cgraph_node *cnode = cgraph_node::get_create (func);

  return dest_node->semantically_equivalent_p (cnode);
}
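
/* Usage sketch (illustrative): for a direct call statement STMT seen
   while scanning the body of FNDECL, a caller might test

     if (gimple_call_fndecl (stmt)
         && recursive_call_p (fndecl, gimple_call_fndecl (stmt)))
       ...

   Because the test is semantic equivalence, a call to an alias of FUNC
   is also reported as self-recursive.  */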