cgraph.c (cgraph_turn_edge_to_speculative): Return newly introduced edge; fix typo...
[gcc.git] / gcc / ipa.c
1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "cgraph.h"
25 #include "tree-pass.h"
26 #include "gimple.h"
27 #include "ggc.h"
28 #include "flags.h"
29 #include "pointer-set.h"
30 #include "target.h"
31 #include "tree-iterator.h"
32 #include "ipa-utils.h"
33 #include "pointer-set.h"
34 #include "ipa-inline.h"
35 #include "hash-table.h"
36 #include "tree-inline.h"
37 #include "profile.h"
38 #include "params.h"
39 #include "lto-streamer.h"
40 #include "data-streamer.h"
41 #include "value-prof.h"
42
43 /* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
44
45 static bool
46 cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
47 {
48 /* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
49 return !(cgraph_only_called_directly_or_aliased_p (node)
50 && !ipa_ref_has_aliases_p (&node->symbol.ref_list)
51 && node->symbol.definition
52 && !DECL_EXTERNAL (node->symbol.decl)
53 && !node->symbol.externally_visible
54 && !node->symbol.used_from_other_partition
55 && !node->symbol.in_other_partition);
56 }
57
58 /* Return true when function can be marked local. */
59
60 static bool
61 cgraph_local_node_p (struct cgraph_node *node)
62 {
63 struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
64
65 /* FIXME: thunks can be considered local, but we need prevent i386
66 from attempting to change calling convention of them. */
67 if (n->thunk.thunk_p)
68 return false;
69 return !cgraph_for_node_and_aliases (n,
70 cgraph_non_local_node_p_1, NULL, true);
71
72 }
73
74 /* Return true when NODE has ADDR reference. */
75
76 static bool
77 has_addr_references_p (struct cgraph_node *node,
78 void *data ATTRIBUTE_UNUSED)
79 {
80 int i;
81 struct ipa_ref *ref;
82
83 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
84 i, ref); i++)
85 if (ref->use == IPA_REF_ADDR)
86 return true;
87 return false;
88 }
89
90 /* Look for all functions inlined to NODE and update their inlined_to pointers
91 to INLINED_TO. */
92
93 static void
94 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
95 {
96 struct cgraph_edge *e;
97 for (e = node->callees; e; e = e->next_callee)
98 if (e->callee->global.inlined_to)
99 {
100 e->callee->global.inlined_to = inlined_to;
101 update_inlined_to_pointer (e->callee, inlined_to);
102 }
103 }
104
105 /* Add symtab NODE to queue starting at FIRST.
106
107 The queue is linked via AUX pointers and terminated by pointer to 1.
108 We enqueue nodes at two occasions: when we find them reachable or when we find
109 their bodies needed for further clonning. In the second case we mark them
110 by pointer to 2 after processing so they are re-queue when they become
111 reachable. */
112
113 static void
114 enqueue_node (symtab_node node, symtab_node *first,
115 struct pointer_set_t *reachable)
116 {
117 /* Node is still in queue; do nothing. */
118 if (node->symbol.aux && node->symbol.aux != (void *) 2)
119 return;
120 /* Node was already processed as unreachable, re-enqueue
121 only if it became reachable now. */
122 if (node->symbol.aux == (void *)2 && !pointer_set_contains (reachable, node))
123 return;
124 node->symbol.aux = *first;
125 *first = node;
126 }
127
/* Process references in LIST: mark every referred symbol that is defined in
   this partition and still relevant as reachable in REACHABLE, and enqueue
   every referred symbol (reachable or merely in the boundary) onto the work
   list FIRST.  BEFORE_INLINING_P specifies whether this runs before
   inlining.  */

static void
process_references (struct ipa_ref_list *list,
		    symtab_node *first,
		    bool before_inlining_p,
		    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      symtab_node node = ref->referred;

      if (node->symbol.definition && !node->symbol.in_other_partition
	  && ((!DECL_EXTERNAL (node->symbol.decl) || node->symbol.alias)
	      || (before_inlining_p
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->symbol.decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->symbol.decl)
			 != error_mark_node))))
	pointer_set_insert (reachable, node);
      /* Note: enqueued unconditionally — symbols not marked reachable still
	 enter the queue as boundary symbols.  */
      enqueue_node ((symtab_node) node, first, reachable);
    }
}
156
157
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions needs special handling; the bodies needs
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

     BEFORE_INLINING_P specify whether we are before or after inlining.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones needs bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain set of function declaration where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that needs
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.

   Returns true when anything was removed or changed; dump goes to FILE
   when non-NULL.  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node first = (symtab_node) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* No node may still carry queue state from a previous run.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->symbol.aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->symbol.aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->symbol.definition
	  && !node->global.inlined_to
	  && !node->symbol.in_other_partition
	  && (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	      /* Keep around virtual functions for possible devirtualization.  */
	      || (before_inlining_p
		  && DECL_VIRTUAL_P (node->symbol.decl))))
	{
	  gcc_assert (!node->global.inlined_to);
	  pointer_set_insert (reachable, node);
	  enqueue_node ((symtab_node)node, &first, reachable);
	}
      else
	gcc_assert (!node->symbol.aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode)
	&& !vnode->symbol.in_other_partition)
      {
	pointer_set_insert (reachable, vnode);
	enqueue_node ((symtab_node)vnode, &first, reachable);
      }

  /* Perform reachability analysis.  Pop the work list until it hits the
     terminator (constant 1).  */
  while (first != (symtab_node) (void *) 1)
    {
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node node = first;

      first = (symtab_node)first->symbol.aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->symbol.aux = (void *)2;
      else
	{
	  /* Keep the abstract origin of this symbol alive; debug info and
	     later materialization may need it.  */
	  if (DECL_ABSTRACT_ORIGIN (node->symbol.decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_get_create_real_symbol_node (DECL_ABSTRACT_ORIGIN (node->symbol.decl));
	      origin_node->used_as_abstract_origin = true;
	      enqueue_node ((symtab_node) origin_node, &first, reachable);
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all other in the same comdat group to be also reachable.  */
	  if (node->symbol.same_comdat_group)
	    {
	      symtab_node next;
	      for (next = node->symbol.same_comdat_group;
		   next != node;
		   next = next->symbol.same_comdat_group)
		if (!pointer_set_insert (reachable, next))
		  enqueue_node ((symtab_node) next, &first, reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (&node->symbol.ref_list, &first,
			      before_inlining_p, reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->symbol.definition
		      && !e->callee->symbol.in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->symbol.decl)
			  || e->callee->symbol.alias
			  || before_inlining_p))
		    pointer_set_insert (reachable, e->callee);
		  enqueue_node ((symtab_node) e->callee, &first, reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->symbol.decl != cnode->symbol.decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      pointer_set_insert (body_needed_for_clonning, cnode->symbol.decl);
		      enqueue_node ((symtab_node)cnode, &first, reachable);
		    }
		}
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node> (node);
      if (vnode
	  && DECL_EXTERNAL (node->symbol.decl)
	  && !vnode->symbol.alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref;
	  for (int i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
	    enqueue_node (ref->referred, &first, reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->symbol.aux)
	{
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!pointer_set_contains (reachable, node))
	{
	  if (!pointer_set_contains (body_needed_for_clonning, node->symbol.decl))
	    cgraph_release_function_body (node);
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->symbol.decl));
	  if (node->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", cgraph_node_name (node));
	      /* Demote the definition to a declaration: clear analysis and
		 alias state, drop outgoing edges and references.  */
	      node->symbol.analyzed = false;
	      node->symbol.definition = false;
	      node->symbol.cpp_implicit_alias = false;
	      node->symbol.alias = false;
	      node->symbol.weakref = false;
	      if (!node->symbol.in_other_partition)
		node->local.local = false;
	      cgraph_node_remove_callees (node);
	      ipa_remove_all_references (&node->symbol.ref_list);
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
		    || in_lto_p || DECL_RESULT (node->symbol.decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Clear queue state so a future run starts clean.  */
      node->symbol.aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->symbol.aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->symbol.decl)))
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
      else if (!pointer_set_contains (reachable, vnode))
	{
	  tree init;
	  if (vnode->symbol.definition)
	    {
	      if (file)
		fprintf (file, " %s", varpool_node_name (vnode));
	      changed = true;
	    }
	  vnode->symbol.definition = false;
	  vnode->symbol.analyzed = false;
	  vnode->symbol.aux = NULL;

	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->symbol.decl)) == error_mark_node)
	    varpool_remove_initializer (vnode);
	  else
	    DECL_INITIAL (vnode->symbol.decl) = init;
	  ipa_remove_all_references (&vnode->symbol.ref_list);
	}
      else
	vnode->symbol.aux = NULL;
    }

  pointer_set_destroy (reachable);
  pointer_set_destroy (body_needed_for_clonning);

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->symbol.address_taken
	&& !node->symbol.used_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->symbol.address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      cgraph_propagate_frequency (node);

  return changed;
}
491
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  struct varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Consider only defined variables whose every reference is visible and
     whose flags could actually be relaxed.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->symbol.definition && varpool_all_refs_explicit_p (vnode)
	&& (TREE_ADDRESSABLE (vnode->symbol.decl)
	    || !TREE_READONLY (vnode->symbol.decl)))
      {
	bool written = false;
	bool address_taken = false;
	int i;
	struct ipa_ref *ref;
	/* Scan referring uses; stop early once the variable is known to be
	   both written and address-taken.  */
	for (i = 0; ipa_ref_list_referring_iterate (&vnode->symbol.ref_list,
						    i, ref)
		    && (!written || !address_taken); i++)
	  switch (ref->use)
	    {
	    case IPA_REF_ADDR:
	      address_taken = true;
	      break;
	    case IPA_REF_LOAD:
	      break;
	    case IPA_REF_STORE:
	      written = true;
	      break;
	    }
	if (TREE_ADDRESSABLE (vnode->symbol.decl) && !address_taken)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
	    TREE_ADDRESSABLE (vnode->symbol.decl) = 0;
	  }
	if (!TREE_READONLY (vnode->symbol.decl) && !address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->symbol.decl) == NULL)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
	    TREE_READONLY (vnode->symbol.decl) = 1;
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
550
551 /* Return true when there is a reference to node and it is not vtable. */
552 static bool
553 address_taken_from_non_vtable_p (symtab_node node)
554 {
555 int i;
556 struct ipa_ref *ref;
557 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
558 i, ref); i++)
559 if (ref->use == IPA_REF_ADDR)
560 {
561 struct varpool_node *node;
562 if (is_a <cgraph_node> (ref->referring))
563 return true;
564 node = ipa_ref_referring_varpool_node (ref);
565 if (!DECL_VIRTUAL_P (node->symbol.decl))
566 return true;
567 }
568 return false;
569 }
570
571 /* A helper for comdat_can_be_unshared_p. */
572
573 static bool
574 comdat_can_be_unshared_p_1 (symtab_node node)
575 {
576 /* When address is taken, we don't know if equality comparison won't
577 break eventually. Exception are virutal functions and vtables,
578 where this is not possible by language standard. */
579 if (!DECL_VIRTUAL_P (node->symbol.decl)
580 && address_taken_from_non_vtable_p (node))
581 return false;
582
583 /* If the symbol is used in some weird way, better to not touch it. */
584 if (node->symbol.force_output)
585 return false;
586
587 /* Explicit instantiations needs to be output when possibly
588 used externally. */
589 if (node->symbol.forced_by_abi
590 && TREE_PUBLIC (node->symbol.decl)
591 && (node->symbol.resolution != LDPR_PREVAILING_DEF_IRONLY
592 && !flag_whole_program))
593 return false;
594
595 /* Non-readonly and volatile variables can not be duplicated. */
596 if (is_a <varpool_node> (node)
597 && (!TREE_READONLY (node->symbol.decl)
598 || TREE_THIS_VOLATILE (node->symbol.decl)))
599 return false;
600 return true;
601 }
602
603 /* COMDAT functions must be shared only if they have address taken,
604 otherwise we can produce our own private implementation with
605 -fwhole-program.
606 Return true when turning COMDAT functoin static can not lead to wrong
607 code when the resulting object links with a library defining same COMDAT.
608
609 Virtual functions do have their addresses taken from the vtables,
610 but in C++ there is no way to compare their addresses for equality. */
611
612 static bool
613 comdat_can_be_unshared_p (symtab_node node)
614 {
615 if (!comdat_can_be_unshared_p_1 (node))
616 return false;
617 if (node->symbol.same_comdat_group)
618 {
619 symtab_node next;
620
621 /* If more than one function is in the same COMDAT group, it must
622 be shared even if just one function in the comdat group has
623 address taken. */
624 for (next = node->symbol.same_comdat_group;
625 next != node; next = next->symbol.same_comdat_group)
626 if (!comdat_can_be_unshared_p_1 (next))
627 return false;
628 }
629 return true;
630 }
631
/* Return true when function NODE should be considered externally visible.
   WHOLE_PROGRAM is the -fwhole-program flag.  The checks are ordered: hard
   "must stay visible" reasons first, then reasons visibility can be
   dropped.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node,
			     bool whole_program)
{
  if (!node->symbol.definition)
    return false;
  if (!TREE_PUBLIC (node->symbol.decl)
      || DECL_EXTERNAL (node->symbol.decl))
    return false;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->symbol.decl))
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (symtab_used_from_object_file_p ((symtab_node) node))
    return true;
  if (DECL_PRESERVE_P (node->symbol.decl))
    return true;
  if (lookup_attribute ("externally_visible",
			DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport",
			   DECL_ATTRIBUTES (node->symbol.decl)))
    return true;
  /* Linker plugin resolution says no external references remain.  */
  if (node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;
  /* When doing LTO or whole program, we can bring COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
      implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->symbol.decl)
      && comdat_can_be_unshared_p ((symtab_node) node))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->symbol.decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->symbol.definition)
    ;
  else if (!whole_program)
    return true;

  /* main () must never be localized.  */
  if (MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
    return true;

  return false;
}
691
/* Return true when variable VNODE should be considered externally visible.
   Mirrors cgraph_externally_visible_p for variables.  */

bool
varpool_externally_visible_p (struct varpool_node *vnode)
{
  if (DECL_EXTERNAL (vnode->symbol.decl))
    return true;

  if (!TREE_PUBLIC (vnode->symbol.decl))
    return false;

  /* If linker counts on us, we must preserve the function.  */
  if (symtab_used_from_object_file_p ((symtab_node) vnode))
    return true;

  if (DECL_HARD_REGISTER (vnode->symbol.decl))
    return true;
  if (DECL_PRESERVE_P (vnode->symbol.decl))
    return true;
  if (lookup_attribute ("externally_visible",
			DECL_ATTRIBUTES (vnode->symbol.decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport",
			   DECL_ATTRIBUTES (vnode->symbol.decl)))
    return true;

  /* See if we have linker information about symbol not being used or
     if we need to make guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by user as used or externally visible.
     This is needed for i.e. references from asm statements.  */
  if (symtab_used_from_object_file_p ((symtab_node) vnode))
    return true;
  if (vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;

  /* As a special case, the COMDAT virtual tables can be unshared.
     In LTO mode turn vtables into static variables.  The variable is readonly,
     so this does not enable more optimization, but referring static var
     is faster for dynamic linking.  Also this matches the logic hiding vtables
     from LTO symbol tables.  */
  if ((in_lto_p || flag_whole_program)
      && DECL_COMDAT (vnode->symbol.decl)
      && comdat_can_be_unshared_p ((symtab_node) vnode))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (vnode->symbol.decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && vnode->symbol.definition)
    ;
  else if (!flag_whole_program)
    return true;

  /* Do not attempt to privatize COMDATS by default.
     This would break linking with C++ libraries sharing
     inline definitions.

     FIXME: We can do so for readonly vars with no address taken and
     possibly also for vtables since no direct pointer comparison is done.
     It might be interesting to do so to reduce linking overhead.  */
  if (DECL_COMDAT (vnode->symbol.decl) || DECL_WEAK (vnode->symbol.decl))
    return true;
  return false;
}
762
763 /* Return true if reference to NODE can be replaced by a local alias.
764 Local aliases save dynamic linking overhead and enable more optimizations.
765 */
766
767 bool
768 can_replace_by_local_alias (symtab_node node)
769 {
770 return (symtab_node_availability (node) > AVAIL_OVERWRITABLE
771 && !symtab_can_be_discarded (node));
772 }
773
774 /* Mark visibility of all functions.
775
776 A local function is one whose calls can occur only in the current
777 compilation unit and all its calls are explicit, so we can change
778 its calling convention. We simply mark all static functions whose
779 address is not taken as local.
780
781 We also change the TREE_PUBLIC flag of all declarations that are public
782 in language point of view but we want to overwrite this default
783 via visibilities for the backend point of view. */
784
785 static unsigned int
786 function_and_variable_visibility (bool whole_program)
787 {
788 struct cgraph_node *node;
789 struct varpool_node *vnode;
790
791 /* All aliases should be procssed at this point. */
792 gcc_checking_assert (!alias_pairs || !alias_pairs->length());
793
794 FOR_EACH_FUNCTION (node)
795 {
796 int flags = flags_from_decl_or_type (node->symbol.decl);
797
798 /* Optimize away PURE and CONST constructors and destructors. */
799 if (optimize
800 && (flags & (ECF_CONST | ECF_PURE))
801 && !(flags & ECF_LOOPING_CONST_OR_PURE))
802 {
803 DECL_STATIC_CONSTRUCTOR (node->symbol.decl) = 0;
804 DECL_STATIC_DESTRUCTOR (node->symbol.decl) = 0;
805 }
806
807 /* Frontends and alias code marks nodes as needed before parsing is finished.
808 We may end up marking as node external nodes where this flag is meaningless
809 strip it. */
810 if (DECL_EXTERNAL (node->symbol.decl) || !node->symbol.definition)
811 {
812 node->symbol.force_output = 0;
813 node->symbol.forced_by_abi = 0;
814 }
815
816 /* C++ FE on lack of COMDAT support create local COMDAT functions
817 (that ought to be shared but can not due to object format
818 limitations). It is necessary to keep the flag to make rest of C++ FE
819 happy. Clear the flag here to avoid confusion in middle-end. */
820 if (DECL_COMDAT (node->symbol.decl) && !TREE_PUBLIC (node->symbol.decl))
821 DECL_COMDAT (node->symbol.decl) = 0;
822
823 /* For external decls stop tracking same_comdat_group. It doesn't matter
824 what comdat group they are in when they won't be emitted in this TU. */
825 if (node->symbol.same_comdat_group && DECL_EXTERNAL (node->symbol.decl))
826 {
827 #ifdef ENABLE_CHECKING
828 symtab_node n;
829
830 for (n = node->symbol.same_comdat_group;
831 n != (symtab_node)node;
832 n = n->symbol.same_comdat_group)
833 /* If at least one of same comdat group functions is external,
834 all of them have to be, otherwise it is a front-end bug. */
835 gcc_assert (DECL_EXTERNAL (n->symbol.decl));
836 #endif
837 symtab_dissolve_same_comdat_group_list ((symtab_node) node);
838 }
839 gcc_assert ((!DECL_WEAK (node->symbol.decl)
840 && !DECL_COMDAT (node->symbol.decl))
841 || TREE_PUBLIC (node->symbol.decl)
842 || node->symbol.weakref
843 || DECL_EXTERNAL (node->symbol.decl));
844 if (cgraph_externally_visible_p (node, whole_program))
845 {
846 gcc_assert (!node->global.inlined_to);
847 node->symbol.externally_visible = true;
848 }
849 else
850 {
851 node->symbol.externally_visible = false;
852 node->symbol.forced_by_abi = false;
853 }
854 if (!node->symbol.externally_visible
855 && node->symbol.definition && !node->symbol.weakref
856 && !DECL_EXTERNAL (node->symbol.decl))
857 {
858 gcc_assert (whole_program || in_lto_p
859 || !TREE_PUBLIC (node->symbol.decl));
860 node->symbol.unique_name = ((node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
861 || node->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
862 && TREE_PUBLIC (node->symbol.decl));
863 symtab_make_decl_local (node->symbol.decl);
864 node->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
865 if (node->symbol.same_comdat_group)
866 /* cgraph_externally_visible_p has already checked all other nodes
867 in the group and they will all be made local. We need to
868 dissolve the group at once so that the predicate does not
869 segfault though. */
870 symtab_dissolve_same_comdat_group_list ((symtab_node) node);
871 }
872
873 if (node->thunk.thunk_p
874 && TREE_PUBLIC (node->symbol.decl))
875 {
876 struct cgraph_node *decl_node = node;
877
878 decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
879
880 /* Thunks have the same visibility as function they are attached to.
881 Make sure the C++ front end set this up properly. */
882 if (DECL_ONE_ONLY (decl_node->symbol.decl))
883 {
884 gcc_checking_assert (DECL_COMDAT (node->symbol.decl)
885 == DECL_COMDAT (decl_node->symbol.decl));
886 gcc_checking_assert (DECL_COMDAT_GROUP (node->symbol.decl)
887 == DECL_COMDAT_GROUP (decl_node->symbol.decl));
888 gcc_checking_assert (node->symbol.same_comdat_group);
889 }
890 if (DECL_EXTERNAL (decl_node->symbol.decl))
891 DECL_EXTERNAL (node->symbol.decl) = 1;
892 }
893 }
894 FOR_EACH_DEFINED_FUNCTION (node)
895 {
896 node->local.local |= cgraph_local_node_p (node);
897
898 /* If we know that function can not be overwritten by a different semantics
899 and moreover its section can not be discarded, replace all direct calls
900 by calls to an nonoverwritable alias. This make dynamic linking
901 cheaper and enable more optimization.
902
903 TODO: We can also update virtual tables. */
904 if (node->callers && can_replace_by_local_alias ((symtab_node)node))
905 {
906 struct cgraph_node *alias = cgraph (symtab_nonoverwritable_alias ((symtab_node) node));
907
908 if (alias != node)
909 {
910 while (node->callers)
911 {
912 struct cgraph_edge *e = node->callers;
913
914 cgraph_redirect_edge_callee (e, alias);
915 if (gimple_has_body_p (e->caller->symbol.decl))
916 {
917 push_cfun (DECL_STRUCT_FUNCTION (e->caller->symbol.decl));
918 cgraph_redirect_edge_call_stmt_to_callee (e);
919 pop_cfun ();
920 }
921 }
922 }
923 }
924 }
925 FOR_EACH_VARIABLE (vnode)
926 {
927 /* weak flag makes no sense on local variables. */
928 gcc_assert (!DECL_WEAK (vnode->symbol.decl)
929 || vnode->symbol.weakref
930 || TREE_PUBLIC (vnode->symbol.decl)
931 || DECL_EXTERNAL (vnode->symbol.decl));
932 /* In several cases declarations can not be common:
933
934 - when declaration has initializer
935 - when it is in weak
936 - when it has specific section
937 - when it resides in non-generic address space.
938 - if declaration is local, it will get into .local common section
939 so common flag is not needed. Frontends still produce these in
940 certain cases, such as for:
941
942 static int a __attribute__ ((common))
943
944 Canonicalize things here and clear the redundant flag. */
945 if (DECL_COMMON (vnode->symbol.decl)
946 && (!(TREE_PUBLIC (vnode->symbol.decl)
947 || DECL_EXTERNAL (vnode->symbol.decl))
948 || (DECL_INITIAL (vnode->symbol.decl)
949 && DECL_INITIAL (vnode->symbol.decl) != error_mark_node)
950 || DECL_WEAK (vnode->symbol.decl)
951 || DECL_SECTION_NAME (vnode->symbol.decl) != NULL
952 || ! (ADDR_SPACE_GENERIC_P
953 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->symbol.decl))))))
954 DECL_COMMON (vnode->symbol.decl) = 0;
955 }
956 FOR_EACH_DEFINED_VARIABLE (vnode)
957 {
958 if (!vnode->symbol.definition)
959 continue;
960 if (varpool_externally_visible_p (vnode))
961 vnode->symbol.externally_visible = true;
962 else
963 {
964 vnode->symbol.externally_visible = false;
965 vnode->symbol.forced_by_abi = false;
966 }
967 if (!vnode->symbol.externally_visible
968 && !vnode->symbol.weakref)
969 {
970 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->symbol.decl));
971 symtab_make_decl_local (vnode->symbol.decl);
972 vnode->symbol.unique_name = ((vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY
973 || vnode->symbol.resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
974 && TREE_PUBLIC (vnode->symbol.decl));
975 if (vnode->symbol.same_comdat_group)
976 symtab_dissolve_same_comdat_group_list ((symtab_node) vnode);
977 vnode->symbol.resolution = LDPR_PREVAILING_DEF_IRONLY;
978 }
979 }
980
981 if (dump_file)
982 {
983 fprintf (dump_file, "\nMarking local functions:");
984 FOR_EACH_DEFINED_FUNCTION (node)
985 if (node->local.local)
986 fprintf (dump_file, " %s", cgraph_node_name (node));
987 fprintf (dump_file, "\n\n");
988 fprintf (dump_file, "\nMarking externally visible functions:");
989 FOR_EACH_DEFINED_FUNCTION (node)
990 if (node->symbol.externally_visible)
991 fprintf (dump_file, " %s", cgraph_node_name (node));
992 fprintf (dump_file, "\n\n");
993 fprintf (dump_file, "\nMarking externally visible variables:");
994 FOR_EACH_DEFINED_VARIABLE (vnode)
995 if (vnode->symbol.externally_visible)
996 fprintf (dump_file, " %s", varpool_node_name (vnode));
997 fprintf (dump_file, "\n\n");
998 }
999 cgraph_function_flags_ready = true;
1000 return 0;
1001 }
1002
1003 /* Local function pass handling visibilities. This happens before LTO streaming
1004 so in particular -fwhole-program should be ignored at this level. */
1005
1006 static unsigned int
1007 local_function_and_variable_visibility (void)
1008 {
1009 return function_and_variable_visibility (flag_whole_program && !flag_lto);
1010 }
1011
namespace {

/* Pass descriptor for the compile-time (pre-LTO-streaming) visibility
   pass; fields are positional, see pass_data in tree-pass.h.  */
const pass_data pass_data_ipa_function_and_variable_visibility =
{
  SIMPLE_IPA_PASS, /* type */
  "visibility", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

/* Simple IPA pass wrapping local_function_and_variable_visibility.  */
class pass_ipa_function_and_variable_visibility : public simple_ipa_opt_pass
{
public:
  pass_ipa_function_and_variable_visibility(gcc::context *ctxt)
    : simple_ipa_opt_pass(pass_data_ipa_function_and_variable_visibility, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return local_function_and_variable_visibility ();
  }

}; // class pass_ipa_function_and_variable_visibility

} // anon namespace
1044
1045 simple_ipa_opt_pass *
1046 make_pass_ipa_function_and_variable_visibility (gcc::context *ctxt)
1047 {
1048 return new pass_ipa_function_and_variable_visibility (ctxt);
1049 }
1050
1051 /* Free inline summary. */
1052
static unsigned
free_inline_summary (void)
{
  /* Release per-function inline summaries; they are no longer needed
     once the inliner has run.  */
  inline_free_summary ();
  return 0;
}
1059
namespace {

/* Pass descriptor for the internal (name starts with '*', never dumped
   by default) pass releasing inline summaries.  */
const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass wrapping free_inline_summary.  */
class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary(gcc::context *ctxt)
    : simple_ipa_opt_pass(pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return free_inline_summary (); }

}; // class pass_ipa_free_inline_summary

} // anon namespace
1090
1091 simple_ipa_opt_pass *
1092 make_pass_ipa_free_inline_summary (gcc::context *ctxt)
1093 {
1094 return new pass_ipa_free_inline_summary (ctxt);
1095 }
1096
1097 /* Do not re-run on ltrans stage. */
1098
static bool
gate_whole_program_function_and_variable_visibility (void)
{
  /* The pass already ran at WPA time; repeating it in the ltrans
     stage would be redundant.  */
  return !flag_ltrans;
}
1104
/* Bring functions local at LTO time with -fwhole-program.  */
1106
static unsigned int
whole_program_function_and_variable_visibility (void)
{
  /* At link time -fwhole-program can be honored directly.  */
  function_and_variable_visibility (flag_whole_program);
  /* When optimizing, additionally promote variables to readonly /
     non-addressable where whole-program knowledge allows it.  */
  if (optimize)
    ipa_discover_readonly_nonaddressable_vars ();
  return 0;
}
1115
namespace {

/* Pass descriptor for the link-time (-fwhole-program aware) visibility
   pass; an IPA_PASS so it participates in the LTO pass pipeline, but it
   streams no summaries (all hooks below are NULL).  */
const pass_data pass_data_ipa_whole_program_visibility =
{
  IPA_PASS, /* type */
  "whole-program", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

/* IPA pass wrapping whole_program_function_and_variable_visibility.  */
class pass_ipa_whole_program_visibility : public ipa_opt_pass_d
{
public:
  pass_ipa_whole_program_visibility(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_whole_program_visibility, ctxt,
		     NULL, /* generate_summary */
		     NULL, /* write_summary */
		     NULL, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () {
    return gate_whole_program_function_and_variable_visibility ();
  }
  unsigned int execute () {
    return whole_program_function_and_variable_visibility ();
  }

}; // class pass_ipa_whole_program_visibility

} // anon namespace
1160
1161 ipa_opt_pass_d *
1162 make_pass_ipa_whole_program_visibility (gcc::context *ctxt)
1163 {
1164 return new pass_ipa_whole_program_visibility (ctxt);
1165 }
1166
1167 /* Entry in the histogram. */
1168
struct histogram_entry
{
  gcov_type count;  /* Execution count the binned basic blocks share.  */
  int time;	    /* Accumulated estimated time of those blocks.  */
  int size;	    /* Accumulated estimated size of those blocks.  */
};
1175
1176 /* Histogram of profile values.
1177 The histogram is represented as an ordered vector of entries allocated via
1178 histogram_pool. During construction a separate hashtable is kept to lookup
1179 duplicate entries. */
1180
/* Entries, one per distinct COUNT; sorted by cmp_counts (descending
   count) after summary collection / reading.  */
vec<histogram_entry *> histogram;
/* Allocation pool backing the entries referenced from HISTOGRAM.  */
static alloc_pool histogram_pool;
1183
/* Hashtable support for grouping histogram entries by their execution COUNT.  */
1185
/* Hash descriptor: entries are keyed solely by their COUNT field;
   removal is a no-op because storage lives in histogram_pool.  */
struct histogram_hash : typed_noop_remove <histogram_entry>
{
  typedef histogram_entry value_type;
  typedef histogram_entry compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
};
1193
inline hashval_t
histogram_hash::hash (const histogram_entry *val)
{
  /* Truncating the gcov_type count to hashval_t is harmless: equality
     below still compares the full-width counts.  */
  return val->count;
}
1199
inline int
histogram_hash::equal (const histogram_entry *val, const histogram_entry *val2)
{
  /* Two entries are the same bucket iff their execution counts match.  */
  return val->count == val2->count;
}
1205
1206 /* Account TIME and SIZE executed COUNT times into HISTOGRAM.
1207 HASHTABLE is the on-side hash kept to avoid duplicates. */
1208
static void
account_time_size (hash_table <histogram_hash> hashtable,
		   vec<histogram_entry *> &histogram,
		   gcov_type count, int time, int size)
{
  /* Probe by COUNT only; time/size in the key are ignored by the
     hash/equality functions.  */
  histogram_entry key = {count, 0, 0};
  histogram_entry **val = hashtable.find_slot (&key, INSERT);

  if (!*val)
    {
      /* First block with this COUNT: allocate a pool entry, copy the
	 key and register it in the vector.  */
      *val = (histogram_entry *) pool_alloc (histogram_pool);
      **val = key;
      histogram.safe_push (*val);
    }
  (*val)->time += time;
  (*val)->size += size;
}
1226
1227 int
1228 cmp_counts (const void *v1, const void *v2)
1229 {
1230 const histogram_entry *h1 = *(const histogram_entry * const *)v1;
1231 const histogram_entry *h2 = *(const histogram_entry * const *)v2;
1232 if (h1->count < h2->count)
1233 return 1;
1234 if (h1->count > h2->count)
1235 return -1;
1236 return 0;
1237 }
1238
1239 /* Dump HISTOGRAM to FILE. */
1240
1241 static void
1242 dump_histogram (FILE *file, vec<histogram_entry *> histogram)
1243 {
1244 unsigned int i;
1245 gcov_type overall_time = 0, cumulated_time = 0, cumulated_size = 0, overall_size = 0;
1246
1247 fprintf (dump_file, "Histogram:\n");
1248 for (i = 0; i < histogram.length (); i++)
1249 {
1250 overall_time += histogram[i]->count * histogram[i]->time;
1251 overall_size += histogram[i]->size;
1252 }
1253 if (!overall_time)
1254 overall_time = 1;
1255 if (!overall_size)
1256 overall_size = 1;
1257 for (i = 0; i < histogram.length (); i++)
1258 {
1259 cumulated_time += histogram[i]->count * histogram[i]->time;
1260 cumulated_size += histogram[i]->size;
1261 fprintf (file, " "HOST_WIDEST_INT_PRINT_DEC": time:%i (%2.2f) size:%i (%2.2f)\n",
1262 (HOST_WIDEST_INT) histogram[i]->count,
1263 histogram[i]->time,
1264 cumulated_time * 100.0 / overall_time,
1265 histogram[i]->size,
1266 cumulated_size * 100.0 / overall_size);
1267 }
1268 }
1269
1270 /* Collect histogram from CFG profiles. */
1271
static void
ipa_profile_generate_summary (void)
{
  struct cgraph_node *node;
  gimple_stmt_iterator gsi;
  hash_table <histogram_hash> hashtable;
  basic_block bb;

  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  /* Walk every basic block of every function with a gimple body,
     accumulating estimated time/size per block and harvesting
     indirect-call value-profile data on the way.  */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->symbol.decl))
      {
	int time = 0;
	int size = 0;
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);
	    /* Calls without a known fndecl are indirect calls.  */
	    if (gimple_code (stmt) == GIMPLE_CALL
		&& !gimple_call_fndecl (stmt))
	      {
		histogram_value h;
		h = gimple_histogram_value_of_type
		      (DECL_STRUCT_FUNCTION (node->symbol.decl),
		       stmt, HIST_TYPE_INDIR_CALL);
		/* No need to do sanity check: gimple_ic_transform already
		   takes away bad histograms.  */
		if (h)
		  {
		    /* counter 0 is target, counter 1 is number of execution we called target,
		       counter 2 is total number of executions.  */
		    if (h->hvalue.counters[2])
		      {
			struct cgraph_edge * e = cgraph_edge (node, stmt);
			e->indirect_info->common_target_id
			  = h->hvalue.counters [0];
			e->indirect_info->common_target_probability
			  = GCOV_COMPUTE_SCALE (h->hvalue.counters [1], h->hvalue.counters [2]);
			/* Rounding in GCOV_COMPUTE_SCALE can push the
			   probability slightly above 1; clamp it.  */
			if (e->indirect_info->common_target_probability > REG_BR_PROB_BASE)
			  {
			    if (dump_file)
			      fprintf (dump_file, "Probability capped to 1\n");
			    e->indirect_info->common_target_probability = REG_BR_PROB_BASE;
			  }
		      }
		    /* The histogram is consumed; drop it so later passes
		       do not see it again.  */
		    gimple_remove_histogram_value (DECL_STRUCT_FUNCTION (node->symbol.decl),
						   stmt, h);
		  }
	      }
	    time += estimate_num_insns (stmt, &eni_time_weights);
	    size += estimate_num_insns (stmt, &eni_size_weights);
	  }
	account_time_size (hashtable, histogram, bb->count, time, size);
      }
  hashtable.dispose ();
  histogram.qsort (cmp_counts);
}
1331
1332 /* Serialize the ipa info for lto. */
1333
1334 static void
1335 ipa_profile_write_summary (void)
1336 {
1337 struct lto_simple_output_block *ob
1338 = lto_create_simple_output_block (LTO_section_ipa_profile);
1339 unsigned int i;
1340
1341 streamer_write_uhwi_stream (ob->main_stream, histogram.length());
1342 for (i = 0; i < histogram.length (); i++)
1343 {
1344 streamer_write_gcov_count_stream (ob->main_stream, histogram[i]->count);
1345 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->time);
1346 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->size);
1347 }
1348 lto_destroy_simple_output_block (ob);
1349 }
1350
1351 /* Deserialize the ipa info for lto. */
1352
static void
ipa_profile_read_summary (void)
{
  struct lto_file_decl_data ** file_data_vec
    = lto_get_file_decl_data ();
  struct lto_file_decl_data * file_data;
  hash_table <histogram_hash> hashtable;
  int j = 0;

  hashtable.create (10);
  histogram_pool = create_alloc_pool ("IPA histogram", sizeof (struct histogram_entry),
				      10);

  /* Merge the histograms of all LTO input files; account_time_size
     combines entries with equal counts across files.  */
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_profile,
					 &data, &len);
      /* A file may lack the section (e.g. compiled without profile
	 data); just skip it.  */
      if (ib)
	{
	  unsigned int num = streamer_read_uhwi (ib);
	  unsigned int n;
	  for (n = 0; n < num; n++)
	    {
	      gcov_type count = streamer_read_gcov_count (ib);
	      int time = streamer_read_uhwi (ib);
	      int size = streamer_read_uhwi (ib);
	      account_time_size (hashtable, histogram,
				 count, time, size);
	    }
	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_profile,
					  ib, data, len);
	}
    }
  hashtable.dispose ();
  histogram.qsort (cmp_counts);
}
1394
1395 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1396
static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;
  gcov_type overall_time = 0, cutoff = 0, cumulated = 0, overall_size = 0;
  struct cgraph_node *n,*n2;
  int nindirect = 0, ncommon = 0, nunknown = 0, nuseless = 0, nconverted = 0;
  bool node_map_initialized = false;

  if (dump_file)
    dump_histogram (dump_file, histogram);
  /* Total up the histogram to compute the working-set cutoff below.  */
  for (i = 0; i < (int)histogram.length (); i++)
    {
      overall_time += histogram[i]->count * histogram[i]->time;
      overall_size += histogram[i]->size;
    }
  if (overall_time)
    {
      gcov_type threshold;

      gcc_assert (overall_size);
      if (dump_file)
	{
	  gcov_type min, cumulated_time = 0, cumulated_size = 0;

	  fprintf (dump_file, "Overall time: "HOST_WIDEST_INT_PRINT_DEC"\n",
		   (HOST_WIDEST_INT)overall_time);
	  min = get_hot_bb_threshold ();
	  for (i = 0; i < (int)histogram.length () && histogram[i]->count >= min;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "GCOV min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)min,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Determine the smallest count THRESHOLD such that the hottest
	 entries cover HOT_BB_COUNT_WS_PERMILLE per-mille of the total
	 time (the histogram is sorted by descending count).  */
      cutoff = (overall_time * PARAM_VALUE (HOT_BB_COUNT_WS_PERMILLE) + 500) / 1000;
      threshold = 0;
      for (i = 0; cumulated < cutoff; i++)
	{
	  cumulated += histogram[i]->count * histogram[i]->time;
	  threshold = histogram[i]->count;
	}
      if (!threshold)
	threshold = 1;
      if (dump_file)
	{
	  gcov_type cumulated_time = 0, cumulated_size = 0;

	  for (i = 0;
	       i < (int)histogram.length () && histogram[i]->count >= threshold;
	       i++)
	    {
	      cumulated_time += histogram[i]->count * histogram[i]->time;
	      cumulated_size += histogram[i]->size;
	    }
	  fprintf (dump_file, "Determined min count: "HOST_WIDEST_INT_PRINT_DEC
		   " Time:%3.2f%% Size:%3.2f%%\n",
		   (HOST_WIDEST_INT)threshold,
		   cumulated_time * 100.0 / overall_time,
		   cumulated_size * 100.0 / overall_size);
	}
      /* Only raise the hotness threshold (or replace it wholesale at
	 LTO time, where the global view is authoritative).  */
      if (threshold > get_hot_bb_threshold ()
	  || in_lto_p)
	{
	  if (dump_file)
	    fprintf (dump_file, "Threshold updated.\n");
	  set_hot_bb_threshold (threshold);
	}
    }
  histogram.release();
  free_alloc_pool (histogram_pool);

  /* Produce speculative calls: we saved common target from profiling into
     e->common_target_id.  Now, at link time, we can look up corresponding
     function node and produce speculative call.  */

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;

      for (e = n->indirect_calls; e; e = e->next_callee)
	{
	  if (n->count)
	    nindirect++;
	  if (e->indirect_info->common_target_id)
	    {
	      /* The profile-id -> node map is built lazily, only when
		 some edge actually recorded a common target.  */
	      if (!node_map_initialized)
	        init_node_map (false);
	      node_map_initialized = true;
	      ncommon++;
	      n2 = find_func_by_profile_id (e->indirect_info->common_target_id);
	      if (n2)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "Indirect call -> direct call from"
			       " other module %s/%i => %s/%i, prob %3.2f\n",
			       xstrdup (cgraph_node_name (n)), n->symbol.order,
			       xstrdup (cgraph_node_name (n2)), n2->symbol.order,
			       e->indirect_info->common_target_probability
			       / (float)REG_BR_PROB_BASE);
		    }
		  /* Speculation pays off only when the common target is
		     reasonably likely and the call site is hot.  */
		  if (e->indirect_info->common_target_probability
		      < REG_BR_PROB_BASE / 2)
		    {
		      nuseless++;
		      if (dump_file)
			fprintf (dump_file,
				 "Not speculating: probability is too low.\n");
		    }
		  else if (!cgraph_maybe_hot_edge_p (e))
		    {
		      nuseless++;
		      if (dump_file)
			fprintf (dump_file,
				 "Not speculating: call is cold.\n");
		    }
		  else if (cgraph_function_body_availability (n2)
			   <= AVAIL_OVERWRITABLE
			   && symtab_can_be_discarded ((symtab_node) n2))
		    {
		      nuseless++;
		      if (dump_file)
			fprintf (dump_file,
				 "Not speculating: target is overwritable "
				 "and can be discarded.\n");
		    }
		  else
		    {
		      /* Target may be overwritable, but profile says that
			 control flow goes to this particular implementation
			 of N2.  Speculate on the local alias to allow inlining.
		       */
		      if (!symtab_can_be_discarded ((symtab_node) n2))
			n2 = cgraph (symtab_nonoverwritable_alias ((symtab_node)n2));
		      nconverted++;
		      cgraph_turn_edge_to_speculative
			(e, n2,
			 apply_scale (e->count,
				      e->indirect_info->common_target_probability),
			 apply_scale (e->frequency,
				      e->indirect_info->common_target_probability));
		      update = true;
		    }
		}
	      else
		{
		  if (dump_file)
		    fprintf (dump_file, "Function with profile-id %i not found.\n",
			     e->indirect_info->common_target_id);
		  nunknown++;
		}
	    }
	}
      if (update)
	inline_update_overall_summary (n);
    }
  if (node_map_initialized)
    del_node_map ();
  if (dump_file && nindirect)
    fprintf (dump_file,
	     "%i indirect calls trained.\n"
	     "%i (%3.2f%%) have common target.\n"
	     "%i (%3.2f%%) targets was not found.\n"
	     "%i (%3.2f%%) speculations seems useless.\n"
	     "%i (%3.2f%%) speculations produced.\n",
	     nindirect,
	     ncommon, ncommon * 100.0 / nindirect,
	     nunknown, nunknown * 100.0 / nindirect,
	     nuseless, nuseless * 100.0 / nindirect,
	     nconverted, nconverted * 100.0 / nindirect);

  /* Propagate execution-frequency information (local/executed-once)
     over the callgraph in reverse postorder; iterate to a fixed point
     since demoting one node can demote its callees.  */
  order_pos = ipa_reverse_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->symbol.aux)
	      {
		something_changed = true;
		e->callee->symbol.aux = (void *)1;
	      }
	}
      order[i]->symbol.aux = NULL;
    }

  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  /* AUX marks nodes whose frequency may still change because a
	     caller changed in the previous sweep.  */
	  if (order[i]->symbol.aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->symbol.aux)
		  {
		    something_changed = true;
		    e->callee->symbol.aux = (void *)1;
		  }
	    }
	  order[i]->symbol.aux = NULL;
	}
    }
  free (order);
  return 0;
}
1613
/* Gate: run only when -fipa-profile is enabled.  */
static bool
gate_ipa_profile (void)
{
  return flag_ipa_profile;
}
1619
namespace {

/* Pass descriptor for the IPA profile-propagation pass; it streams
   the histogram summary through the LTO hooks below.  */
const pass_data pass_data_ipa_profile =
{
  IPA_PASS, /* type */
  "profile_estimate", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PROFILE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapping ipa_profile; summaries are generated per-TU and
   merged at link time by ipa_profile_read_summary.  */
class pass_ipa_profile : public ipa_opt_pass_d
{
public:
  pass_ipa_profile(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_profile, ctxt,
		     ipa_profile_generate_summary, /* generate_summary */
		     ipa_profile_write_summary, /* write_summary */
		     ipa_profile_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_profile (); }
  unsigned int execute () { return ipa_profile (); }

}; // class pass_ipa_profile

} // anon namespace
1660
1661 ipa_opt_pass_d *
1662 make_pass_ipa_profile (gcc::context *ctxt)
1663 {
1664 return new pass_ipa_profile (ctxt);
1665 }
1666
1667 /* Generate and emit a static constructor or destructor. WHICH must
1668 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1669 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1670 initialization priority for this constructor or destructor.
1671
1672 FINAL specify whether the externally visible name for collect2 should
1673 be produced. */
1674
static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  /* COUNTER makes each generated cdtor name unique within this TU.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce sane name but one not recognizable by collect2, just for the
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void fn(void) FUNCTION_DECL holding BODY.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support the final function must survive
     to be seen (by name) by collect2.  */
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the cdtor kind and its priority on the decl.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  /* Leave no dangling pointers to the temporary function context.  */
  set_cfun (NULL);
  current_function_decl = NULL;
}
1744
1745 /* Generate and emit a static constructor or destructor. WHICH must
1746 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1747 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1748 initialization priority for this constructor or destructor. */
1749
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FINAL is false: the emitted name is not collect2-recognizable, so
     the cdtor can still be merged later by ipa_cdtor_merge.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
1755
/* A vector of FUNCTION_DECLs declared as static constructors; filled by
   record_cdtor_fn, consumed and released by ipa_cdtor_merge.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors; likewise.  */
static vec<tree> static_dtors;
1760
1761 /* When target does not have ctors and dtors, we call all constructor
1762 and destructor by special initialization/destruction function
1763 recognized by collect2.
1764
1765 When we are going to build this function, collect all constructors and
1766 destructors and turn them into normal functions. */
1767
static void
record_cdtor_fn (struct cgraph_node *node)
{
  /* A decl can be both a constructor and a destructor; record it in
     each applicable vector.  */
  if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
    static_ctors.safe_push (node->symbol.decl);
  if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
    static_dtors.safe_push (node->symbol.decl);
  node = cgraph_get_node (node->symbol.decl);
  /* The cdtor body will be inlined into the merged caller; lift the
     inline limits so that always happens.  */
  DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl) = 1;
}
1778
1779 /* Define global constructors/destructor functions for the CDTORS, of
1780 which they are LEN. The CDTORS are sorted by initialization
1781 priority. If CTOR_P is true, these are constructors; otherwise,
1782 they are destructors. */
1783
static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the run [i, j) of cdtors sharing one
	 initialization priority (CDTORS is sorted by priority).  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  /* Demote FN to an ordinary function; the merged wrapper
	     built below becomes the real cdtor.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
1843
1844 /* Comparison function for qsort. P1 and P2 are actually of type
1845 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1846 used to determine the sort order. */
1847
1848 static int
1849 compare_ctor (const void *p1, const void *p2)
1850 {
1851 tree f1;
1852 tree f2;
1853 int priority1;
1854 int priority2;
1855
1856 f1 = *(const tree *)p1;
1857 f2 = *(const tree *)p2;
1858 priority1 = DECL_INIT_PRIORITY (f1);
1859 priority2 = DECL_INIT_PRIORITY (f2);
1860
1861 if (priority1 < priority2)
1862 return -1;
1863 else if (priority1 > priority2)
1864 return 1;
1865 else
1866 /* Ensure a stable sort. Constructors are executed in backwarding
1867 order to make LTO initialize braries first. */
1868 return DECL_UID (f2) - DECL_UID (f1);
1869 }
1870
1871 /* Comparison function for qsort. P1 and P2 are actually of type
1872 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1873 used to determine the sort order. */
1874
1875 static int
1876 compare_dtor (const void *p1, const void *p2)
1877 {
1878 tree f1;
1879 tree f2;
1880 int priority1;
1881 int priority2;
1882
1883 f1 = *(const tree *)p1;
1884 f2 = *(const tree *)p2;
1885 priority1 = DECL_FINI_PRIORITY (f1);
1886 priority2 = DECL_FINI_PRIORITY (f2);
1887
1888 if (priority1 < priority2)
1889 return -1;
1890 else if (priority1 > priority2)
1891 return 1;
1892 else
1893 /* Ensure a stable sort. */
1894 return DECL_UID (f1) - DECL_UID (f2);
1895 }
1896
1897 /* Generate functions to call static constructors and destructors
1898 for targets that do not support .ctors/.dtors sections. These
1899 functions have magic names which are detected by collect2. */
1900
static void
build_cdtor_fns (void)
{
  /* Sort each vector by priority, then batch same-priority cdtors
     into single collect2-visible functions (see build_cdtor).  */
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1918
1919 /* Look for constructors and destructors and produce function calling them.
1920 This is needed for targets not supporting ctors or dtors, but we perform the
1921 transformation also at linktime to merge possibly numerous
1922 constructors/destructors into single function to improve code locality and
1923 reduce size. */
1924
static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect all static constructors/destructors, ...  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl)
	|| DECL_STATIC_DESTRUCTOR (node->symbol.decl))
       record_cdtor_fn (node);
  /* ... merge them into per-priority wrappers, and drop the
     temporary vectors.  */
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}
1938
1939 /* Perform the pass when we have no ctors/dtors support
1940 or at LTO time to merge multiple constructors into single
1941 function. */
1942
static bool
gate_ipa_cdtor_merge (void)
{
  /* Required when the target has no ctor/dtor sections; otherwise only
     worthwhile as a size/locality optimization at LTO time.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}
1948
namespace {

/* Pass descriptor for the cdtor-merging pass; it streams no summaries
   (all hooks below are NULL).  */
const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapping ipa_cdtor_merge.  */
class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_cdtor_merge, ctxt,
		     NULL, /* generate_summary */
		     NULL, /* write_summary */
		     NULL, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_cdtor_merge (); }
  unsigned int execute () { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

} // anon namespace
1989
1990 ipa_opt_pass_d *
1991 make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1992 {
1993 return new pass_ipa_cdtor_merge (ctxt);
1994 }