/* Basic IPA optimizations and utilities.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
26 #include "tree-pass.h"
31 #include "pointer-set.h"
33 #include "tree-iterator.h"
35 /* Fill array order with all nodes with output flag set in the reverse
39 cgraph_postorder (struct cgraph_node
**order
)
41 struct cgraph_node
*node
, *node2
;
44 struct cgraph_edge
*edge
, last
;
47 struct cgraph_node
**stack
=
48 XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node
= cgraph_nodes
; node
; node
= node
->next
)
56 for (pass
= 0; pass
< 2; pass
++)
57 for (node
= cgraph_nodes
; node
; node
= node
->next
)
60 || (!cgraph_only_called_directly_p (node
)
61 && !node
->address_taken
)))
67 node
->aux
= node
->callers
;
70 while (node2
->aux
!= &last
)
72 edge
= (struct cgraph_edge
*) node2
->aux
;
73 if (edge
->next_caller
)
74 node2
->aux
= edge
->next_caller
;
77 /* Break possible cycles involving always-inline
78 functions by ignoring edges from always-inline
79 functions to non-always-inline functions. */
80 if (edge
->caller
->local
.disregard_inline_limits
81 && !edge
->callee
->local
.disregard_inline_limits
)
83 if (!edge
->caller
->aux
)
85 if (!edge
->caller
->callers
)
86 edge
->caller
->aux
= &last
;
88 edge
->caller
->aux
= edge
->caller
->callers
;
89 stack
[stack_size
++] = node2
;
94 if (node2
->aux
== &last
)
96 order
[order_pos
++] = node2
;
98 node2
= stack
[--stack_size
];
105 for (node
= cgraph_nodes
; node
; node
= node
->next
)
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
114 update_inlined_to_pointer (struct cgraph_node
*node
, struct cgraph_node
*inlined_to
)
116 struct cgraph_edge
*e
;
117 for (e
= node
->callees
; e
; e
= e
->next_callee
)
118 if (e
->callee
->global
.inlined_to
)
120 e
->callee
->global
.inlined_to
= inlined_to
;
121 update_inlined_to_pointer (e
->callee
, inlined_to
);
125 /* Add cgraph NODE to queue starting at FIRST.
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
134 enqueue_cgraph_node (struct cgraph_node
*node
, struct cgraph_node
**first
)
136 /* Node is still in queue; do nothing. */
137 if (node
->aux
&& node
->aux
!= (void *) 2)
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node
->aux
== (void *)2 && !node
->reachable
)
147 /* Add varpool NODE to queue starting at FIRST. */
150 enqueue_varpool_node (struct varpool_node
*node
, struct varpool_node
**first
)
156 /* Process references. */
159 process_references (struct ipa_ref_list
*list
,
160 struct cgraph_node
**first
,
161 struct varpool_node
**first_varpool
,
162 bool before_inlining_p
)
166 for (i
= 0; ipa_ref_list_reference_iterate (list
, i
, ref
); i
++)
168 if (ref
->refered_type
== IPA_REF_CGRAPH
)
170 struct cgraph_node
*node
= ipa_ref_node (ref
);
172 && (!DECL_EXTERNAL (node
->decl
)
173 || before_inlining_p
))
175 node
->reachable
= true;
176 enqueue_cgraph_node (node
, first
);
181 struct varpool_node
*node
= ipa_ref_varpool_node (ref
);
184 varpool_mark_needed_node (node
);
185 enqueue_varpool_node (node
, first_varpool
);
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
195 varpool_can_remove_if_no_refs (struct varpool_node
*node
)
197 return (!node
->force_output
&& !node
->used_from_other_partition
198 && (DECL_COMDAT (node
->decl
) || !node
->externally_visible
));
201 /* Return true when function can be marked local. */
204 cgraph_local_node_p (struct cgraph_node
*node
)
206 return (cgraph_only_called_directly_p (node
)
208 && !DECL_EXTERNAL (node
->decl
)
209 && !node
->local
.externally_visible
210 && !node
->reachable_from_other_partition
211 && !node
->in_other_partition
);
214 /* Perform reachability analysis and reclaim all unreachable nodes.
215 If BEFORE_INLINING_P is true this function is called before inlining
216 decisions has been made. If BEFORE_INLINING_P is false this function also
217 removes unneeded bodies of extern inline functions. */
220 cgraph_remove_unreachable_nodes (bool before_inlining_p
, FILE *file
)
222 struct cgraph_node
*first
= (struct cgraph_node
*) (void *) 1;
223 struct varpool_node
*first_varpool
= (struct varpool_node
*) (void *) 1;
224 struct cgraph_node
*node
, *next
;
225 struct varpool_node
*vnode
, *vnext
;
226 bool changed
= false;
228 #ifdef ENABLE_CHECKING
232 fprintf (file
, "\nReclaiming functions:");
233 #ifdef ENABLE_CHECKING
234 for (node
= cgraph_nodes
; node
; node
= node
->next
)
235 gcc_assert (!node
->aux
);
236 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
237 gcc_assert (!vnode
->aux
);
239 varpool_reset_queue ();
240 for (node
= cgraph_nodes
; node
; node
= node
->next
)
241 if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node
)
242 /* Keep around virtual functions for possible devirtualization. */
243 || (!before_inlining_p
244 && !node
->global
.inlined_to
245 && DECL_VIRTUAL_P (node
->decl
)
246 && (DECL_COMDAT (node
->decl
) || DECL_EXTERNAL (node
->decl
))))
247 && ((!DECL_EXTERNAL (node
->decl
))
248 || before_inlining_p
))
250 gcc_assert (!node
->global
.inlined_to
);
251 enqueue_cgraph_node (node
, &first
);
252 node
->reachable
= true;
256 gcc_assert (!node
->aux
);
257 node
->reachable
= false;
259 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
261 vnode
->next_needed
= NULL
;
262 vnode
->prev_needed
= NULL
;
263 if (!varpool_can_remove_if_no_refs (vnode
))
265 vnode
->needed
= false;
266 varpool_mark_needed_node (vnode
);
267 enqueue_varpool_node (vnode
, &first_varpool
);
270 vnode
->needed
= false;
273 /* Perform reachability analysis. As a special case do not consider
274 extern inline functions not inlined as live because we won't output
277 We maintain two worklist, one for cgraph nodes other for varpools and
278 are finished once both are empty. */
280 while (first
!= (struct cgraph_node
*) (void *) 1
281 || first_varpool
!= (struct varpool_node
*) (void *) 1)
283 if (first
!= (struct cgraph_node
*) (void *) 1)
285 struct cgraph_edge
*e
;
287 first
= (struct cgraph_node
*) first
->aux
;
288 if (!node
->reachable
)
289 node
->aux
= (void *)2;
291 /* If we found this node reachable, first mark on the callees
292 reachable too, unless they are direct calls to extern inline functions
293 we decided to not inline. */
296 for (e
= node
->callees
; e
; e
= e
->next_callee
)
297 if (!e
->callee
->reachable
299 && (!e
->inline_failed
|| !e
->callee
->analyzed
300 || (!DECL_EXTERNAL (e
->callee
->decl
))
301 || before_inlining_p
))
303 e
->callee
->reachable
= true;
304 enqueue_cgraph_node (e
->callee
, &first
);
306 process_references (&node
->ref_list
, &first
, &first_varpool
, before_inlining_p
);
309 /* If any function in a comdat group is reachable, force
310 all other functions in the same comdat group to be
312 if (node
->same_comdat_group
314 && !node
->global
.inlined_to
)
316 for (next
= node
->same_comdat_group
;
318 next
= next
->same_comdat_group
)
319 if (!next
->reachable
)
321 next
->reachable
= true;
322 enqueue_cgraph_node (next
, &first
);
326 /* We can freely remove inline clones even if they are cloned, however if
327 function is clone of real clone, we must keep it around in order to
328 make materialize_clones produce function body with the changes
330 while (node
->clone_of
&& !node
->clone_of
->aux
331 && !gimple_has_body_p (node
->decl
))
333 bool noninline
= node
->clone_of
->decl
!= node
->decl
;
334 node
= node
->clone_of
;
335 if (noninline
&& !node
->reachable
&& !node
->aux
)
337 enqueue_cgraph_node (node
, &first
);
342 if (first_varpool
!= (struct varpool_node
*) (void *) 1)
344 vnode
= first_varpool
;
345 first_varpool
= (struct varpool_node
*)first_varpool
->aux
;
347 process_references (&vnode
->ref_list
, &first
, &first_varpool
, before_inlining_p
);
348 /* If any function in a comdat group is reachable, force
349 all other functions in the same comdat group to be
351 if (vnode
->same_comdat_group
)
353 struct varpool_node
*next
;
354 for (next
= vnode
->same_comdat_group
;
356 next
= next
->same_comdat_group
)
359 varpool_mark_needed_node (next
);
360 enqueue_varpool_node (next
, &first_varpool
);
366 /* Remove unreachable nodes.
368 Completely unreachable functions can be fully removed from the callgraph.
369 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
370 callgraph (so we still have edges to them). We remove function body then.
372 Also we need to care functions that are unreachable but we need to keep them around
373 for later clonning. In this case we also turn them to unanalyzed nodes, but
374 keep the body around. */
375 for (node
= cgraph_nodes
; node
; node
= next
)
378 if (node
->aux
&& !node
->reachable
)
380 cgraph_node_remove_callees (node
);
381 ipa_remove_all_references (&node
->ref_list
);
382 node
->analyzed
= false;
383 node
->local
.inlinable
= false;
387 node
->global
.inlined_to
= NULL
;
389 fprintf (file
, " %s", cgraph_node_name (node
));
390 if (!node
->analyzed
|| !DECL_EXTERNAL (node
->decl
) || before_inlining_p
)
391 cgraph_remove_node (node
);
394 struct cgraph_edge
*e
;
396 /* See if there is reachable caller. */
397 for (e
= node
->callers
; e
; e
= e
->next_caller
)
398 if (e
->caller
->reachable
)
401 /* If so, we need to keep node in the callgraph. */
402 if (e
|| node
->needed
)
404 struct cgraph_node
*clone
;
406 /* If there are still clones, we must keep body around.
407 Otherwise we can just remove the body but keep the clone. */
408 for (clone
= node
->clones
; clone
;
409 clone
= clone
->next_sibling_clone
)
414 cgraph_release_function_body (node
);
415 node
->local
.inlinable
= false;
416 if (node
->prev_sibling_clone
)
417 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
418 else if (node
->clone_of
)
419 node
->clone_of
->clones
= node
->next_sibling_clone
;
420 if (node
->next_sibling_clone
)
421 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
422 #ifdef ENABLE_CHECKING
424 node
->former_clone_of
= node
->clone_of
->decl
;
426 node
->clone_of
= NULL
;
427 node
->next_sibling_clone
= NULL
;
428 node
->prev_sibling_clone
= NULL
;
431 gcc_assert (!clone
->in_other_partition
);
432 node
->analyzed
= false;
433 cgraph_node_remove_callees (node
);
434 ipa_remove_all_references (&node
->ref_list
);
437 cgraph_remove_node (node
);
442 for (node
= cgraph_nodes
; node
; node
= node
->next
)
444 /* Inline clones might be kept around so their materializing allows further
445 cloning. If the function the clone is inlined into is removed, we need
446 to turn it into normal clone. */
447 if (node
->global
.inlined_to
450 gcc_assert (node
->clones
);
451 node
->global
.inlined_to
= NULL
;
452 update_inlined_to_pointer (node
, node
);
458 fprintf (file
, "\n");
460 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
461 are undesirable at -O0 since we do not want to remove anything. */
466 fprintf (file
, "Reclaiming variables:");
467 for (vnode
= varpool_nodes
; vnode
; vnode
= vnext
)
473 fprintf (file
, " %s", varpool_node_name (vnode
));
474 varpool_remove_node (vnode
);
479 /* Now update address_taken flags and try to promote functions to be local. */
482 fprintf (file
, "\nClearing address taken flags:");
483 for (node
= cgraph_nodes
; node
; node
= node
->next
)
484 if (node
->address_taken
485 && !node
->reachable_from_other_partition
)
490 for (i
= 0; ipa_ref_list_refering_iterate (&node
->ref_list
, i
, ref
)
493 gcc_assert (ref
->use
== IPA_REF_ADDR
);
499 fprintf (file
, " %s", cgraph_node_name (node
));
500 node
->address_taken
= false;
502 if (cgraph_local_node_p (node
))
504 node
->local
.local
= true;
506 fprintf (file
, " (local)");
511 #ifdef ENABLE_CHECKING
515 /* Reclaim alias pairs for functions that have disappeared from the
517 remove_unreachable_alias_pairs ();
522 /* Discover variables that have no longer address taken or that are read only
523 and update their flags.
525 FIXME: This can not be done in between gimplify and omp_expand since
526 readonly flag plays role on what is shared and what is not. Currently we do
527 this transformation as part of whole program visibility and re-do at
528 ipa-reference pass (to take into account clonning), but it would
529 make sense to do it before early optimizations. */
532 ipa_discover_readonly_nonaddressable_vars (void)
534 struct varpool_node
*vnode
;
536 fprintf (dump_file
, "Clearing variable flags:");
537 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
538 if (vnode
->finalized
&& varpool_all_refs_explicit_p (vnode
)
539 && (TREE_ADDRESSABLE (vnode
->decl
) || !TREE_READONLY (vnode
->decl
)))
541 bool written
= false;
542 bool address_taken
= false;
545 for (i
= 0; ipa_ref_list_refering_iterate (&vnode
->ref_list
, i
, ref
)
546 && (!written
|| !address_taken
); i
++)
550 address_taken
= true;
558 if (TREE_ADDRESSABLE (vnode
->decl
) && !address_taken
)
561 fprintf (dump_file
, " %s (addressable)", varpool_node_name (vnode
));
562 TREE_ADDRESSABLE (vnode
->decl
) = 0;
564 if (!TREE_READONLY (vnode
->decl
) && !address_taken
&& !written
565 /* Making variable in explicit section readonly can cause section
567 See e.g. gcc.c-torture/compile/pr23237.c */
568 && DECL_SECTION_NAME (vnode
->decl
) == NULL
)
571 fprintf (dump_file
, " %s (read-only)", varpool_node_name (vnode
));
572 TREE_READONLY (vnode
->decl
) = 1;
576 fprintf (dump_file
, "\n");
579 /* Return true when function NODE should be considered externally visible. */
582 cgraph_externally_visible_p (struct cgraph_node
*node
, bool whole_program
, bool aliased
)
584 if (!node
->local
.finalized
)
586 if (!DECL_COMDAT (node
->decl
)
587 && (!TREE_PUBLIC (node
->decl
) || DECL_EXTERNAL (node
->decl
)))
590 /* Do not even try to be smart about aliased nodes. Until we properly
591 represent everything by same body alias, these are just evil. */
595 /* If linker counts on us, we must preserve the function. */
596 if (cgraph_used_from_object_file_p (node
))
598 /* When doing link time optimizations, hidden symbols become local. */
600 && (DECL_VISIBILITY (node
->decl
) == VISIBILITY_HIDDEN
601 || DECL_VISIBILITY (node
->decl
) == VISIBILITY_INTERNAL
)
602 /* Be sure that node is defined in IR file, not in other object
603 file. In that case we don't set used_from_other_object_file. */
606 else if (!whole_program
)
608 /* COMDAT functions must be shared only if they have address taken,
609 otherwise we can produce our own private implementation with
611 else if (DECL_COMDAT (node
->decl
))
613 if (node
->address_taken
|| !node
->analyzed
)
615 if (node
->same_comdat_group
)
617 struct cgraph_node
*next
;
619 /* If more than one function is in the same COMDAT group, it must
620 be shared even if just one function in the comdat group has
622 for (next
= node
->same_comdat_group
;
624 next
= next
->same_comdat_group
)
625 if (next
->address_taken
|| !next
->analyzed
)
629 if (DECL_PRESERVE_P (node
->decl
))
631 if (MAIN_NAME_P (DECL_NAME (node
->decl
)))
633 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node
->decl
)))
638 /* Dissolve the same_comdat_group list in which NODE resides. */
641 dissolve_same_comdat_group_list (struct cgraph_node
*node
)
643 struct cgraph_node
*n
= node
, *next
;
646 next
= n
->same_comdat_group
;
647 n
->same_comdat_group
= NULL
;
653 /* Mark visibility of all functions.
655 A local function is one whose calls can occur only in the current
656 compilation unit and all its calls are explicit, so we can change
657 its calling convention. We simply mark all static functions whose
658 address is not taken as local.
660 We also change the TREE_PUBLIC flag of all declarations that are public
661 in language point of view but we want to overwrite this default
662 via visibilities for the backend point of view. */
665 function_and_variable_visibility (bool whole_program
)
667 struct cgraph_node
*node
;
668 struct varpool_node
*vnode
;
669 struct pointer_set_t
*aliased_nodes
= pointer_set_create ();
670 struct pointer_set_t
*aliased_vnodes
= pointer_set_create ();
674 /* Discover aliased nodes. */
675 FOR_EACH_VEC_ELT (alias_pair
, alias_pairs
, i
, p
)
678 fprintf (dump_file
, "Alias %s->%s",
679 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p
->decl
)),
680 IDENTIFIER_POINTER (p
->target
));
682 if ((node
= cgraph_node_for_asm (p
->target
)) != NULL
)
684 gcc_assert (node
->needed
);
685 pointer_set_insert (aliased_nodes
, node
);
687 fprintf (dump_file
, " node %s/%i",
688 cgraph_node_name (node
), node
->uid
);
690 else if ((vnode
= varpool_node_for_asm (p
->target
)) != NULL
)
692 gcc_assert (vnode
->needed
);
693 pointer_set_insert (aliased_vnodes
, vnode
);
695 fprintf (dump_file
, " varpool node %s",
696 varpool_node_name (vnode
));
699 fprintf (dump_file
, "\n");
702 for (node
= cgraph_nodes
; node
; node
= node
->next
)
704 /* C++ FE on lack of COMDAT support create local COMDAT functions
705 (that ought to be shared but can not due to object format
706 limitations). It is necessary to keep the flag to make rest of C++ FE
707 happy. Clear the flag here to avoid confusion in middle-end. */
708 if (DECL_COMDAT (node
->decl
) && !TREE_PUBLIC (node
->decl
))
709 DECL_COMDAT (node
->decl
) = 0;
710 /* For external decls stop tracking same_comdat_group, it doesn't matter
711 what comdat group they are in when they won't be emitted in this TU,
712 and simplifies later passes. */
713 if (node
->same_comdat_group
&& DECL_EXTERNAL (node
->decl
))
715 #ifdef ENABLE_CHECKING
716 struct cgraph_node
*n
;
718 for (n
= node
->same_comdat_group
;
720 n
= n
->same_comdat_group
)
721 /* If at least one of same comdat group functions is external,
722 all of them have to be, otherwise it is a front-end bug. */
723 gcc_assert (DECL_EXTERNAL (n
->decl
));
725 dissolve_same_comdat_group_list (node
);
727 gcc_assert ((!DECL_WEAK (node
->decl
) && !DECL_COMDAT (node
->decl
))
728 || TREE_PUBLIC (node
->decl
) || DECL_EXTERNAL (node
->decl
));
729 if (cgraph_externally_visible_p (node
, whole_program
,
730 pointer_set_contains (aliased_nodes
,
733 gcc_assert (!node
->global
.inlined_to
);
734 node
->local
.externally_visible
= true;
737 node
->local
.externally_visible
= false;
738 if (!node
->local
.externally_visible
&& node
->analyzed
739 && !DECL_EXTERNAL (node
->decl
))
741 struct cgraph_node
*alias
;
742 gcc_assert (whole_program
|| in_lto_p
|| !TREE_PUBLIC (node
->decl
));
743 cgraph_make_decl_local (node
->decl
);
744 node
->resolution
= LDPR_PREVAILING_DEF_IRONLY
;
745 for (alias
= node
->same_body
; alias
; alias
= alias
->next
)
746 cgraph_make_decl_local (alias
->decl
);
747 if (node
->same_comdat_group
)
748 /* cgraph_externally_visible_p has already checked all other nodes
749 in the group and they will all be made local. We need to
750 dissolve the group at once so that the predicate does not
752 dissolve_same_comdat_group_list (node
);
754 node
->local
.local
= cgraph_local_node_p (node
);
756 for (vnode
= varpool_nodes
; vnode
; vnode
= vnode
->next
)
758 /* weak flag makes no sense on local variables. */
759 gcc_assert (!DECL_WEAK (vnode
->decl
)
760 || TREE_PUBLIC (vnode
->decl
) || DECL_EXTERNAL (vnode
->decl
));
761 /* In several cases declarations can not be common:
763 - when declaration has initializer
765 - when it has specific section
766 - when it resides in non-generic address space.
767 - if declaration is local, it will get into .local common section
768 so common flag is not needed. Frontends still produce these in
769 certain cases, such as for:
771 static int a __attribute__ ((common))
773 Canonicalize things here and clear the redundant flag. */
774 if (DECL_COMMON (vnode
->decl
)
775 && (!(TREE_PUBLIC (vnode
->decl
) || DECL_EXTERNAL (vnode
->decl
))
776 || (DECL_INITIAL (vnode
->decl
)
777 && DECL_INITIAL (vnode
->decl
) != error_mark_node
)
778 || DECL_WEAK (vnode
->decl
)
779 || DECL_SECTION_NAME (vnode
->decl
) != NULL
780 || ! (ADDR_SPACE_GENERIC_P
781 (TYPE_ADDR_SPACE (TREE_TYPE (vnode
->decl
))))))
782 DECL_COMMON (vnode
->decl
) = 0;
784 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
786 if (!vnode
->finalized
)
789 && (DECL_COMDAT (vnode
->decl
) || TREE_PUBLIC (vnode
->decl
))
791 /* We can privatize comdat readonly variables whose address is
792 not taken, but doing so is not going to bring us
793 optimization oppurtunities until we start reordering
795 || DECL_COMDAT (vnode
->decl
)
796 || DECL_WEAK (vnode
->decl
))
797 /* When doing linktime optimizations, all hidden symbols will
800 || (DECL_VISIBILITY (vnode
->decl
) != VISIBILITY_HIDDEN
801 && DECL_VISIBILITY (vnode
->decl
) != VISIBILITY_INTERNAL
)
802 /* We can get prevailing decision in other object file.
803 In this case we do not sed used_from_object_file. */
804 || !vnode
->finalized
))
805 || DECL_PRESERVE_P (vnode
->decl
)
806 || varpool_used_from_object_file_p (vnode
)
807 || pointer_set_contains (aliased_vnodes
, vnode
)
808 || lookup_attribute ("externally_visible",
809 DECL_ATTRIBUTES (vnode
->decl
))))
810 vnode
->externally_visible
= true;
812 vnode
->externally_visible
= false;
813 if (!vnode
->externally_visible
)
815 gcc_assert (in_lto_p
|| whole_program
|| !TREE_PUBLIC (vnode
->decl
));
816 cgraph_make_decl_local (vnode
->decl
);
817 vnode
->resolution
= LDPR_PREVAILING_DEF_IRONLY
;
819 gcc_assert (TREE_STATIC (vnode
->decl
));
821 pointer_set_destroy (aliased_nodes
);
822 pointer_set_destroy (aliased_vnodes
);
826 fprintf (dump_file
, "\nMarking local functions:");
827 for (node
= cgraph_nodes
; node
; node
= node
->next
)
828 if (node
->local
.local
)
829 fprintf (dump_file
, " %s", cgraph_node_name (node
));
830 fprintf (dump_file
, "\n\n");
831 fprintf (dump_file
, "\nMarking externally visible functions:");
832 for (node
= cgraph_nodes
; node
; node
= node
->next
)
833 if (node
->local
.externally_visible
)
834 fprintf (dump_file
, " %s", cgraph_node_name (node
));
835 fprintf (dump_file
, "\n\n");
836 fprintf (dump_file
, "\nMarking externally visible variables:");
837 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
838 if (vnode
->externally_visible
)
839 fprintf (dump_file
, " %s", varpool_node_name (vnode
));
840 fprintf (dump_file
, "\n\n");
842 cgraph_function_flags_ready
= true;
846 /* Local function pass handling visibilities. This happens before LTO streaming
847 so in particular -fwhole-program should be ignored at this level. */
850 local_function_and_variable_visibility (void)
852 return function_and_variable_visibility (flag_whole_program
&& !flag_lto
&& !flag_whopr
);
855 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility
=
859 "visibility", /* name */
861 local_function_and_variable_visibility
,/* execute */
864 0, /* static_pass_number */
865 TV_CGRAPHOPT
, /* tv_id */
866 0, /* properties_required */
867 0, /* properties_provided */
868 0, /* properties_destroyed */
869 0, /* todo_flags_start */
870 TODO_remove_functions
| TODO_dump_cgraph
871 | TODO_ggc_collect
/* todo_flags_finish */
875 /* Do not re-run on ltrans stage. */
878 gate_whole_program_function_and_variable_visibility (void)
883 /* Bring functionss local at LTO time whith -fwhole-program. */
886 whole_program_function_and_variable_visibility (void)
888 struct cgraph_node
*node
;
889 struct varpool_node
*vnode
;
891 function_and_variable_visibility (flag_whole_program
);
893 for (node
= cgraph_nodes
; node
; node
= node
->next
)
894 if ((node
->local
.externally_visible
&& !DECL_COMDAT (node
->decl
))
895 && node
->local
.finalized
)
896 cgraph_mark_needed_node (node
);
897 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
898 if (vnode
->externally_visible
&& !DECL_COMDAT (vnode
->decl
))
899 varpool_mark_needed_node (vnode
);
902 fprintf (dump_file
, "\nNeeded variables:");
903 for (vnode
= varpool_nodes_queue
; vnode
; vnode
= vnode
->next_needed
)
905 fprintf (dump_file
, " %s", varpool_node_name (vnode
));
906 fprintf (dump_file
, "\n\n");
909 ipa_discover_readonly_nonaddressable_vars ();
913 struct ipa_opt_pass_d pass_ipa_whole_program_visibility
=
917 "whole-program", /* name */
918 gate_whole_program_function_and_variable_visibility
,/* gate */
919 whole_program_function_and_variable_visibility
,/* execute */
922 0, /* static_pass_number */
923 TV_CGRAPHOPT
, /* tv_id */
924 0, /* properties_required */
925 0, /* properties_provided */
926 0, /* properties_destroyed */
927 0, /* todo_flags_start */
928 TODO_remove_functions
| TODO_dump_cgraph
929 | TODO_ggc_collect
/* todo_flags_finish */
931 NULL
, /* generate_summary */
932 NULL
, /* write_summary */
933 NULL
, /* read_summary */
934 NULL
, /* write_optimization_summary */
935 NULL
, /* read_optimization_summary */
936 NULL
, /* stmt_fixup */
938 NULL
, /* function_transform */
939 NULL
, /* variable_transform */
942 /* Hash a cgraph node set element. */
945 hash_cgraph_node_set_element (const void *p
)
947 const_cgraph_node_set_element element
= (const_cgraph_node_set_element
) p
;
948 return htab_hash_pointer (element
->node
);
951 /* Compare two cgraph node set elements. */
954 eq_cgraph_node_set_element (const void *p1
, const void *p2
)
956 const_cgraph_node_set_element e1
= (const_cgraph_node_set_element
) p1
;
957 const_cgraph_node_set_element e2
= (const_cgraph_node_set_element
) p2
;
959 return e1
->node
== e2
->node
;
962 /* Create a new cgraph node set. */
965 cgraph_node_set_new (void)
967 cgraph_node_set new_node_set
;
969 new_node_set
= ggc_alloc_cgraph_node_set_def ();
970 new_node_set
->hashtab
= htab_create_ggc (10,
971 hash_cgraph_node_set_element
,
972 eq_cgraph_node_set_element
,
974 new_node_set
->nodes
= NULL
;
978 /* Add cgraph_node NODE to cgraph_node_set SET. */
981 cgraph_node_set_add (cgraph_node_set set
, struct cgraph_node
*node
)
984 cgraph_node_set_element element
;
985 struct cgraph_node_set_element_def dummy
;
988 slot
= htab_find_slot (set
->hashtab
, &dummy
, INSERT
);
990 if (*slot
!= HTAB_EMPTY_ENTRY
)
992 element
= (cgraph_node_set_element
) *slot
;
993 gcc_assert (node
== element
->node
994 && (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
999 /* Insert node into hash table. */
1000 element
= ggc_alloc_cgraph_node_set_element_def ();
1001 element
->node
= node
;
1002 element
->index
= VEC_length (cgraph_node_ptr
, set
->nodes
);
1005 /* Insert into node vector. */
1006 VEC_safe_push (cgraph_node_ptr
, gc
, set
->nodes
, node
);
1009 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1012 cgraph_node_set_remove (cgraph_node_set set
, struct cgraph_node
*node
)
1014 void **slot
, **last_slot
;
1015 cgraph_node_set_element element
, last_element
;
1016 struct cgraph_node
*last_node
;
1017 struct cgraph_node_set_element_def dummy
;
1020 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1024 element
= (cgraph_node_set_element
) *slot
;
1025 gcc_assert (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
1028 /* Remove from vector. We do this by swapping node with the last element
1030 last_node
= VEC_pop (cgraph_node_ptr
, set
->nodes
);
1031 if (last_node
!= node
)
1033 dummy
.node
= last_node
;
1034 last_slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1035 last_element
= (cgraph_node_set_element
) *last_slot
;
1036 gcc_assert (last_element
);
1038 /* Move the last element to the original spot of NODE. */
1039 last_element
->index
= element
->index
;
1040 VEC_replace (cgraph_node_ptr
, set
->nodes
, last_element
->index
,
1044 /* Remove element from hash table. */
1045 htab_clear_slot (set
->hashtab
, slot
);
1049 /* Find NODE in SET and return an iterator to it if found. A null iterator
1050 is returned if NODE is not in SET. */
1052 cgraph_node_set_iterator
1053 cgraph_node_set_find (cgraph_node_set set
, struct cgraph_node
*node
)
1056 struct cgraph_node_set_element_def dummy
;
1057 cgraph_node_set_element element
;
1058 cgraph_node_set_iterator csi
;
1061 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1063 csi
.index
= (unsigned) ~0;
1066 element
= (cgraph_node_set_element
) *slot
;
1067 gcc_assert (VEC_index (cgraph_node_ptr
, set
->nodes
, element
->index
)
1069 csi
.index
= element
->index
;
1076 /* Dump content of SET to file F. */
1079 dump_cgraph_node_set (FILE *f
, cgraph_node_set set
)
1081 cgraph_node_set_iterator iter
;
1083 for (iter
= csi_start (set
); !csi_end_p (iter
); csi_next (&iter
))
1085 struct cgraph_node
*node
= csi_node (iter
);
1086 fprintf (f
, " %s/%i", cgraph_node_name (node
), node
->uid
);
1091 /* Dump content of SET to stderr. */
1094 debug_cgraph_node_set (cgraph_node_set set
)
1096 dump_cgraph_node_set (stderr
, set
);
1099 /* Hash a varpool node set element. */
1102 hash_varpool_node_set_element (const void *p
)
1104 const_varpool_node_set_element element
= (const_varpool_node_set_element
) p
;
1105 return htab_hash_pointer (element
->node
);
1108 /* Compare two varpool node set elements. */
1111 eq_varpool_node_set_element (const void *p1
, const void *p2
)
1113 const_varpool_node_set_element e1
= (const_varpool_node_set_element
) p1
;
1114 const_varpool_node_set_element e2
= (const_varpool_node_set_element
) p2
;
1116 return e1
->node
== e2
->node
;
1119 /* Create a new varpool node set. */
1122 varpool_node_set_new (void)
1124 varpool_node_set new_node_set
;
1126 new_node_set
= ggc_alloc_varpool_node_set_def ();
1127 new_node_set
->hashtab
= htab_create_ggc (10,
1128 hash_varpool_node_set_element
,
1129 eq_varpool_node_set_element
,
1131 new_node_set
->nodes
= NULL
;
1132 return new_node_set
;
1135 /* Add varpool_node NODE to varpool_node_set SET. */
1138 varpool_node_set_add (varpool_node_set set
, struct varpool_node
*node
)
1141 varpool_node_set_element element
;
1142 struct varpool_node_set_element_def dummy
;
1145 slot
= htab_find_slot (set
->hashtab
, &dummy
, INSERT
);
1147 if (*slot
!= HTAB_EMPTY_ENTRY
)
1149 element
= (varpool_node_set_element
) *slot
;
1150 gcc_assert (node
== element
->node
1151 && (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1156 /* Insert node into hash table. */
1157 element
= ggc_alloc_varpool_node_set_element_def ();
1158 element
->node
= node
;
1159 element
->index
= VEC_length (varpool_node_ptr
, set
->nodes
);
1162 /* Insert into node vector. */
1163 VEC_safe_push (varpool_node_ptr
, gc
, set
->nodes
, node
);
1166 /* Remove varpool_node NODE from varpool_node_set SET. */
1169 varpool_node_set_remove (varpool_node_set set
, struct varpool_node
*node
)
1171 void **slot
, **last_slot
;
1172 varpool_node_set_element element
, last_element
;
1173 struct varpool_node
*last_node
;
1174 struct varpool_node_set_element_def dummy
;
1177 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1181 element
= (varpool_node_set_element
) *slot
;
1182 gcc_assert (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1185 /* Remove from vector. We do this by swapping node with the last element
1187 last_node
= VEC_pop (varpool_node_ptr
, set
->nodes
);
1188 if (last_node
!= node
)
1190 dummy
.node
= last_node
;
1191 last_slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1192 last_element
= (varpool_node_set_element
) *last_slot
;
1193 gcc_assert (last_element
);
1195 /* Move the last element to the original spot of NODE. */
1196 last_element
->index
= element
->index
;
1197 VEC_replace (varpool_node_ptr
, set
->nodes
, last_element
->index
,
1201 /* Remove element from hash table. */
1202 htab_clear_slot (set
->hashtab
, slot
);
1206 /* Find NODE in SET and return an iterator to it if found. A null iterator
1207 is returned if NODE is not in SET. */
1209 varpool_node_set_iterator
1210 varpool_node_set_find (varpool_node_set set
, struct varpool_node
*node
)
1213 struct varpool_node_set_element_def dummy
;
1214 varpool_node_set_element element
;
1215 varpool_node_set_iterator vsi
;
1218 slot
= htab_find_slot (set
->hashtab
, &dummy
, NO_INSERT
);
1220 vsi
.index
= (unsigned) ~0;
1223 element
= (varpool_node_set_element
) *slot
;
1224 gcc_assert (VEC_index (varpool_node_ptr
, set
->nodes
, element
->index
)
1226 vsi
.index
= element
->index
;
1233 /* Dump content of SET to file F. */
1236 dump_varpool_node_set (FILE *f
, varpool_node_set set
)
1238 varpool_node_set_iterator iter
;
1240 for (iter
= vsi_start (set
); !vsi_end_p (iter
); vsi_next (&iter
))
1242 struct varpool_node
*node
= vsi_node (iter
);
1243 fprintf (f
, " %s", varpool_node_name (node
));
1248 /* Dump content of SET to stderr. */
1251 debug_varpool_node_set (varpool_node_set set
)
1253 dump_varpool_node_set (stderr
, set
);
1257 /* Simple ipa profile pass propagating frequencies across the callgraph. */
1262 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1263 struct cgraph_edge
*e
;
1265 bool something_changed
= false;
1268 order_pos
= cgraph_postorder (order
);
1269 for (i
= order_pos
- 1; i
>= 0; i
--)
1271 if (order
[i
]->local
.local
&& cgraph_propagate_frequency (order
[i
]))
1273 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1274 if (e
->callee
->local
.local
&& !e
->callee
->aux
)
1276 something_changed
= true;
1277 e
->callee
->aux
= (void *)1;
1280 order
[i
]->aux
= NULL
;
1283 while (something_changed
)
1285 something_changed
= false;
1286 for (i
= order_pos
- 1; i
>= 0; i
--)
1288 if (order
[i
]->aux
&& cgraph_propagate_frequency (order
[i
]))
1290 for (e
= order
[i
]->callees
; e
; e
= e
->next_callee
)
1291 if (e
->callee
->local
.local
&& !e
->callee
->aux
)
1293 something_changed
= true;
1294 e
->callee
->aux
= (void *)1;
1297 order
[i
]->aux
= NULL
;
1305 gate_ipa_profile (void)
1307 return flag_ipa_profile
;
1310 struct ipa_opt_pass_d pass_ipa_profile
=
1314 "ipa-profile", /* name */
1315 gate_ipa_profile
, /* gate */
1316 ipa_profile
, /* execute */
1319 0, /* static_pass_number */
1320 TV_IPA_PROFILE
, /* tv_id */
1321 0, /* properties_required */
1322 0, /* properties_provided */
1323 0, /* properties_destroyed */
1324 0, /* todo_flags_start */
1325 0 /* todo_flags_finish */
1327 NULL
, /* generate_summary */
1328 NULL
, /* write_summary */
1329 NULL
, /* read_summary */
1330 NULL
, /* write_optimization_summary */
1331 NULL
, /* read_optimization_summary */
1332 NULL
, /* stmt_fixup */
1334 NULL
, /* function_transform */
1335 NULL
/* variable_transform */
1338 /* Generate and emit a static constructor or destructor. WHICH must
1339 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1340 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1341 initialization priority for this constructor or destructor. */
1344 cgraph_build_static_cdtor (char which
, tree body
, int priority
)
1346 static int counter
= 0;
1348 tree decl
, name
, resdecl
;
1350 /* The priority is encoded in the constructor or destructor name.
1351 collect2 will sort the names and arrange that they are called at
1353 sprintf (which_buf
, "%c_%.5d_%d", which
, priority
, counter
++);
1354 name
= get_file_function_name (which_buf
);
1356 decl
= build_decl (input_location
, FUNCTION_DECL
, name
,
1357 build_function_type_list (void_type_node
, NULL_TREE
));
1358 current_function_decl
= decl
;
1360 resdecl
= build_decl (input_location
,
1361 RESULT_DECL
, NULL_TREE
, void_type_node
);
1362 DECL_ARTIFICIAL (resdecl
) = 1;
1363 DECL_RESULT (decl
) = resdecl
;
1364 DECL_CONTEXT (resdecl
) = decl
;
1366 allocate_struct_function (decl
, false);
1368 TREE_STATIC (decl
) = 1;
1369 TREE_USED (decl
) = 1;
1370 DECL_ARTIFICIAL (decl
) = 1;
1371 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl
) = 1;
1372 DECL_SAVED_TREE (decl
) = body
;
1373 if (!targetm
.have_ctors_dtors
)
1375 TREE_PUBLIC (decl
) = 1;
1376 DECL_PRESERVE_P (decl
) = 1;
1378 DECL_UNINLINABLE (decl
) = 1;
1380 DECL_INITIAL (decl
) = make_node (BLOCK
);
1381 TREE_USED (DECL_INITIAL (decl
)) = 1;
1383 DECL_SOURCE_LOCATION (decl
) = input_location
;
1384 cfun
->function_end_locus
= input_location
;
1389 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1390 decl_init_priority_insert (decl
, priority
);
1393 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1394 decl_fini_priority_insert (decl
, priority
);
1400 gimplify_function_tree (decl
);
1402 cgraph_add_new_function (decl
, false);
1405 current_function_decl
= NULL
;
1409 /* A vector of FUNCTION_DECLs declared as static constructors. */
1410 static VEC(tree
, heap
) *static_ctors
;
1411 /* A vector of FUNCTION_DECLs declared as static destructors. */
1412 static VEC(tree
, heap
) *static_dtors
;
1414 /* When target does not have ctors and dtors, we call all constructor
1415 and destructor by special initialization/destruction function
1416 recognized by collect2.
1418 When we are going to build this function, collect all constructors and
1419 destructors and turn them into normal functions. */
1422 record_cdtor_fn (struct cgraph_node
*node
)
1424 if (DECL_STATIC_CONSTRUCTOR (node
->decl
))
1425 VEC_safe_push (tree
, heap
, static_ctors
, node
->decl
);
1426 if (DECL_STATIC_DESTRUCTOR (node
->decl
))
1427 VEC_safe_push (tree
, heap
, static_dtors
, node
->decl
);
1428 node
= cgraph_node (node
->decl
);
1429 node
->local
.disregard_inline_limits
= 1;
1432 /* Define global constructors/destructor functions for the CDTORS, of
1433 which they are LEN. The CDTORS are sorted by initialization
1434 priority. If CTOR_P is true, these are constructors; otherwise,
1435 they are destructors. */
1438 build_cdtor (bool ctor_p
, VEC (tree
, heap
) *cdtors
)
1441 size_t len
= VEC_length (tree
, cdtors
);
1448 priority_type priority
;
1456 fn
= VEC_index (tree
, cdtors
, j
);
1457 p
= ctor_p
? DECL_INIT_PRIORITY (fn
) : DECL_FINI_PRIORITY (fn
);
1460 else if (p
!= priority
)
1466 /* When there is only one cdtor and target supports them, do nothing. */
1468 && targetm
.have_ctors_dtors
)
1473 /* Find the next batch of constructors/destructors with the same
1474 initialization priority. */
1478 fn
= VEC_index (tree
, cdtors
, i
);
1479 call
= build_call_expr (fn
, 0);
1481 DECL_STATIC_CONSTRUCTOR (fn
) = 0;
1483 DECL_STATIC_DESTRUCTOR (fn
) = 0;
1484 /* We do not want to optimize away pure/const calls here.
1485 When optimizing, these should be already removed, when not
1486 optimizing, we want user to be able to breakpoint in them. */
1487 TREE_SIDE_EFFECTS (call
) = 1;
1488 append_to_statement_list (call
, &body
);
1491 gcc_assert (body
!= NULL_TREE
);
1492 /* Generate a function to call all the function of like
1494 cgraph_build_static_cdtor (ctor_p
? 'I' : 'D', body
, priority
);
1498 /* Comparison function for qsort. P1 and P2 are actually of type
1499 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1500 used to determine the sort order. */
1503 compare_ctor (const void *p1
, const void *p2
)
1510 f1
= *(const tree
*)p1
;
1511 f2
= *(const tree
*)p2
;
1512 priority1
= DECL_INIT_PRIORITY (f1
);
1513 priority2
= DECL_INIT_PRIORITY (f2
);
1515 if (priority1
< priority2
)
1517 else if (priority1
> priority2
)
1520 /* Ensure a stable sort. Constructors are executed in backwarding
1521 order to make LTO initialize braries first. */
1522 return DECL_UID (f2
) - DECL_UID (f1
);
1525 /* Comparison function for qsort. P1 and P2 are actually of type
1526 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1527 used to determine the sort order. */
1530 compare_dtor (const void *p1
, const void *p2
)
1537 f1
= *(const tree
*)p1
;
1538 f2
= *(const tree
*)p2
;
1539 priority1
= DECL_FINI_PRIORITY (f1
);
1540 priority2
= DECL_FINI_PRIORITY (f2
);
1542 if (priority1
< priority2
)
1544 else if (priority1
> priority2
)
1547 /* Ensure a stable sort. */
1548 return DECL_UID (f1
) - DECL_UID (f2
);
1551 /* Generate functions to call static constructors and destructors
1552 for targets that do not support .ctors/.dtors sections. These
1553 functions have magic names which are detected by collect2. */
1556 build_cdtor_fns (void)
1558 if (!VEC_empty (tree
, static_ctors
))
1560 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1561 qsort (VEC_address (tree
, static_ctors
),
1562 VEC_length (tree
, static_ctors
),
1565 build_cdtor (/*ctor_p=*/true, static_ctors
);
1568 if (!VEC_empty (tree
, static_dtors
))
1570 gcc_assert (!targetm
.have_ctors_dtors
|| in_lto_p
);
1571 qsort (VEC_address (tree
, static_dtors
),
1572 VEC_length (tree
, static_dtors
),
1575 build_cdtor (/*ctor_p=*/false, static_dtors
);
1579 /* Look for constructors and destructors and produce function calling them.
1580 This is needed for targets not supporting ctors or dtors, but we perform the
1581 transformation also at linktime to merge possibly numberous
1582 constructors/destructors into single function to improve code locality and
1586 ipa_cdtor_merge (void)
1588 struct cgraph_node
*node
;
1589 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1591 && (DECL_STATIC_CONSTRUCTOR (node
->decl
)
1592 || DECL_STATIC_DESTRUCTOR (node
->decl
)))
1593 record_cdtor_fn (node
);
1595 VEC_free (tree
, heap
, static_ctors
);
1596 VEC_free (tree
, heap
, static_dtors
);
1600 /* Perform the pass when we have no ctors/dtors support
1601 or at LTO time to merge multiple constructors into single
1605 gate_ipa_cdtor_merge (void)
1607 return !targetm
.have_ctors_dtors
|| (optimize
&& in_lto_p
);
1610 struct ipa_opt_pass_d pass_ipa_cdtor_merge
=
1615 gate_ipa_cdtor_merge
, /* gate */
1616 ipa_cdtor_merge
, /* execute */
1619 0, /* static_pass_number */
1620 TV_CGRAPHOPT
, /* tv_id */
1621 0, /* properties_required */
1622 0, /* properties_provided */
1623 0, /* properties_destroyed */
1624 0, /* todo_flags_start */
1625 0 /* todo_flags_finish */
1627 NULL
, /* generate_summary */
1628 NULL
, /* write_summary */
1629 NULL
, /* read_summary */
1630 NULL
, /* write_optimization_summary */
1631 NULL
, /* read_optimization_summary */
1632 NULL
, /* stmt_fixup */
1634 NULL
, /* function_transform */
1635 NULL
/* variable_transform */