/* Top-level LTO routines.
   Copyright (C) 2009-2013 Free Software Foundation, Inc.
   Contributed by CodeSourcery, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
37 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
49 static GTY(()) tree first_personality_decl
;
51 /* Returns a hash code for P. */
54 hash_name (const void *p
)
56 const struct lto_section_slot
*ds
= (const struct lto_section_slot
*) p
;
57 return (hashval_t
) htab_hash_string (ds
->name
);
61 /* Returns nonzero if P1 and P2 are equal. */
64 eq_name (const void *p1
, const void *p2
)
66 const struct lto_section_slot
*s1
=
67 (const struct lto_section_slot
*) p1
;
68 const struct lto_section_slot
*s2
=
69 (const struct lto_section_slot
*) p2
;
71 return strcmp (s1
->name
, s2
->name
) == 0;
74 /* Free lto_section_slot */
77 free_with_string (void *arg
)
79 struct lto_section_slot
*s
= (struct lto_section_slot
*)arg
;
81 free (CONST_CAST (char *, s
->name
));
85 /* Create section hash table */
88 lto_obj_create_section_hash_table (void)
90 return htab_create (37, hash_name
, eq_name
, free_with_string
);
93 /* Delete an allocated integer KEY in the splay tree. */
96 lto_splay_tree_delete_id (splay_tree_key key
)
101 /* Compare splay tree node ids A and B. */
104 lto_splay_tree_compare_ids (splay_tree_key a
, splay_tree_key b
)
106 unsigned HOST_WIDE_INT ai
;
107 unsigned HOST_WIDE_INT bi
;
109 ai
= *(unsigned HOST_WIDE_INT
*) a
;
110 bi
= *(unsigned HOST_WIDE_INT
*) b
;
119 /* Look up splay tree node by ID in splay tree T. */
121 static splay_tree_node
122 lto_splay_tree_lookup (splay_tree t
, unsigned HOST_WIDE_INT id
)
124 return splay_tree_lookup (t
, (splay_tree_key
) &id
);
127 /* Check if KEY has ID. */
130 lto_splay_tree_id_equal_p (splay_tree_key key
, unsigned HOST_WIDE_INT id
)
132 return *(unsigned HOST_WIDE_INT
*) key
== id
;
135 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
136 The ID is allocated separately because we need HOST_WIDE_INTs which may
137 be wider than a splay_tree_key. */
140 lto_splay_tree_insert (splay_tree t
, unsigned HOST_WIDE_INT id
,
141 struct lto_file_decl_data
*file_data
)
143 unsigned HOST_WIDE_INT
*idp
= XCNEW (unsigned HOST_WIDE_INT
);
145 splay_tree_insert (t
, (splay_tree_key
) idp
, (splay_tree_value
) file_data
);
148 /* Create a splay tree. */
151 lto_splay_tree_new (void)
153 return splay_tree_new (lto_splay_tree_compare_ids
,
154 lto_splay_tree_delete_id
,
158 /* Return true when NODE has a clone that is analyzed (i.e. we need
159 to load its body even if the node itself is not needed). */
162 has_analyzed_clone_p (struct cgraph_node
*node
)
164 struct cgraph_node
*orig
= node
;
173 else if (node
->next_sibling_clone
)
174 node
= node
->next_sibling_clone
;
177 while (node
!= orig
&& !node
->next_sibling_clone
)
178 node
= node
->clone_of
;
180 node
= node
->next_sibling_clone
;
186 /* Read the function body for the function associated with NODE. */
189 lto_materialize_function (struct cgraph_node
*node
)
192 struct lto_file_decl_data
*file_data
;
193 const char *data
, *name
;
196 decl
= node
->symbol
.decl
;
197 /* Read in functions with body (analyzed nodes)
198 and also functions that are needed to produce virtual clones. */
199 if (cgraph_function_with_gimple_body_p (node
) || has_analyzed_clone_p (node
))
201 /* Clones don't need to be read. */
205 /* Load the function body only if not operating in WPA mode. In
206 WPA mode, the body of the function is not needed. */
209 file_data
= node
->symbol
.lto_file_data
;
210 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
212 /* We may have renamed the declaration, e.g., a static function. */
213 name
= lto_get_decl_name_mapping (file_data
, name
);
215 data
= lto_get_section_data (file_data
, LTO_section_function_body
,
218 fatal_error ("%s: section %s is missing",
219 file_data
->file_name
,
222 gcc_assert (DECL_STRUCT_FUNCTION (decl
) == NULL
);
224 push_struct_function (decl
);
225 announce_function (decl
);
226 lto_input_function_body (file_data
, decl
, data
);
227 if (DECL_FUNCTION_PERSONALITY (decl
) && !first_personality_decl
)
228 first_personality_decl
= DECL_FUNCTION_PERSONALITY (decl
);
229 lto_stats
.num_function_bodies
++;
230 lto_free_section_data (file_data
, LTO_section_function_body
, name
,
237 /* Let the middle end know about the function. */
238 rest_of_decl_compilation (decl
, 1, 0);
242 /* Decode the content of memory pointed to by DATA in the in decl
243 state object STATE. DATA_IN points to a data_in structure for
244 decoding. Return the address after the decoded object in the
247 static const uint32_t *
248 lto_read_in_decl_state (struct data_in
*data_in
, const uint32_t *data
,
249 struct lto_in_decl_state
*state
)
256 decl
= streamer_tree_cache_get (data_in
->reader_cache
, ix
);
257 if (TREE_CODE (decl
) != FUNCTION_DECL
)
259 gcc_assert (decl
== void_type_node
);
262 state
->fn_decl
= decl
;
264 for (i
= 0; i
< LTO_N_DECL_STREAMS
; i
++)
266 uint32_t size
= *data
++;
267 tree
*decls
= ggc_alloc_vec_tree (size
);
269 for (j
= 0; j
< size
; j
++)
270 decls
[j
] = streamer_tree_cache_get (data_in
->reader_cache
, data
[j
]);
272 state
->streams
[i
].size
= size
;
273 state
->streams
[i
].trees
= decls
;
282 /* Global type table. FIXME, it should be possible to re-use some
283 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
284 etc), but those assume that types were built with the various
285 build_*_type routines which is not the case with the streamer. */
286 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
288 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map
)))
289 htab_t type_hash_cache
;
291 static hashval_t
gimple_type_hash (const void *);
293 /* Structure used to maintain a cache of some type pairs compared by
294 gimple_types_compatible_p when comparing aggregate types. There are
295 three possible values for SAME_P:
297 -2: The pair (T1, T2) has just been inserted in the table.
298 0: T1 and T2 are different types.
299 1: T1 and T2 are the same type. */
307 typedef struct type_pair_d
*type_pair_t
;
309 #define GIMPLE_TYPE_PAIR_SIZE 16381
310 struct type_pair_d
*type_pair_cache
;
313 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
314 entry if none existed. */
316 static inline type_pair_t
317 lookup_type_pair (tree t1
, tree t2
)
320 unsigned int uid1
, uid2
;
322 if (TYPE_UID (t1
) < TYPE_UID (t2
))
324 uid1
= TYPE_UID (t1
);
325 uid2
= TYPE_UID (t2
);
329 uid1
= TYPE_UID (t2
);
330 uid2
= TYPE_UID (t1
);
332 gcc_checking_assert (uid1
!= uid2
);
334 /* iterative_hash_hashval_t imply an function calls.
335 We know that UIDS are in limited range. */
336 index
= ((((unsigned HOST_WIDE_INT
)uid1
<< HOST_BITS_PER_WIDE_INT
/ 2) + uid2
)
337 % GIMPLE_TYPE_PAIR_SIZE
);
338 if (type_pair_cache
[index
].uid1
== uid1
339 && type_pair_cache
[index
].uid2
== uid2
)
340 return &type_pair_cache
[index
];
342 type_pair_cache
[index
].uid1
= uid1
;
343 type_pair_cache
[index
].uid2
= uid2
;
344 type_pair_cache
[index
].same_p
= -2;
346 return &type_pair_cache
[index
];
349 /* Per pointer state for the SCC finding. The on_sccstack flag
350 is not strictly required, it is true when there is no hash value
351 recorded for the type and false otherwise. But querying that
365 static unsigned int next_dfs_num
;
366 static unsigned int gtc_next_dfs_num
;
368 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
370 typedef struct GTY(()) gimple_type_leader_entry_s
{
373 } gimple_type_leader_entry
;
375 #define GIMPLE_TYPE_LEADER_SIZE 16381
376 static GTY((length("GIMPLE_TYPE_LEADER_SIZE")))
377 gimple_type_leader_entry
*gimple_type_leader
;
379 /* Lookup an existing leader for T and return it or NULL_TREE, if
380 there is none in the cache. */
383 gimple_lookup_type_leader (tree t
)
385 gimple_type_leader_entry
*leader
;
387 leader
= &gimple_type_leader
[TYPE_UID (t
) % GIMPLE_TYPE_LEADER_SIZE
];
388 if (leader
->type
!= t
)
391 return leader
->leader
;
395 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
396 true then if any type has no name return false, otherwise return
397 true if both types have no names. */
400 compare_type_names_p (tree t1
, tree t2
)
402 tree name1
= TYPE_NAME (t1
);
403 tree name2
= TYPE_NAME (t2
);
405 if ((name1
!= NULL_TREE
) != (name2
!= NULL_TREE
))
408 if (name1
== NULL_TREE
)
411 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
412 if (TREE_CODE (name1
) != TREE_CODE (name2
))
415 if (TREE_CODE (name1
) == TYPE_DECL
)
416 name1
= DECL_NAME (name1
);
417 gcc_checking_assert (!name1
|| TREE_CODE (name1
) == IDENTIFIER_NODE
);
419 if (TREE_CODE (name2
) == TYPE_DECL
)
420 name2
= DECL_NAME (name2
);
421 gcc_checking_assert (!name2
|| TREE_CODE (name2
) == IDENTIFIER_NODE
);
423 /* Identifiers can be compared with pointer equality rather
424 than a string comparison. */
432 gimple_types_compatible_p_1 (tree
, tree
, type_pair_t
,
434 struct pointer_map_t
*, struct obstack
*);
436 /* DFS visit the edge from the callers type pair with state *STATE to
437 the pair T1, T2 while operating in FOR_MERGING_P mode.
438 Update the merging status if it is not part of the SCC containing the
439 callers pair and return it.
440 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
443 gtc_visit (tree t1
, tree t2
,
445 vec
<type_pair_t
> *sccstack
,
446 struct pointer_map_t
*sccstate
,
447 struct obstack
*sccstate_obstack
)
449 struct sccs
*cstate
= NULL
;
452 tree leader1
, leader2
;
454 /* Check first for the obvious case of pointer identity. */
458 /* Check that we have two types to compare. */
459 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
462 /* Can't be the same type if the types don't have the same code. */
463 if (TREE_CODE (t1
) != TREE_CODE (t2
))
466 /* Can't be the same type if they have different CV qualifiers. */
467 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
470 if (TREE_ADDRESSABLE (t1
) != TREE_ADDRESSABLE (t2
))
473 /* Void types and nullptr types are always the same. */
474 if (TREE_CODE (t1
) == VOID_TYPE
475 || TREE_CODE (t1
) == NULLPTR_TYPE
)
478 /* Can't be the same type if they have different alignment or mode. */
479 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
)
480 || TYPE_MODE (t1
) != TYPE_MODE (t2
))
483 /* Do some simple checks before doing three hashtable queries. */
484 if (INTEGRAL_TYPE_P (t1
)
485 || SCALAR_FLOAT_TYPE_P (t1
)
486 || FIXED_POINT_TYPE_P (t1
)
487 || TREE_CODE (t1
) == VECTOR_TYPE
488 || TREE_CODE (t1
) == COMPLEX_TYPE
489 || TREE_CODE (t1
) == OFFSET_TYPE
490 || POINTER_TYPE_P (t1
))
492 /* Can't be the same type if they have different sign or precision. */
493 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
)
494 || TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
497 if (TREE_CODE (t1
) == INTEGER_TYPE
498 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
501 /* That's all we need to check for float and fixed-point types. */
502 if (SCALAR_FLOAT_TYPE_P (t1
)
503 || FIXED_POINT_TYPE_P (t1
))
506 /* For other types fall through to more complex checks. */
509 /* If the types have been previously registered and found equal
511 leader1
= gimple_lookup_type_leader (t1
);
512 leader2
= gimple_lookup_type_leader (t2
);
515 || (leader1
&& leader1
== leader2
))
518 /* If the hash values of t1 and t2 are different the types can't
519 possibly be the same. This helps keeping the type-pair hashtable
520 small, only tracking comparisons for hash collisions. */
521 if (gimple_type_hash (t1
) != gimple_type_hash (t2
))
524 /* Allocate a new cache entry for this comparison. */
525 p
= lookup_type_pair (t1
, t2
);
526 if (p
->same_p
== 0 || p
->same_p
== 1)
528 /* We have already decided whether T1 and T2 are the
529 same, return the cached result. */
530 return p
->same_p
== 1;
533 if ((slot
= pointer_map_contains (sccstate
, p
)) != NULL
)
534 cstate
= (struct sccs
*)*slot
;
535 /* Not yet visited. DFS recurse. */
538 gimple_types_compatible_p_1 (t1
, t2
, p
,
539 sccstack
, sccstate
, sccstate_obstack
);
540 cstate
= (struct sccs
*)* pointer_map_contains (sccstate
, p
);
541 state
->low
= MIN (state
->low
, cstate
->low
);
543 /* If the type is still on the SCC stack adjust the parents low. */
544 if (cstate
->dfsnum
< state
->dfsnum
545 && cstate
->on_sccstack
)
546 state
->low
= MIN (cstate
->dfsnum
, state
->low
);
548 /* Return the current lattice value. We start with an equality
549 assumption so types part of a SCC will be optimistically
550 treated equal unless proven otherwise. */
551 return cstate
->u
.same_p
;
554 /* Worker for gimple_types_compatible.
555 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
558 gimple_types_compatible_p_1 (tree t1
, tree t2
, type_pair_t p
,
559 vec
<type_pair_t
> *sccstack
,
560 struct pointer_map_t
*sccstate
,
561 struct obstack
*sccstate_obstack
)
565 gcc_assert (p
->same_p
== -2);
567 state
= XOBNEW (sccstate_obstack
, struct sccs
);
568 *pointer_map_insert (sccstate
, p
) = state
;
570 sccstack
->safe_push (p
);
571 state
->dfsnum
= gtc_next_dfs_num
++;
572 state
->low
= state
->dfsnum
;
573 state
->on_sccstack
= true;
574 /* Start with an equality assumption. As we DFS recurse into child
575 SCCs this assumption may get revisited. */
578 /* The struct tags shall compare equal. */
579 if (!compare_type_names_p (t1
, t2
))
580 goto different_types
;
582 /* The main variant of both types should compare equal. */
583 if (TYPE_MAIN_VARIANT (t1
) != t1
584 || TYPE_MAIN_VARIANT (t2
) != t2
)
586 if (!gtc_visit (TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
),
587 state
, sccstack
, sccstate
, sccstate_obstack
))
588 goto different_types
;
591 /* We may not merge typedef types to the same type in different
594 && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
595 && DECL_CONTEXT (TYPE_NAME (t1
))
596 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1
))))
598 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1
)),
599 DECL_CONTEXT (TYPE_NAME (t2
)),
600 state
, sccstack
, sccstate
, sccstate_obstack
))
601 goto different_types
;
604 /* If their attributes are not the same they can't be the same type. */
605 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1
), TYPE_ATTRIBUTES (t2
)))
606 goto different_types
;
608 /* Do type-specific comparisons. */
609 switch (TREE_CODE (t1
))
613 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
614 state
, sccstack
, sccstate
, sccstate_obstack
))
615 goto different_types
;
619 /* Array types are the same if the element types are the same and
620 the number of elements are the same. */
621 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
622 state
, sccstack
, sccstate
, sccstate_obstack
)
623 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
624 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
625 goto different_types
;
628 tree i1
= TYPE_DOMAIN (t1
);
629 tree i2
= TYPE_DOMAIN (t2
);
631 /* For an incomplete external array, the type domain can be
632 NULL_TREE. Check this condition also. */
633 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
635 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
636 goto different_types
;
639 tree min1
= TYPE_MIN_VALUE (i1
);
640 tree min2
= TYPE_MIN_VALUE (i2
);
641 tree max1
= TYPE_MAX_VALUE (i1
);
642 tree max2
= TYPE_MAX_VALUE (i2
);
644 /* The minimum/maximum values have to be the same. */
647 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
648 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
649 || operand_equal_p (min1
, min2
, 0))))
652 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
653 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
654 || operand_equal_p (max1
, max2
, 0)))))
657 goto different_types
;
662 /* Method types should belong to the same class. */
663 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1
), TYPE_METHOD_BASETYPE (t2
),
664 state
, sccstack
, sccstate
, sccstate_obstack
))
665 goto different_types
;
670 /* Function types are the same if the return type and arguments types
672 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
673 state
, sccstack
, sccstate
, sccstate_obstack
))
674 goto different_types
;
676 if (!comp_type_attributes (t1
, t2
))
677 goto different_types
;
679 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
685 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
687 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
689 if (!gtc_visit (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
690 state
, sccstack
, sccstate
, sccstate_obstack
))
691 goto different_types
;
694 if (parms1
|| parms2
)
695 goto different_types
;
702 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
703 state
, sccstack
, sccstate
, sccstate_obstack
)
704 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1
),
705 TYPE_OFFSET_BASETYPE (t2
),
706 state
, sccstack
, sccstate
, sccstate_obstack
))
707 goto different_types
;
715 /* If the two pointers have different ref-all attributes,
716 they can't be the same type. */
717 if (TYPE_REF_CAN_ALIAS_ALL (t1
) != TYPE_REF_CAN_ALIAS_ALL (t2
))
718 goto different_types
;
720 /* Otherwise, pointer and reference types are the same if the
721 pointed-to types are the same. */
722 if (gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
723 state
, sccstack
, sccstate
, sccstate_obstack
))
726 goto different_types
;
732 tree min1
= TYPE_MIN_VALUE (t1
);
733 tree max1
= TYPE_MAX_VALUE (t1
);
734 tree min2
= TYPE_MIN_VALUE (t2
);
735 tree max2
= TYPE_MAX_VALUE (t2
);
736 bool min_equal_p
= false;
737 bool max_equal_p
= false;
739 /* If either type has a minimum value, the other type must
741 if (min1
== NULL_TREE
&& min2
== NULL_TREE
)
743 else if (min1
&& min2
&& operand_equal_p (min1
, min2
, 0))
746 /* Likewise, if either type has a maximum value, the other
747 type must have the same. */
748 if (max1
== NULL_TREE
&& max2
== NULL_TREE
)
750 else if (max1
&& max2
&& operand_equal_p (max1
, max2
, 0))
753 if (!min_equal_p
|| !max_equal_p
)
754 goto different_types
;
761 /* FIXME lto, we cannot check bounds on enumeral types because
762 different front ends will produce different values.
763 In C, enumeral types are integers, while in C++ each element
764 will have its own symbolic value. We should decide how enums
765 are to be represented in GIMPLE and have each front end lower
769 /* For enumeral types, all the values must be the same. */
770 if (TYPE_VALUES (t1
) == TYPE_VALUES (t2
))
773 for (v1
= TYPE_VALUES (t1
), v2
= TYPE_VALUES (t2
);
775 v1
= TREE_CHAIN (v1
), v2
= TREE_CHAIN (v2
))
777 tree c1
= TREE_VALUE (v1
);
778 tree c2
= TREE_VALUE (v2
);
780 if (TREE_CODE (c1
) == CONST_DECL
)
781 c1
= DECL_INITIAL (c1
);
783 if (TREE_CODE (c2
) == CONST_DECL
)
784 c2
= DECL_INITIAL (c2
);
786 if (tree_int_cst_equal (c1
, c2
) != 1)
787 goto different_types
;
789 if (TREE_PURPOSE (v1
) != TREE_PURPOSE (v2
))
790 goto different_types
;
793 /* If one enumeration has more values than the other, they
796 goto different_types
;
803 case QUAL_UNION_TYPE
:
807 /* For aggregate types, all the fields must be the same. */
808 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
810 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
812 /* Different field kinds are not compatible. */
813 if (TREE_CODE (f1
) != TREE_CODE (f2
))
814 goto different_types
;
815 /* Field decls must have the same name and offset. */
816 if (TREE_CODE (f1
) == FIELD_DECL
817 && (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
818 || !gimple_compare_field_offset (f1
, f2
)))
819 goto different_types
;
820 /* All entities should have the same name and type. */
821 if (DECL_NAME (f1
) != DECL_NAME (f2
)
822 || !gtc_visit (TREE_TYPE (f1
), TREE_TYPE (f2
),
823 state
, sccstack
, sccstate
, sccstate_obstack
))
824 goto different_types
;
827 /* If one aggregate has more fields than the other, they
830 goto different_types
;
839 /* Common exit path for types that are not compatible. */
844 /* Common exit path for types that are compatible. */
846 gcc_assert (state
->u
.same_p
== 1);
849 if (state
->low
== state
->dfsnum
)
853 /* Pop off the SCC and set its cache values to the final
854 comparison result. */
858 x
= sccstack
->pop ();
859 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
860 cstate
->on_sccstack
= false;
861 x
->same_p
= state
->u
.same_p
;
866 return state
->u
.same_p
;
869 /* Return true iff T1 and T2 are structurally identical. When
870 FOR_MERGING_P is true the an incomplete type and a complete type
871 are considered different, otherwise they are considered compatible. */
874 gimple_types_compatible_p (tree t1
, tree t2
)
876 vec
<type_pair_t
> sccstack
= vNULL
;
877 struct pointer_map_t
*sccstate
;
878 struct obstack sccstate_obstack
;
879 type_pair_t p
= NULL
;
881 tree leader1
, leader2
;
883 /* Before starting to set up the SCC machinery handle simple cases. */
885 /* Check first for the obvious case of pointer identity. */
889 /* Check that we have two types to compare. */
890 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
893 /* Can't be the same type if the types don't have the same code. */
894 if (TREE_CODE (t1
) != TREE_CODE (t2
))
897 /* Can't be the same type if they have different CV qualifiers. */
898 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
901 if (TREE_ADDRESSABLE (t1
) != TREE_ADDRESSABLE (t2
))
904 /* Void types and nullptr types are always the same. */
905 if (TREE_CODE (t1
) == VOID_TYPE
906 || TREE_CODE (t1
) == NULLPTR_TYPE
)
909 /* Can't be the same type if they have different alignment or mode. */
910 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
)
911 || TYPE_MODE (t1
) != TYPE_MODE (t2
))
914 /* Do some simple checks before doing three hashtable queries. */
915 if (INTEGRAL_TYPE_P (t1
)
916 || SCALAR_FLOAT_TYPE_P (t1
)
917 || FIXED_POINT_TYPE_P (t1
)
918 || TREE_CODE (t1
) == VECTOR_TYPE
919 || TREE_CODE (t1
) == COMPLEX_TYPE
920 || TREE_CODE (t1
) == OFFSET_TYPE
921 || POINTER_TYPE_P (t1
))
923 /* Can't be the same type if they have different sign or precision. */
924 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
)
925 || TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
928 if (TREE_CODE (t1
) == INTEGER_TYPE
929 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
932 /* That's all we need to check for float and fixed-point types. */
933 if (SCALAR_FLOAT_TYPE_P (t1
)
934 || FIXED_POINT_TYPE_P (t1
))
937 /* For other types fall through to more complex checks. */
940 /* If the types have been previously registered and found equal
942 leader1
= gimple_lookup_type_leader (t1
);
943 leader2
= gimple_lookup_type_leader (t2
);
946 || (leader1
&& leader1
== leader2
))
949 /* If the hash values of t1 and t2 are different the types can't
950 possibly be the same. This helps keeping the type-pair hashtable
951 small, only tracking comparisons for hash collisions. */
952 if (gimple_type_hash (t1
) != gimple_type_hash (t2
))
955 /* If we've visited this type pair before (in the case of aggregates
956 with self-referential types), and we made a decision, return it. */
957 p
= lookup_type_pair (t1
, t2
);
958 if (p
->same_p
== 0 || p
->same_p
== 1)
960 /* We have already decided whether T1 and T2 are the
961 same, return the cached result. */
962 return p
->same_p
== 1;
965 /* Now set up the SCC machinery for the comparison. */
966 gtc_next_dfs_num
= 1;
967 sccstate
= pointer_map_create ();
968 gcc_obstack_init (&sccstate_obstack
);
969 res
= gimple_types_compatible_p_1 (t1
, t2
, p
,
970 &sccstack
, sccstate
, &sccstate_obstack
);
972 pointer_map_destroy (sccstate
);
973 obstack_free (&sccstate_obstack
, NULL
);
979 iterative_hash_gimple_type (tree
, hashval_t
, vec
<tree
> *,
980 struct pointer_map_t
*, struct obstack
*);
982 /* DFS visit the edge from the callers type with state *STATE to T.
983 Update the callers type hash V with the hash for T if it is not part
984 of the SCC containing the callers type and return it.
985 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
988 visit (tree t
, struct sccs
*state
, hashval_t v
,
990 struct pointer_map_t
*sccstate
,
991 struct obstack
*sccstate_obstack
)
993 struct sccs
*cstate
= NULL
;
994 struct tree_int_map m
;
997 /* If there is a hash value recorded for this type then it can't
998 possibly be part of our parent SCC. Simply mix in its hash. */
1000 if ((slot
= htab_find_slot (type_hash_cache
, &m
, NO_INSERT
))
1002 return iterative_hash_hashval_t (((struct tree_int_map
*) *slot
)->to
, v
);
1004 if ((slot
= pointer_map_contains (sccstate
, t
)) != NULL
)
1005 cstate
= (struct sccs
*)*slot
;
1009 /* Not yet visited. DFS recurse. */
1010 tem
= iterative_hash_gimple_type (t
, v
,
1011 sccstack
, sccstate
, sccstate_obstack
);
1013 cstate
= (struct sccs
*)* pointer_map_contains (sccstate
, t
);
1014 state
->low
= MIN (state
->low
, cstate
->low
);
1015 /* If the type is no longer on the SCC stack and thus is not part
1016 of the parents SCC mix in its hash value. Otherwise we will
1017 ignore the type for hashing purposes and return the unaltered
1019 if (!cstate
->on_sccstack
)
1022 if (cstate
->dfsnum
< state
->dfsnum
1023 && cstate
->on_sccstack
)
1024 state
->low
= MIN (cstate
->dfsnum
, state
->low
);
1026 /* We are part of our parents SCC, skip this type during hashing
1027 and return the unaltered hash value. */
1031 /* Hash NAME with the previous hash value V and return it. */
1034 iterative_hash_name (tree name
, hashval_t v
)
1038 v
= iterative_hash_hashval_t (TREE_CODE (name
), v
);
1039 if (TREE_CODE (name
) == TYPE_DECL
)
1040 name
= DECL_NAME (name
);
1043 gcc_assert (TREE_CODE (name
) == IDENTIFIER_NODE
);
1044 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name
), v
);
1047 /* A type, hashvalue pair for sorting SCC members. */
1049 struct type_hash_pair
{
1054 /* Compare two type, hashvalue pairs. */
1057 type_hash_pair_compare (const void *p1_
, const void *p2_
)
1059 const struct type_hash_pair
*p1
= (const struct type_hash_pair
*) p1_
;
1060 const struct type_hash_pair
*p2
= (const struct type_hash_pair
*) p2_
;
1061 if (p1
->hash
< p2
->hash
)
1063 else if (p1
->hash
> p2
->hash
)
1068 /* Returning a hash value for gimple type TYPE combined with VAL.
1069 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1071 To hash a type we end up hashing in types that are reachable.
1072 Through pointers we can end up with cycles which messes up the
1073 required property that we need to compute the same hash value
1074 for structurally equivalent types. To avoid this we have to
1075 hash all types in a cycle (the SCC) in a commutative way. The
1076 easiest way is to not mix in the hashes of the SCC members at
1077 all. To make this work we have to delay setting the hash
1078 values of the SCC until it is complete. */
1081 iterative_hash_gimple_type (tree type
, hashval_t val
,
1082 vec
<tree
> *sccstack
,
1083 struct pointer_map_t
*sccstate
,
1084 struct obstack
*sccstate_obstack
)
1090 /* Not visited during this DFS walk. */
1091 gcc_checking_assert (!pointer_map_contains (sccstate
, type
));
1092 state
= XOBNEW (sccstate_obstack
, struct sccs
);
1093 *pointer_map_insert (sccstate
, type
) = state
;
1095 sccstack
->safe_push (type
);
1096 state
->dfsnum
= next_dfs_num
++;
1097 state
->low
= state
->dfsnum
;
1098 state
->on_sccstack
= true;
1100 /* Combine a few common features of types so that types are grouped into
1101 smaller sets; when searching for existing matching types to merge,
1102 only existing types having the same features as the new type will be
1104 v
= iterative_hash_name (TYPE_NAME (type
), 0);
1105 if (TYPE_NAME (type
)
1106 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1107 && DECL_CONTEXT (TYPE_NAME (type
))
1108 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type
))))
1109 v
= visit (DECL_CONTEXT (TYPE_NAME (type
)), state
, v
,
1110 sccstack
, sccstate
, sccstate_obstack
);
1112 /* Factor in the variant structure. */
1113 if (TYPE_MAIN_VARIANT (type
) != type
)
1114 v
= visit (TYPE_MAIN_VARIANT (type
), state
, v
,
1115 sccstack
, sccstate
, sccstate_obstack
);
1117 v
= iterative_hash_hashval_t (TREE_CODE (type
), v
);
1118 v
= iterative_hash_hashval_t (TYPE_QUALS (type
), v
);
1119 v
= iterative_hash_hashval_t (TREE_ADDRESSABLE (type
), v
);
1121 /* Do not hash the types size as this will cause differences in
1122 hash values for the complete vs. the incomplete type variant. */
1124 /* Incorporate common features of numerical types. */
1125 if (INTEGRAL_TYPE_P (type
)
1126 || SCALAR_FLOAT_TYPE_P (type
)
1127 || FIXED_POINT_TYPE_P (type
))
1129 v
= iterative_hash_hashval_t (TYPE_PRECISION (type
), v
);
1130 v
= iterative_hash_hashval_t (TYPE_MODE (type
), v
);
1131 v
= iterative_hash_hashval_t (TYPE_UNSIGNED (type
), v
);
1134 /* For pointer and reference types, fold in information about the type
1136 if (POINTER_TYPE_P (type
))
1137 v
= visit (TREE_TYPE (type
), state
, v
,
1138 sccstack
, sccstate
, sccstate_obstack
);
1140 /* For integer types hash the types min/max values and the string flag. */
1141 if (TREE_CODE (type
) == INTEGER_TYPE
)
1143 /* OMP lowering can introduce error_mark_node in place of
1144 random local decls in types. */
1145 if (TYPE_MIN_VALUE (type
) != error_mark_node
)
1146 v
= iterative_hash_expr (TYPE_MIN_VALUE (type
), v
);
1147 if (TYPE_MAX_VALUE (type
) != error_mark_node
)
1148 v
= iterative_hash_expr (TYPE_MAX_VALUE (type
), v
);
1149 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
1152 /* For array types hash the domain and the string flag. */
1153 if (TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_DOMAIN (type
))
1155 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
1156 v
= visit (TYPE_DOMAIN (type
), state
, v
,
1157 sccstack
, sccstate
, sccstate_obstack
);
1160 /* Recurse for aggregates with a single element type. */
1161 if (TREE_CODE (type
) == ARRAY_TYPE
1162 || TREE_CODE (type
) == COMPLEX_TYPE
1163 || TREE_CODE (type
) == VECTOR_TYPE
)
1164 v
= visit (TREE_TYPE (type
), state
, v
,
1165 sccstack
, sccstate
, sccstate_obstack
);
1167 /* Incorporate function return and argument types. */
1168 if (TREE_CODE (type
) == FUNCTION_TYPE
|| TREE_CODE (type
) == METHOD_TYPE
)
1173 /* For method types also incorporate their parent class. */
1174 if (TREE_CODE (type
) == METHOD_TYPE
)
1175 v
= visit (TYPE_METHOD_BASETYPE (type
), state
, v
,
1176 sccstack
, sccstate
, sccstate_obstack
);
1178 /* Check result and argument types. */
1179 v
= visit (TREE_TYPE (type
), state
, v
,
1180 sccstack
, sccstate
, sccstate_obstack
);
1181 for (p
= TYPE_ARG_TYPES (type
), na
= 0; p
; p
= TREE_CHAIN (p
))
1183 v
= visit (TREE_VALUE (p
), state
, v
,
1184 sccstack
, sccstate
, sccstate_obstack
);
1188 v
= iterative_hash_hashval_t (na
, v
);
1191 if (RECORD_OR_UNION_TYPE_P (type
))
1196 for (f
= TYPE_FIELDS (type
), nf
= 0; f
; f
= TREE_CHAIN (f
))
1198 v
= iterative_hash_name (DECL_NAME (f
), v
);
1199 v
= visit (TREE_TYPE (f
), state
, v
,
1200 sccstack
, sccstate
, sccstate_obstack
);
1204 v
= iterative_hash_hashval_t (nf
, v
);
1207 /* Record hash for us. */
1210 /* See if we found an SCC. */
1211 if (state
->low
== state
->dfsnum
)
1214 struct tree_int_map
*m
;
1216 /* Pop off the SCC and set its hash values. */
1217 x
= sccstack
->pop ();
1218 /* Optimize SCC size one. */
1221 state
->on_sccstack
= false;
1222 m
= ggc_alloc_cleared_tree_int_map ();
1225 slot
= htab_find_slot (type_hash_cache
, m
, INSERT
);
1226 gcc_assert (!*slot
);
1231 struct sccs
*cstate
;
1232 unsigned first
, i
, size
, j
;
1233 struct type_hash_pair
*pairs
;
1234 /* Pop off the SCC and build an array of type, hash pairs. */
1235 first
= sccstack
->length () - 1;
1236 while ((*sccstack
)[first
] != type
)
1238 size
= sccstack
->length () - first
+ 1;
1239 pairs
= XALLOCAVEC (struct type_hash_pair
, size
);
1241 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
1242 cstate
->on_sccstack
= false;
1244 pairs
[i
].hash
= cstate
->u
.hash
;
1247 x
= sccstack
->pop ();
1248 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
1249 cstate
->on_sccstack
= false;
1252 pairs
[i
].hash
= cstate
->u
.hash
;
1255 gcc_assert (i
+ 1 == size
);
1256 /* Sort the arrays of type, hash pairs so that when we mix in
1257 all members of the SCC the hash value becomes independent on
1258 the order we visited the SCC. Disregard hashes equal to
1259 the hash of the type we mix into because we cannot guarantee
1260 a stable sort for those across different TUs. */
1261 qsort (pairs
, size
, sizeof (struct type_hash_pair
),
1262 type_hash_pair_compare
);
1263 for (i
= 0; i
< size
; ++i
)
1266 m
= ggc_alloc_cleared_tree_int_map ();
1267 m
->base
.from
= pairs
[i
].type
;
1268 hash
= pairs
[i
].hash
;
1269 /* Skip same hashes. */
1270 for (j
= i
+ 1; j
< size
&& pairs
[j
].hash
== pairs
[i
].hash
; ++j
)
1272 for (; j
< size
; ++j
)
1273 hash
= iterative_hash_hashval_t (pairs
[j
].hash
, hash
);
1274 for (j
= 0; pairs
[j
].hash
!= pairs
[i
].hash
; ++j
)
1275 hash
= iterative_hash_hashval_t (pairs
[j
].hash
, hash
);
1277 if (pairs
[i
].type
== type
)
1279 slot
= htab_find_slot (type_hash_cache
, m
, INSERT
);
1280 gcc_assert (!*slot
);
1286 return iterative_hash_hashval_t (v
, val
);
1289 /* Returns a hash value for P (assumed to be a type). The hash value
1290 is computed using some distinguishing features of the type. Note
1291 that we cannot use pointer hashing here as we may be dealing with
1292 two distinct instances of the same type.
1294 This function should produce the same hash value for two compatible
1295 types according to gimple_types_compatible_p. */
1298 gimple_type_hash (const void *p
)
1300 const_tree t
= (const_tree
) p
;
1301 vec
<tree
> sccstack
= vNULL
;
1302 struct pointer_map_t
*sccstate
;
1303 struct obstack sccstate_obstack
;
1306 struct tree_int_map m
;
1308 m
.base
.from
= CONST_CAST_TREE (t
);
1309 if ((slot
= htab_find_slot (type_hash_cache
, &m
, NO_INSERT
))
1311 return iterative_hash_hashval_t (((struct tree_int_map
*) *slot
)->to
, 0);
1313 /* Perform a DFS walk and pre-hash all reachable types. */
1315 sccstate
= pointer_map_create ();
1316 gcc_obstack_init (&sccstate_obstack
);
1317 val
= iterative_hash_gimple_type (CONST_CAST_TREE (t
), 0,
1318 &sccstack
, sccstate
, &sccstate_obstack
);
1319 sccstack
.release ();
1320 pointer_map_destroy (sccstate
);
1321 obstack_free (&sccstate_obstack
, NULL
);
1326 /* Returns nonzero if P1 and P2 are equal. */
1329 gimple_type_eq (const void *p1
, const void *p2
)
1331 const_tree t1
= (const_tree
) p1
;
1332 const_tree t2
= (const_tree
) p2
;
1333 return gimple_types_compatible_p (CONST_CAST_TREE (t1
),
1334 CONST_CAST_TREE (t2
));
1338 /* Worker for gimple_register_type.
1339 Register type T in the global type table gimple_types.
1340 When REGISTERING_MV is false first recurse for the main variant of T. */
1343 gimple_register_type_1 (tree t
, bool registering_mv
)
1346 gimple_type_leader_entry
*leader
;
1348 /* If we registered this type before return the cached result. */
1349 leader
= &gimple_type_leader
[TYPE_UID (t
) % GIMPLE_TYPE_LEADER_SIZE
];
1350 if (leader
->type
== t
)
1351 return leader
->leader
;
1353 /* Always register the main variant first. This is important so we
1354 pick up the non-typedef variants as canonical, otherwise we'll end
1355 up taking typedef ids for structure tags during comparison.
1356 It also makes sure that main variants will be merged to main variants.
1357 As we are operating on a possibly partially fixed up type graph
1358 do not bother to recurse more than once, otherwise we may end up
1360 If we are registering a main variant it will either remain its
1361 own main variant or it will be merged to something else in which
1362 case we do not care for the main variant leader. */
1364 && TYPE_MAIN_VARIANT (t
) != t
)
1365 gimple_register_type_1 (TYPE_MAIN_VARIANT (t
), true);
1367 /* See if we already have an equivalent type registered. */
1368 slot
= htab_find_slot (gimple_types
, t
, INSERT
);
1370 && *(tree
*)slot
!= t
)
1372 tree new_type
= (tree
) *((tree
*) slot
);
1374 leader
->leader
= new_type
;
1378 /* If not, insert it to the cache and the hash. */
1385 /* Register type T in the global type table gimple_types.
1386 If another type T', compatible with T, already existed in
1387 gimple_types then return T', otherwise return T. This is used by
1388 LTO to merge identical types read from different TUs. */
1391 gimple_register_type (tree t
)
1393 gcc_assert (TYPE_P (t
));
1394 return gimple_register_type_1 (t
, false);
1397 #define GIMPLE_REGISTER_TYPE(tt) \
1398 (TREE_VISITED (tt) ? gimple_register_type (tt) : tt)
1402 /* A hashtable of trees that potentially refer to variables or functions
1403 that must be replaced with their prevailing variant. */
1404 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node
))) htab_t
1407 /* Remember that T is a tree that (potentially) refers to a variable
1408 or function decl that may be replaced with its prevailing variant. */
1410 remember_with_vars (tree t
)
1412 *(tree
*) htab_find_slot (tree_with_vars
, t
, INSERT
) = t
;
1415 #define LTO_FIXUP_TREE(tt) \
1421 (tt) = GIMPLE_REGISTER_TYPE (tt); \
1422 if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
1423 remember_with_vars (t); \
1424 if (TREE_CODE (tt) == INTEGER_CST) \
1425 (tt) = fixup_integer_cst (tt); \
1429 static void lto_fixup_types (tree
);
1431 /* Return integer_cst T with updated type. */
1434 fixup_integer_cst (tree t
)
1436 tree type
= GIMPLE_REGISTER_TYPE (TREE_TYPE (t
));
1438 if (type
== TREE_TYPE (t
))
1441 /* If overflow was set, streamer_read_integer_cst
1442 produced local copy of T. */
1443 if (TREE_OVERFLOW (t
))
1445 TREE_TYPE (t
) = type
;
1449 /* Otherwise produce new shared node for the new type. */
1450 return build_int_cst_wide (type
, TREE_INT_CST_LOW (t
),
1451 TREE_INT_CST_HIGH (t
));
1454 /* Fix up fields of a tree_typed T. */
1457 lto_ft_typed (tree t
)
1459 LTO_FIXUP_TREE (TREE_TYPE (t
));
1462 /* Fix up fields of a tree_common T. */
1465 lto_ft_common (tree t
)
1468 LTO_FIXUP_TREE (TREE_CHAIN (t
));
1471 /* Fix up fields of a decl_minimal T. */
1474 lto_ft_decl_minimal (tree t
)
1477 LTO_FIXUP_TREE (DECL_NAME (t
));
1478 LTO_FIXUP_TREE (DECL_CONTEXT (t
));
1481 /* Fix up fields of a decl_common T. */
1484 lto_ft_decl_common (tree t
)
1486 lto_ft_decl_minimal (t
);
1487 LTO_FIXUP_TREE (DECL_SIZE (t
));
1488 LTO_FIXUP_TREE (DECL_SIZE_UNIT (t
));
1489 LTO_FIXUP_TREE (DECL_INITIAL (t
));
1490 LTO_FIXUP_TREE (DECL_ATTRIBUTES (t
));
1491 LTO_FIXUP_TREE (DECL_ABSTRACT_ORIGIN (t
));
1494 /* Fix up fields of a decl_with_vis T. */
1497 lto_ft_decl_with_vis (tree t
)
1499 lto_ft_decl_common (t
);
1501 /* Accessor macro has side-effects, use field-name here. */
1502 LTO_FIXUP_TREE (t
->decl_with_vis
.assembler_name
);
1503 LTO_FIXUP_TREE (DECL_SECTION_NAME (t
));
1506 /* Fix up fields of a decl_non_common T. */
1509 lto_ft_decl_non_common (tree t
)
1511 lto_ft_decl_with_vis (t
);
1512 LTO_FIXUP_TREE (DECL_ARGUMENT_FLD (t
));
1513 LTO_FIXUP_TREE (DECL_RESULT_FLD (t
));
1514 LTO_FIXUP_TREE (DECL_VINDEX (t
));
1515 /* The C frontends may create exact duplicates for DECL_ORIGINAL_TYPE
1516 like for 'typedef enum foo foo'. We have no way of avoiding to
1517 merge them and dwarf2out.c cannot deal with this,
1518 so fix this up by clearing DECL_ORIGINAL_TYPE in this case. */
1519 if (TREE_CODE (t
) == TYPE_DECL
1520 && DECL_ORIGINAL_TYPE (t
) == TREE_TYPE (t
))
1521 DECL_ORIGINAL_TYPE (t
) = NULL_TREE
;
1524 /* Fix up fields of a decl_non_common T. */
1527 lto_ft_function (tree t
)
1529 lto_ft_decl_non_common (t
);
1530 LTO_FIXUP_TREE (DECL_FUNCTION_PERSONALITY (t
));
1533 /* Fix up fields of a field_decl T. */
1536 lto_ft_field_decl (tree t
)
1538 lto_ft_decl_common (t
);
1539 LTO_FIXUP_TREE (DECL_FIELD_OFFSET (t
));
1540 LTO_FIXUP_TREE (DECL_BIT_FIELD_TYPE (t
));
1541 LTO_FIXUP_TREE (DECL_QUALIFIER (t
));
1542 LTO_FIXUP_TREE (DECL_FIELD_BIT_OFFSET (t
));
1543 LTO_FIXUP_TREE (DECL_FCONTEXT (t
));
1546 /* Fix up fields of a type T. */
1549 lto_ft_type (tree t
)
1552 LTO_FIXUP_TREE (TYPE_CACHED_VALUES (t
));
1553 LTO_FIXUP_TREE (TYPE_SIZE (t
));
1554 LTO_FIXUP_TREE (TYPE_SIZE_UNIT (t
));
1555 LTO_FIXUP_TREE (TYPE_ATTRIBUTES (t
));
1556 LTO_FIXUP_TREE (TYPE_NAME (t
));
1558 /* Accessors are for derived node types only. */
1559 if (!POINTER_TYPE_P (t
))
1560 LTO_FIXUP_TREE (TYPE_MINVAL (t
));
1561 LTO_FIXUP_TREE (TYPE_MAXVAL (t
));
1563 /* Accessor is for derived node types only. */
1564 LTO_FIXUP_TREE (t
->type_non_common
.binfo
);
1566 LTO_FIXUP_TREE (TYPE_CONTEXT (t
));
1569 /* Fix up fields of a BINFO T. */
1572 lto_ft_binfo (tree t
)
1574 unsigned HOST_WIDE_INT i
, n
;
1575 tree base
, saved_base
;
1578 LTO_FIXUP_TREE (BINFO_VTABLE (t
));
1579 LTO_FIXUP_TREE (BINFO_OFFSET (t
));
1580 LTO_FIXUP_TREE (BINFO_VIRTUALS (t
));
1581 LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t
));
1582 n
= vec_safe_length (BINFO_BASE_ACCESSES (t
));
1583 for (i
= 0; i
< n
; i
++)
1585 saved_base
= base
= BINFO_BASE_ACCESS (t
, i
);
1586 LTO_FIXUP_TREE (base
);
1587 if (base
!= saved_base
)
1588 (*BINFO_BASE_ACCESSES (t
))[i
] = base
;
1590 LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t
));
1591 LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t
));
1592 LTO_FIXUP_TREE (BINFO_VPTR_INDEX (t
));
1593 n
= BINFO_N_BASE_BINFOS (t
);
1594 for (i
= 0; i
< n
; i
++)
1596 saved_base
= base
= BINFO_BASE_BINFO (t
, i
);
1597 LTO_FIXUP_TREE (base
);
1598 if (base
!= saved_base
)
1599 (*BINFO_BASE_BINFOS (t
))[i
] = base
;
1603 /* Fix up fields of a CONSTRUCTOR T. */
1606 lto_ft_constructor (tree t
)
1608 unsigned HOST_WIDE_INT idx
;
1609 constructor_elt
*ce
;
1613 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t
), idx
, &ce
); idx
++)
1615 LTO_FIXUP_TREE (ce
->index
);
1616 LTO_FIXUP_TREE (ce
->value
);
1620 /* Fix up fields of an expression tree T. */
1623 lto_ft_expr (tree t
)
1627 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
1628 LTO_FIXUP_TREE (TREE_OPERAND (t
, i
));
1631 /* Given a tree T fixup fields of T by replacing types with their merged
1632 variant and other entities by an equal entity from an earlier compilation
1633 unit, or an entity being canonical in a different way. This includes
1634 for instance integer or string constants. */
1637 lto_fixup_types (tree t
)
1639 switch (TREE_CODE (t
))
1641 case IDENTIFIER_NODE
:
1645 LTO_FIXUP_TREE (TREE_VALUE (t
));
1646 LTO_FIXUP_TREE (TREE_PURPOSE (t
));
1647 LTO_FIXUP_TREE (TREE_CHAIN (t
));
1651 lto_ft_field_decl (t
);
1659 lto_ft_decl_common (t
);
1663 lto_ft_decl_with_vis (t
);
1667 lto_ft_decl_non_common (t
);
1671 lto_ft_function (t
);
1678 case PLACEHOLDER_EXPR
:
1683 case TRANSLATION_UNIT_DECL
:
1684 case OPTIMIZATION_NODE
:
1685 case TARGET_OPTION_NODE
:
1691 else if (TREE_CODE (t
) == CONSTRUCTOR
)
1692 lto_ft_constructor (t
);
1693 else if (CONSTANT_CLASS_P (t
))
1694 LTO_FIXUP_TREE (TREE_TYPE (t
));
1695 else if (EXPR_P (t
))
1701 remember_with_vars (t
);
1707 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1709 static enum ld_plugin_symbol_resolution
1710 get_resolution (struct data_in
*data_in
, unsigned index
)
1712 if (data_in
->globals_resolution
.exists ())
1714 ld_plugin_symbol_resolution_t ret
;
1715 /* We can have references to not emitted functions in
1716 DECL_FUNCTION_PERSONALITY at least. So we can and have
1717 to indeed return LDPR_UNKNOWN in some cases. */
1718 if (data_in
->globals_resolution
.length () <= index
)
1719 return LDPR_UNKNOWN
;
1720 ret
= data_in
->globals_resolution
[index
];
1724 /* Delay resolution finding until decl merging. */
1725 return LDPR_UNKNOWN
;
1728 /* Map assigning declarations their resolutions. */
1729 static pointer_map_t
*resolution_map
;
1731 /* We need to record resolutions until symbol table is read. */
1733 register_resolution (tree decl
, enum ld_plugin_symbol_resolution resolution
)
1735 if (resolution
== LDPR_UNKNOWN
)
1737 if (!resolution_map
)
1738 resolution_map
= pointer_map_create ();
1739 *pointer_map_insert (resolution_map
, decl
) = (void *)(size_t)resolution
;
1742 /* Register DECL with the global symbol table and change its
1743 name if necessary to avoid name clashes for static globals across
1747 lto_register_var_decl_in_symtab (struct data_in
*data_in
, tree decl
)
1751 /* Variable has file scope, not local. */
1752 if (!TREE_PUBLIC (decl
)
1753 && !((context
= decl_function_context (decl
))
1754 && auto_var_in_fn_p (decl
, context
)))
1756 rest_of_decl_compilation (decl
, 1, 0);
1759 /* If this variable has already been declared, queue the
1760 declaration for merging. */
1761 if (TREE_PUBLIC (decl
))
1764 if (!streamer_tree_cache_lookup (data_in
->reader_cache
, decl
, &ix
))
1766 register_resolution (decl
, get_resolution (data_in
, ix
));
1771 /* Register DECL with the global symbol table and change its
1772 name if necessary to avoid name clashes for static globals across
1773 different files. DATA_IN contains descriptors and tables for the
1777 lto_register_function_decl_in_symtab (struct data_in
*data_in
, tree decl
)
1779 /* If this variable has already been declared, queue the
1780 declaration for merging. */
1781 if (TREE_PUBLIC (decl
) && !DECL_ABSTRACT (decl
))
1784 if (!streamer_tree_cache_lookup (data_in
->reader_cache
, decl
, &ix
))
1786 register_resolution (decl
, get_resolution (data_in
, ix
));
1791 /* Given a streamer cache structure DATA_IN (holding a sequence of trees
1792 for one compilation unit) go over all trees starting at index FROM until the
1793 end of the sequence and replace fields of those trees, and the trees
1794 themself with their canonical variants as per gimple_register_type. */
1797 uniquify_nodes (struct data_in
*data_in
, unsigned from
)
1799 struct streamer_tree_cache_d
*cache
= data_in
->reader_cache
;
1800 unsigned len
= cache
->nodes
.length ();
1803 /* Go backwards because children streamed for the first time come
1804 as part of their parents, and hence are created after them. */
1806 /* First register all the types in the cache. This makes sure to
1807 have the original structure in the type cycles when registering
1808 them and computing hashes. */
1809 for (i
= len
; i
-- > from
;)
1811 tree t
= cache
->nodes
[i
];
1812 if (t
&& TYPE_P (t
))
1814 tree newt
= gimple_register_type (t
);
1815 /* Mark non-prevailing types so we fix them up. No need
1816 to reset that flag afterwards - nothing that refers
1817 to those types is left and they are collected. */
1819 TREE_VISITED (t
) = 1;
1823 /* Second fixup all trees in the new cache entries. */
1824 for (i
= len
; i
-- > from
;)
1826 tree t
= cache
->nodes
[i
];
1831 /* First fixup the fields of T. */
1832 lto_fixup_types (t
);
1837 /* Now try to find a canonical variant of T itself. */
1838 t
= GIMPLE_REGISTER_TYPE (t
);
1842 /* The following re-creates proper variant lists while fixing up
1843 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1844 variant list state before fixup is broken. */
1847 #ifdef ENABLE_CHECKING
1848 /* Remove us from our main variant list if we are not the
1850 if (TYPE_MAIN_VARIANT (t
) != t
)
1852 tem
= TYPE_MAIN_VARIANT (t
);
1853 while (tem
&& TYPE_NEXT_VARIANT (tem
) != t
)
1854 tem
= TYPE_NEXT_VARIANT (tem
);
1855 gcc_assert (!tem
&& !TYPE_NEXT_VARIANT (t
));
1859 /* Query our new main variant. */
1860 mv
= GIMPLE_REGISTER_TYPE (TYPE_MAIN_VARIANT (t
));
1862 /* If we were the variant leader and we get replaced ourselves drop
1863 all variants from our list. */
1864 if (TYPE_MAIN_VARIANT (t
) == t
1870 tree tem2
= TYPE_NEXT_VARIANT (tem
);
1871 TYPE_NEXT_VARIANT (tem
) = NULL_TREE
;
1876 /* If we are not our own variant leader link us into our new leaders
1880 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
1881 TYPE_NEXT_VARIANT (mv
) = t
;
1882 if (RECORD_OR_UNION_TYPE_P (t
))
1883 TYPE_BINFO (t
) = TYPE_BINFO (mv
);
1884 /* Preserve the invariant that type variants share their
1886 if (RECORD_OR_UNION_TYPE_P (t
)
1887 && TYPE_FIELDS (mv
) != TYPE_FIELDS (t
))
1890 for (f1
= TYPE_FIELDS (mv
), f2
= TYPE_FIELDS (t
);
1891 f1
&& f2
; f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1894 gcc_assert (f1
!= f2
1895 && DECL_NAME (f1
) == DECL_NAME (f2
));
1896 if (!streamer_tree_cache_lookup (cache
, f2
, &ix
))
1898 /* If we're going to replace an element which we'd
1899 still visit in the next iterations, we wouldn't
1900 handle it, so do it here. We do have to handle it
1901 even though the field_decl itself will be removed,
1902 as it could refer to e.g. integer_cst which we
1903 wouldn't reach via any other way, hence they
1904 (and their type) would stay uncollected. */
1905 /* ??? We should rather make sure to replace all
1906 references to f2 with f1. That means handling
1907 COMPONENT_REFs and CONSTRUCTOR elements in
1908 lto_fixup_types and special-case the field-decl
1909 operand handling. */
1910 /* ??? Not sure the above is all relevant in this
1911 path canonicalizing TYPE_FIELDS to that of the
1914 lto_fixup_types (f2
);
1915 streamer_tree_cache_insert_at (cache
, f1
, ix
);
1917 TYPE_FIELDS (t
) = TYPE_FIELDS (mv
);
1921 /* Finally adjust our main variant and fix it up. */
1922 TYPE_MAIN_VARIANT (t
) = mv
;
1924 /* The following reconstructs the pointer chains
1925 of the new pointed-to type if we are a main variant. We do
1926 not stream those so they are broken before fixup. */
1927 if (TREE_CODE (t
) == POINTER_TYPE
1928 && TYPE_MAIN_VARIANT (t
) == t
)
1930 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (TREE_TYPE (t
));
1931 TYPE_POINTER_TO (TREE_TYPE (t
)) = t
;
1933 else if (TREE_CODE (t
) == REFERENCE_TYPE
1934 && TYPE_MAIN_VARIANT (t
) == t
)
1936 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (TREE_TYPE (t
));
1937 TYPE_REFERENCE_TO (TREE_TYPE (t
)) = t
;
1943 if (RECORD_OR_UNION_TYPE_P (t
))
1946 if (TYPE_FIELDS (t
) != TYPE_FIELDS (oldt
))
1947 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (oldt
);
1948 f1
&& f2
; f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1951 gcc_assert (f1
!= f2
&& DECL_NAME (f1
) == DECL_NAME (f2
));
1952 if (!streamer_tree_cache_lookup (cache
, f2
, &ix
))
1954 /* If we're going to replace an element which we'd
1955 still visit in the next iterations, we wouldn't
1956 handle it, so do it here. We do have to handle it
1957 even though the field_decl itself will be removed,
1958 as it could refer to e.g. integer_cst which we
1959 wouldn't reach via any other way, hence they
1960 (and their type) would stay uncollected. */
1961 /* ??? We should rather make sure to replace all
1962 references to f2 with f1. That means handling
1963 COMPONENT_REFs and CONSTRUCTOR elements in
1964 lto_fixup_types and special-case the field-decl
1965 operand handling. */
1967 lto_fixup_types (f2
);
1968 streamer_tree_cache_insert_at (cache
, f1
, ix
);
1972 /* If we found a tree that is equal to oldt replace it in the
1973 cache, so that further users (in the various LTO sections)
1975 streamer_tree_cache_insert_at (cache
, t
, i
);
1979 /* Finally compute the canonical type of all TREE_TYPEs and register
1980 VAR_DECL and FUNCTION_DECL nodes in the symbol table.
1981 From this point there are no longer any types with
1982 TYPE_STRUCTURAL_EQUALITY_P and its type-based alias problems.
1983 This step requires the TYPE_POINTER_TO lists being present, so
1984 make sure it is done last. */
1985 for (i
= len
; i
-- > from
;)
1987 tree t
= cache
->nodes
[i
];
1991 if (TREE_CODE (t
) == VAR_DECL
)
1992 lto_register_var_decl_in_symtab (data_in
, t
);
1993 else if (TREE_CODE (t
) == FUNCTION_DECL
&& !DECL_BUILT_IN (t
))
1994 lto_register_function_decl_in_symtab (data_in
, t
);
1996 && TREE_CODE (t
) == TYPE_DECL
)
1997 debug_hooks
->type_decl (t
, !DECL_FILE_SCOPE_P (t
));
1998 else if (TYPE_P (t
) && !TYPE_CANONICAL (t
))
1999 TYPE_CANONICAL (t
) = gimple_register_canonical_type (t
);
2004 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2005 RESOLUTIONS is the set of symbols picked by the linker (read from the
2006 resolution file when the linker plugin is being used). */
2009 lto_read_decls (struct lto_file_decl_data
*decl_data
, const void *data
,
2010 vec
<ld_plugin_symbol_resolution_t
> resolutions
)
2012 const struct lto_decl_header
*header
= (const struct lto_decl_header
*) data
;
2013 const int decl_offset
= sizeof (struct lto_decl_header
);
2014 const int main_offset
= decl_offset
+ header
->decl_state_size
;
2015 const int string_offset
= main_offset
+ header
->main_size
;
2016 struct lto_input_block ib_main
;
2017 struct data_in
*data_in
;
2019 const uint32_t *data_ptr
, *data_end
;
2020 uint32_t num_decl_states
;
2022 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
2025 data_in
= lto_data_in_create (decl_data
, (const char *) data
+ string_offset
,
2026 header
->string_size
, resolutions
);
2028 /* We do not uniquify the pre-loaded cache entries, those are middle-end
2029 internal types that should not be merged. */
2031 /* Read the global declarations and types. */
2032 while (ib_main
.p
< ib_main
.len
)
2035 unsigned from
= data_in
->reader_cache
->nodes
.length ();
2036 t
= stream_read_tree (&ib_main
, data_in
);
2037 gcc_assert (t
&& ib_main
.p
<= ib_main
.len
);
2038 uniquify_nodes (data_in
, from
);
2041 /* Read in lto_in_decl_state objects. */
2042 data_ptr
= (const uint32_t *) ((const char*) data
+ decl_offset
);
2044 (const uint32_t *) ((const char*) data_ptr
+ header
->decl_state_size
);
2045 num_decl_states
= *data_ptr
++;
2047 gcc_assert (num_decl_states
> 0);
2048 decl_data
->global_decl_state
= lto_new_in_decl_state ();
2049 data_ptr
= lto_read_in_decl_state (data_in
, data_ptr
,
2050 decl_data
->global_decl_state
);
2052 /* Read in per-function decl states and enter them in hash table. */
2053 decl_data
->function_decl_states
=
2054 htab_create_ggc (37, lto_hash_in_decl_state
, lto_eq_in_decl_state
, NULL
);
2056 for (i
= 1; i
< num_decl_states
; i
++)
2058 struct lto_in_decl_state
*state
= lto_new_in_decl_state ();
2061 data_ptr
= lto_read_in_decl_state (data_in
, data_ptr
, state
);
2062 slot
= htab_find_slot (decl_data
->function_decl_states
, state
, INSERT
);
2063 gcc_assert (*slot
== NULL
);
2067 if (data_ptr
!= data_end
)
2068 internal_error ("bytecode stream: garbage at the end of symbols section");
2070 /* Set the current decl state to be the global state. */
2071 decl_data
->current_decl_state
= decl_data
->global_decl_state
;
2073 lto_data_in_delete (data_in
);
2076 /* Custom version of strtoll, which is not portable. */
2078 static HOST_WIDEST_INT
2079 lto_parse_hex (const char *p
)
2081 HOST_WIDEST_INT ret
= 0;
2083 for (; *p
!= '\0'; ++p
)
2088 if (c
>= '0' && c
<= '9')
2090 else if (c
>= 'a' && c
<= 'f')
2091 part
= c
- 'a' + 10;
2092 else if (c
>= 'A' && c
<= 'F')
2093 part
= c
- 'A' + 10;
2095 internal_error ("could not parse hex number");
2102 /* Read resolution for file named FILE_NAME. The resolution is read from
2106 lto_resolution_read (splay_tree file_ids
, FILE *resolution
, lto_file
*file
)
2108 /* We require that objects in the resolution file are in the same
2109 order as the lto1 command line. */
2110 unsigned int name_len
;
2112 unsigned int num_symbols
;
2114 struct lto_file_decl_data
*file_data
;
2115 splay_tree_node nd
= NULL
;
2120 name_len
= strlen (file
->filename
);
2121 obj_name
= XNEWVEC (char, name_len
+ 1);
2122 fscanf (resolution
, " "); /* Read white space. */
2124 fread (obj_name
, sizeof (char), name_len
, resolution
);
2125 obj_name
[name_len
] = '\0';
2126 if (filename_cmp (obj_name
, file
->filename
) != 0)
2127 internal_error ("unexpected file name %s in linker resolution file. "
2128 "Expected %s", obj_name
, file
->filename
);
2129 if (file
->offset
!= 0)
2133 HOST_WIDEST_INT offset
;
2134 t
= fscanf (resolution
, "@0x%16s", offset_p
);
2136 internal_error ("could not parse file offset");
2137 offset
= lto_parse_hex (offset_p
);
2138 if (offset
!= file
->offset
)
2139 internal_error ("unexpected offset");
2144 fscanf (resolution
, "%u", &num_symbols
);
2146 for (i
= 0; i
< num_symbols
; i
++)
2150 unsigned HOST_WIDE_INT id
;
2152 enum ld_plugin_symbol_resolution r
= (enum ld_plugin_symbol_resolution
) 0;
2154 unsigned int lto_resolution_str_len
=
2155 sizeof (lto_resolution_str
) / sizeof (char *);
2158 t
= fscanf (resolution
, "%u " HOST_WIDE_INT_PRINT_HEX_PURE
" %26s %*[^\n]\n",
2159 &index
, &id
, r_str
);
2161 internal_error ("invalid line in the resolution file");
2163 for (j
= 0; j
< lto_resolution_str_len
; j
++)
2165 if (strcmp (lto_resolution_str
[j
], r_str
) == 0)
2167 r
= (enum ld_plugin_symbol_resolution
) j
;
2171 if (j
== lto_resolution_str_len
)
2172 internal_error ("invalid resolution in the resolution file");
2174 if (!(nd
&& lto_splay_tree_id_equal_p (nd
->key
, id
)))
2176 nd
= lto_splay_tree_lookup (file_ids
, id
);
2178 internal_error ("resolution sub id %wx not in object file", id
);
2181 file_data
= (struct lto_file_decl_data
*)nd
->value
;
2182 /* The indexes are very sparse. To save memory save them in a compact
2183 format that is only unpacked later when the subfile is processed. */
2186 file_data
->respairs
.safe_push (rp
);
2187 if (file_data
->max_index
< index
)
2188 file_data
->max_index
= index
;
/* List of file_decl_datas, kept in linker order.  */
struct file_data_list
{
  struct lto_file_decl_data *first, *last;
};
2198 /* Is the name for a id'ed LTO section? */
2201 lto_section_with_id (const char *name
, unsigned HOST_WIDE_INT
*id
)
2205 if (strncmp (name
, LTO_SECTION_NAME_PREFIX
, strlen (LTO_SECTION_NAME_PREFIX
)))
2207 s
= strrchr (name
, '.');
2208 return s
&& sscanf (s
, "." HOST_WIDE_INT_PRINT_HEX_PURE
, id
) == 1;
2211 /* Create file_data of each sub file id */
2214 create_subid_section_table (struct lto_section_slot
*ls
, splay_tree file_ids
,
2215 struct file_data_list
*list
)
2217 struct lto_section_slot s_slot
, *new_slot
;
2218 unsigned HOST_WIDE_INT id
;
2222 struct lto_file_decl_data
*file_data
;
2224 if (!lto_section_with_id (ls
->name
, &id
))
2227 /* Find hash table of sub module id */
2228 nd
= lto_splay_tree_lookup (file_ids
, id
);
2231 file_data
= (struct lto_file_decl_data
*)nd
->value
;
2235 file_data
= ggc_alloc_lto_file_decl_data ();
2236 memset(file_data
, 0, sizeof (struct lto_file_decl_data
));
2238 file_data
->section_hash_table
= lto_obj_create_section_hash_table ();;
2239 lto_splay_tree_insert (file_ids
, id
, file_data
);
2241 /* Maintain list in linker order */
2243 list
->first
= file_data
;
2245 list
->last
->next
= file_data
;
2246 list
->last
= file_data
;
2249 /* Copy section into sub module hash table */
2250 new_name
= XDUPVEC (char, ls
->name
, strlen (ls
->name
) + 1);
2251 s_slot
.name
= new_name
;
2252 hash_slot
= htab_find_slot (file_data
->section_hash_table
, &s_slot
, INSERT
);
2253 gcc_assert (*hash_slot
== NULL
);
2255 new_slot
= XDUP (struct lto_section_slot
, ls
);
2256 new_slot
->name
= new_name
;
2257 *hash_slot
= new_slot
;
2261 /* Read declarations and other initializations for a FILE_DATA. */
2264 lto_file_finalize (struct lto_file_decl_data
*file_data
, lto_file
*file
)
2268 vec
<ld_plugin_symbol_resolution_t
>
2269 resolutions
= vNULL
;
2273 /* Create vector for fast access of resolution. We do this lazily
2275 resolutions
.safe_grow_cleared (file_data
->max_index
+ 1);
2276 for (i
= 0; file_data
->respairs
.iterate (i
, &rp
); i
++)
2277 resolutions
[rp
->index
] = rp
->res
;
2278 file_data
->respairs
.release ();
2280 file_data
->renaming_hash_table
= lto_create_renaming_table ();
2281 file_data
->file_name
= file
->filename
;
2282 data
= lto_get_section_data (file_data
, LTO_section_decls
, NULL
, &len
);
2285 internal_error ("cannot read LTO decls from %s", file_data
->file_name
);
2288 /* Frees resolutions */
2289 lto_read_decls (file_data
, data
, resolutions
);
2290 lto_free_section_data (file_data
, LTO_section_decls
, NULL
, data
, len
);
2293 /* Finalize FILE_DATA in FILE and increase COUNT. */
2296 lto_create_files_from_ids (lto_file
*file
, struct lto_file_decl_data
*file_data
,
2299 lto_file_finalize (file_data
, file
);
2300 if (cgraph_dump_file
)
2301 fprintf (cgraph_dump_file
, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX
"\n",
2302 file_data
->file_name
, file_data
->id
);
2307 /* Generate a TREE representation for all types and external decls
2310 Read all of the globals out of the file. Then read the cgraph
2311 and process the .o index into the cgraph nodes so that it can open
2312 the .o file to load the functions and ipa information. */
2314 static struct lto_file_decl_data
*
2315 lto_file_read (lto_file
*file
, FILE *resolution_file
, int *count
)
2317 struct lto_file_decl_data
*file_data
= NULL
;
2318 splay_tree file_ids
;
2319 htab_t section_hash_table
;
2320 struct lto_section_slot
*section
;
2321 struct file_data_list file_list
;
2322 struct lto_section_list section_list
;
2324 memset (§ion_list
, 0, sizeof (struct lto_section_list
));
2325 section_hash_table
= lto_obj_build_section_table (file
, §ion_list
);
2327 /* Find all sub modules in the object and put their sections into new hash
2328 tables in a splay tree. */
2329 file_ids
= lto_splay_tree_new ();
2330 memset (&file_list
, 0, sizeof (struct file_data_list
));
2331 for (section
= section_list
.first
; section
!= NULL
; section
= section
->next
)
2332 create_subid_section_table (section
, file_ids
, &file_list
);
2334 /* Add resolutions to file ids */
2335 lto_resolution_read (file_ids
, resolution_file
, file
);
2337 /* Finalize each lto file for each submodule in the merged object */
2338 for (file_data
= file_list
.first
; file_data
!= NULL
; file_data
= file_data
->next
)
2339 lto_create_files_from_ids (file
, file_data
, count
);
2341 splay_tree_delete (file_ids
);
2342 htab_delete (section_hash_table
);
2344 return file_list
.first
;
2347 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2348 #define LTO_MMAP_IO 1
2352 /* Page size of machine is used for mmap and munmap calls. */
2353 static size_t page_mask
;
2356 /* Get the section data of length LEN from FILENAME starting at
2357 OFFSET. The data segment must be freed by the caller when the
2358 caller is finished. Returns NULL if all was not well. */
2361 lto_read_section_data (struct lto_file_decl_data
*file_data
,
2362 intptr_t offset
, size_t len
)
2366 static char *fd_name
;
2368 intptr_t computed_len
;
2369 intptr_t computed_offset
;
2373 /* Keep a single-entry file-descriptor cache. The last file we
2374 touched will get closed at exit.
2375 ??? Eventually we want to add a more sophisticated larger cache
2376 or rather fix function body streaming to not stream them in
2377 practically random order. */
2379 && filename_cmp (fd_name
, file_data
->file_name
) != 0)
2387 fd
= open (file_data
->file_name
, O_RDONLY
|O_BINARY
);
2390 fatal_error ("Cannot open %s", file_data
->file_name
);
2393 fd_name
= xstrdup (file_data
->file_name
);
2399 size_t page_size
= sysconf (_SC_PAGE_SIZE
);
2400 page_mask
= ~(page_size
- 1);
2403 computed_offset
= offset
& page_mask
;
2404 diff
= offset
- computed_offset
;
2405 computed_len
= len
+ diff
;
2407 result
= (char *) mmap (NULL
, computed_len
, PROT_READ
, MAP_PRIVATE
,
2408 fd
, computed_offset
);
2409 if (result
== MAP_FAILED
)
2411 fatal_error ("Cannot map %s", file_data
->file_name
);
2415 return result
+ diff
;
2417 result
= (char *) xmalloc (len
);
2418 if (lseek (fd
, offset
, SEEK_SET
) != offset
2419 || read (fd
, result
, len
) != (ssize_t
) len
)
2422 fatal_error ("Cannot read %s", file_data
->file_name
);
2426 /* Native windows doesn't supports delayed unlink on opened file. So
2427 we close file here again. This produces higher I/O load, but at least
2428 it prevents to have dangling file handles preventing unlink. */
2439 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2440 NAME will be NULL unless the section type is for a function
2444 get_section_data (struct lto_file_decl_data
*file_data
,
2445 enum lto_section_type section_type
,
2449 htab_t section_hash_table
= file_data
->section_hash_table
;
2450 struct lto_section_slot
*f_slot
;
2451 struct lto_section_slot s_slot
;
2452 const char *section_name
= lto_get_section_name (section_type
, name
, file_data
);
2456 s_slot
.name
= section_name
;
2457 f_slot
= (struct lto_section_slot
*) htab_find (section_hash_table
, &s_slot
);
2460 data
= lto_read_section_data (file_data
, f_slot
->start
, f_slot
->len
);
2464 free (CONST_CAST (char *, section_name
));
2469 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2470 starts at OFFSET and has LEN bytes. */
2473 free_section_data (struct lto_file_decl_data
*file_data ATTRIBUTE_UNUSED
,
2474 enum lto_section_type section_type ATTRIBUTE_UNUSED
,
2475 const char *name ATTRIBUTE_UNUSED
,
2476 const char *offset
, size_t len ATTRIBUTE_UNUSED
)
2479 intptr_t computed_len
;
2480 intptr_t computed_offset
;
2485 computed_offset
= ((intptr_t) offset
) & page_mask
;
2486 diff
= (intptr_t) offset
- computed_offset
;
2487 computed_len
= len
+ diff
;
2489 munmap ((caddr_t
) computed_offset
, computed_len
);
2491 free (CONST_CAST(char *, offset
));
2495 static lto_file
*current_lto_file
;
2497 /* Helper for qsort; compare partitions and return one with smaller size.
2498 We sort from greatest to smallest so parallel build doesn't stale on the
2499 longest compilation being executed too late. */
2502 cmp_partitions_size (const void *a
, const void *b
)
2504 const struct ltrans_partition_def
*pa
2505 = *(struct ltrans_partition_def
*const *)a
;
2506 const struct ltrans_partition_def
*pb
2507 = *(struct ltrans_partition_def
*const *)b
;
2508 return pb
->insns
- pa
->insns
;
2511 /* Helper for qsort; compare partitions and return one with smaller order. */
2514 cmp_partitions_order (const void *a
, const void *b
)
2516 const struct ltrans_partition_def
*pa
2517 = *(struct ltrans_partition_def
*const *)a
;
2518 const struct ltrans_partition_def
*pb
2519 = *(struct ltrans_partition_def
*const *)b
;
2520 int ordera
= -1, orderb
= -1;
2522 if (lto_symtab_encoder_size (pa
->encoder
))
2523 ordera
= lto_symtab_encoder_deref (pa
->encoder
, 0)->symbol
.order
;
2524 if (lto_symtab_encoder_size (pb
->encoder
))
2525 orderb
= lto_symtab_encoder_deref (pb
->encoder
, 0)->symbol
.order
;
2526 return orderb
- ordera
;
2529 /* Write all output files in WPA mode and the file with the list of
2533 lto_wpa_write_files (void)
2537 ltrans_partition part
;
2538 FILE *ltrans_output_list_stream
;
2539 char *temp_filename
;
2542 /* Open the LTRANS output list. */
2543 if (!ltrans_output_list
)
2544 fatal_error ("no LTRANS output list filename provided");
2545 ltrans_output_list_stream
= fopen (ltrans_output_list
, "w");
2546 if (ltrans_output_list_stream
== NULL
)
2547 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list
);
2549 timevar_push (TV_WHOPR_WPA
);
2551 FOR_EACH_VEC_ELT (ltrans_partitions
, i
, part
)
2552 lto_stats
.num_output_symtab_nodes
+= lto_symtab_encoder_size (part
->encoder
);
2554 /* Find out statics that need to be promoted
2555 to globals with hidden visibility because they are accessed from multiple
2557 lto_promote_cross_file_statics ();
2559 timevar_pop (TV_WHOPR_WPA
);
2561 timevar_push (TV_WHOPR_WPA_IO
);
2563 /* Generate a prefix for the LTRANS unit files. */
2564 blen
= strlen (ltrans_output_list
);
2565 temp_filename
= (char *) xmalloc (blen
+ sizeof ("2147483648.o"));
2566 strcpy (temp_filename
, ltrans_output_list
);
2567 if (blen
> sizeof (".out")
2568 && strcmp (temp_filename
+ blen
- sizeof (".out") + 1,
2570 temp_filename
[blen
- sizeof (".out") + 1] = '\0';
2571 blen
= strlen (temp_filename
);
2573 n_sets
= ltrans_partitions
.length ();
2575 /* Sort partitions by size so small ones are compiled last.
2576 FIXME: Even when not reordering we may want to output one list for parallel make
2577 and other for final link command. */
2578 ltrans_partitions
.qsort (flag_toplevel_reorder
2579 ? cmp_partitions_size
2580 : cmp_partitions_order
);
2581 for (i
= 0; i
< n_sets
; i
++)
2584 ltrans_partition part
= ltrans_partitions
[i
];
2586 /* Write all the nodes in SET. */
2587 sprintf (temp_filename
+ blen
, "%u.o", i
);
2588 file
= lto_obj_file_open (temp_filename
, true);
2590 fatal_error ("lto_obj_file_open() failed");
2593 fprintf (stderr
, " %s (%s %i insns)", temp_filename
, part
->name
, part
->insns
);
2594 if (cgraph_dump_file
)
2596 lto_symtab_encoder_iterator lsei
;
2598 fprintf (cgraph_dump_file
, "Writing partition %s to file %s, %i insns\n",
2599 part
->name
, temp_filename
, part
->insns
);
2600 fprintf (cgraph_dump_file
, " Symbols in partition: ");
2601 for (lsei
= lsei_start_in_partition (part
->encoder
); !lsei_end_p (lsei
);
2602 lsei_next_in_partition (&lsei
))
2604 symtab_node node
= lsei_node (lsei
);
2605 fprintf (cgraph_dump_file
, "%s ", symtab_node_asm_name (node
));
2607 fprintf (cgraph_dump_file
, "\n Symbols in boundary: ");
2608 for (lsei
= lsei_start (part
->encoder
); !lsei_end_p (lsei
);
2611 symtab_node node
= lsei_node (lsei
);
2612 if (!lto_symtab_encoder_in_partition_p (part
->encoder
, node
))
2614 fprintf (cgraph_dump_file
, "%s ", symtab_node_asm_name (node
));
2615 cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
);
2617 && lto_symtab_encoder_encode_body_p (part
->encoder
, cnode
))
2618 fprintf (cgraph_dump_file
, "(body included)");
2621 varpool_node
*vnode
= dyn_cast
<varpool_node
> (node
);
2623 && lto_symtab_encoder_encode_initializer_p (part
->encoder
, vnode
))
2624 fprintf (cgraph_dump_file
, "(initializer included)");
2628 fprintf (cgraph_dump_file
, "\n");
2630 gcc_checking_assert (lto_symtab_encoder_size (part
->encoder
) || !i
);
2632 lto_set_current_out_file (file
);
2634 ipa_write_optimization_summaries (part
->encoder
);
2636 lto_set_current_out_file (NULL
);
2637 lto_obj_file_close (file
);
2639 part
->encoder
= NULL
;
2641 len
= strlen (temp_filename
);
2642 if (fwrite (temp_filename
, 1, len
, ltrans_output_list_stream
) < len
2643 || fwrite ("\n", 1, 1, ltrans_output_list_stream
) < 1)
2644 fatal_error ("writing to LTRANS output list %s: %m",
2645 ltrans_output_list
);
2648 lto_stats
.num_output_files
+= n_sets
;
2650 /* Close the LTRANS output list. */
2651 if (fclose (ltrans_output_list_stream
))
2652 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list
);
2654 free_ltrans_partitions();
2655 free (temp_filename
);
2657 timevar_pop (TV_WHOPR_WPA_IO
);
2661 /* If TT is a variable or function decl replace it with its
2662 prevailing variant. */
2663 #define LTO_SET_PREVAIL(tt) \
2665 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
2666 tt = lto_symtab_prevailing_decl (tt); \
2669 /* Ensure that TT isn't a replacable var of function decl. */
2670 #define LTO_NO_PREVAIL(tt) \
2671 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2673 /* Given a tree T replace all fields referring to variables or functions
2674 with their prevailing variant. */
2676 lto_fixup_prevailing_decls (tree t
)
2678 enum tree_code code
= TREE_CODE (t
);
2679 LTO_NO_PREVAIL (TREE_TYPE (t
));
2680 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
2681 LTO_NO_PREVAIL (TREE_CHAIN (t
));
2684 LTO_NO_PREVAIL (DECL_NAME (t
));
2685 LTO_SET_PREVAIL (DECL_CONTEXT (t
));
2686 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
2688 LTO_SET_PREVAIL (DECL_SIZE (t
));
2689 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t
));
2690 LTO_SET_PREVAIL (DECL_INITIAL (t
));
2691 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t
));
2692 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t
));
2694 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_WITH_VIS
))
2696 LTO_NO_PREVAIL (t
->decl_with_vis
.assembler_name
);
2697 LTO_NO_PREVAIL (DECL_SECTION_NAME (t
));
2699 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_NON_COMMON
))
2701 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t
));
2702 LTO_NO_PREVAIL (DECL_RESULT_FLD (t
));
2703 LTO_NO_PREVAIL (DECL_VINDEX (t
));
2705 if (CODE_CONTAINS_STRUCT (code
, TS_FUNCTION_DECL
))
2706 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t
));
2707 if (CODE_CONTAINS_STRUCT (code
, TS_FIELD_DECL
))
2709 LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t
));
2710 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t
));
2711 LTO_NO_PREVAIL (DECL_QUALIFIER (t
));
2712 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t
));
2713 LTO_NO_PREVAIL (DECL_FCONTEXT (t
));
2716 else if (TYPE_P (t
))
2718 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t
));
2719 LTO_SET_PREVAIL (TYPE_SIZE (t
));
2720 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t
));
2721 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t
));
2722 LTO_NO_PREVAIL (TYPE_NAME (t
));
2724 LTO_SET_PREVAIL (TYPE_MINVAL (t
));
2725 LTO_SET_PREVAIL (TYPE_MAXVAL (t
));
2726 LTO_SET_PREVAIL (t
->type_non_common
.binfo
);
2728 LTO_SET_PREVAIL (TYPE_CONTEXT (t
));
2730 LTO_NO_PREVAIL (TYPE_CANONICAL (t
));
2731 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t
));
2732 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t
));
2734 else if (EXPR_P (t
))
2737 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
2738 LTO_SET_PREVAIL (TREE_OPERAND (t
, i
));
2745 LTO_SET_PREVAIL (TREE_VALUE (t
));
2746 LTO_SET_PREVAIL (TREE_PURPOSE (t
));
2753 #undef LTO_SET_PREVAIL
2754 #undef LTO_NO_PREVAIL
2756 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2757 replaces var and function decls with the corresponding prevailing def. */
2760 lto_fixup_state (struct lto_in_decl_state
*state
)
2763 struct lto_tree_ref_table
*table
;
2765 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2766 we still need to walk from all DECLs to find the reachable
2767 FUNCTION_DECLs and VAR_DECLs. */
2768 for (si
= 0; si
< LTO_N_DECL_STREAMS
; si
++)
2770 table
= &state
->streams
[si
];
2771 for (i
= 0; i
< table
->size
; i
++)
2773 tree
*tp
= table
->trees
+ i
;
2774 if (VAR_OR_FUNCTION_DECL_P (*tp
))
2775 *tp
= lto_symtab_prevailing_decl (*tp
);
2780 /* A callback of htab_traverse. Just extracts a state from SLOT
2781 and calls lto_fixup_state. */
2784 lto_fixup_state_aux (void **slot
, void *aux ATTRIBUTE_UNUSED
)
2786 struct lto_in_decl_state
*state
= (struct lto_in_decl_state
*) *slot
;
2787 lto_fixup_state (state
);
2791 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2795 lto_fixup_decls (struct lto_file_decl_data
**files
)
2801 FOR_EACH_HTAB_ELEMENT (tree_with_vars
, t
, tree
, hi
)
2802 lto_fixup_prevailing_decls (t
);
2804 for (i
= 0; files
[i
]; i
++)
2806 struct lto_file_decl_data
*file
= files
[i
];
2807 struct lto_in_decl_state
*state
= file
->global_decl_state
;
2808 lto_fixup_state (state
);
2810 htab_traverse (file
->function_decl_states
, lto_fixup_state_aux
, NULL
);
2814 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data
**all_file_decl_data
;
2816 /* Turn file datas for sub files into a single array, so that they look
2817 like separate files for further passes. */
2820 lto_flatten_files (struct lto_file_decl_data
**orig
, int count
, int last_file_ix
)
2822 struct lto_file_decl_data
*n
, *next
;
2825 lto_stats
.num_input_files
= count
;
2827 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count
+ 1);
2828 /* Set the hooks so that all of the ipa passes can read in their data. */
2829 lto_set_in_hooks (all_file_decl_data
, get_section_data
, free_section_data
);
2830 for (i
= 0, k
= 0; i
< last_file_ix
; i
++)
2832 for (n
= orig
[i
]; n
!= NULL
; n
= next
)
2834 all_file_decl_data
[k
++] = n
;
2839 all_file_decl_data
[k
] = NULL
;
2840 gcc_assert (k
== count
);
2843 /* Input file data before flattening (i.e. splitting them to subfiles to support
2844 incremental linking. */
2845 static int real_file_count
;
2846 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data
**real_file_decl_data
;
2848 static void print_lto_report_1 (void);
2850 /* Read all the symbols from the input files FNAMES. NFILES is the
2851 number of files requested in the command line. Instantiate a
2852 global call graph by aggregating all the sub-graphs found in each
2856 read_cgraph_and_symbols (unsigned nfiles
, const char **fnames
)
2858 unsigned int i
, last_file_ix
;
2860 struct cgraph_node
*node
;
2862 struct lto_file_decl_data
**decl_data
;
2866 timevar_push (TV_IPA_LTO_DECL_IN
);
2869 = decl_data
= ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles
+ 1);
2870 real_file_count
= nfiles
;
2872 /* Read the resolution file. */
2874 if (resolution_file_name
)
2877 unsigned num_objects
;
2879 resolution
= fopen (resolution_file_name
, "r");
2880 if (resolution
== NULL
)
2881 fatal_error ("could not open symbol resolution file: %m");
2883 t
= fscanf (resolution
, "%u", &num_objects
);
2884 gcc_assert (t
== 1);
2886 /* True, since the plugin splits the archives. */
2887 gcc_assert (num_objects
== nfiles
);
2890 tree_with_vars
= htab_create_ggc (101, htab_hash_pointer
, htab_eq_pointer
,
2892 type_hash_cache
= htab_create_ggc (512, tree_int_map_hash
,
2893 tree_int_map_eq
, NULL
);
2894 type_pair_cache
= XCNEWVEC (struct type_pair_d
, GIMPLE_TYPE_PAIR_SIZE
);
2895 gimple_type_leader
= ggc_alloc_cleared_vec_gimple_type_leader_entry_s
2896 (GIMPLE_TYPE_LEADER_SIZE
);
2897 gimple_types
= htab_create_ggc (16381, gimple_type_hash
, gimple_type_eq
, 0);
2900 fprintf (stderr
, "Reading object files:");
2902 /* Read all of the object files specified on the command line. */
2903 for (i
= 0, last_file_ix
= 0; i
< nfiles
; ++i
)
2905 struct lto_file_decl_data
*file_data
= NULL
;
2908 fprintf (stderr
, " %s", fnames
[i
]);
2912 current_lto_file
= lto_obj_file_open (fnames
[i
], false);
2913 if (!current_lto_file
)
2916 file_data
= lto_file_read (current_lto_file
, resolution
, &count
);
2919 lto_obj_file_close (current_lto_file
);
2920 free (current_lto_file
);
2921 current_lto_file
= NULL
;
2925 decl_data
[last_file_ix
++] = file_data
;
2927 lto_obj_file_close (current_lto_file
);
2928 free (current_lto_file
);
2929 current_lto_file
= NULL
;
2933 lto_flatten_files (decl_data
, count
, last_file_ix
);
2934 lto_stats
.num_input_files
= count
;
2935 ggc_free(decl_data
);
2936 real_file_decl_data
= NULL
;
2938 if (resolution_file_name
)
2939 fclose (resolution
);
2941 /* Show the LTO report before launching LTRANS. */
2942 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
2943 print_lto_report_1 ();
2945 /* Free gimple type merging datastructures. */
2946 htab_delete (gimple_types
);
2947 gimple_types
= NULL
;
2948 htab_delete (type_hash_cache
);
2949 type_hash_cache
= NULL
;
2950 free (type_pair_cache
);
2951 type_pair_cache
= NULL
;
2952 gimple_type_leader
= NULL
;
2953 free_gimple_type_tables ();
2956 /* Set the hooks so that all of the ipa passes can read in their data. */
2957 lto_set_in_hooks (all_file_decl_data
, get_section_data
, free_section_data
);
2959 timevar_pop (TV_IPA_LTO_DECL_IN
);
2962 fprintf (stderr
, "\nReading the callgraph\n");
2964 timevar_push (TV_IPA_LTO_CGRAPH_IO
);
2965 /* Read the symtab. */
2968 /* Store resolutions into the symbol table. */
2974 FOR_EACH_SYMBOL (snode
)
2975 if (symtab_real_symbol_p (snode
)
2976 && (res
= pointer_map_contains (resolution_map
,
2977 snode
->symbol
.decl
)))
2978 snode
->symbol
.resolution
2979 = (enum ld_plugin_symbol_resolution
)(size_t)*res
;
2981 pointer_map_destroy (resolution_map
);
2982 resolution_map
= NULL
;
2985 timevar_pop (TV_IPA_LTO_CGRAPH_IO
);
2988 fprintf (stderr
, "Merging declarations\n");
2990 timevar_push (TV_IPA_LTO_DECL_MERGE
);
2991 /* Merge global decls. In ltrans mode we read merged cgraph, we do not
2992 need to care about resolving symbols again, we only need to replace
2993 duplicated declarations read from the callgraph and from function
2997 lto_symtab_merge_decls ();
2999 /* If there were errors during symbol merging bail out, we have no
3000 good way to recover here. */
3002 fatal_error ("errors during merging of translation units");
3004 /* Fixup all decls. */
3005 lto_fixup_decls (all_file_decl_data
);
3007 htab_delete (tree_with_vars
);
3008 tree_with_vars
= NULL
;
3011 timevar_pop (TV_IPA_LTO_DECL_MERGE
);
3012 /* Each pass will set the appropriate timer. */
3015 fprintf (stderr
, "Reading summaries\n");
3017 /* Read the IPA summary data. */
3019 ipa_read_optimization_summaries ();
3021 ipa_read_summaries ();
3023 for (i
= 0; all_file_decl_data
[i
]; i
++)
3025 gcc_assert (all_file_decl_data
[i
]->symtab_node_encoder
);
3026 lto_symtab_encoder_delete (all_file_decl_data
[i
]->symtab_node_encoder
);
3027 all_file_decl_data
[i
]->symtab_node_encoder
= NULL
;
3030 /* Finally merge the cgraph according to the decl merging decisions. */
3031 timevar_push (TV_IPA_LTO_CGRAPH_MERGE
);
3032 if (cgraph_dump_file
)
3034 fprintf (cgraph_dump_file
, "Before merging:\n");
3035 dump_cgraph (cgraph_dump_file
);
3036 dump_varpool (cgraph_dump_file
);
3038 lto_symtab_merge_cgraph_nodes ();
3041 /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
3042 summaries computed and needs to apply changes. At the moment WHOPR only
3043 supports inlining, so we can push it here by hand. In future we need to stream
3044 this field into ltrans compilation. */
3046 FOR_EACH_DEFINED_FUNCTION (node
)
3047 node
->ipa_transforms_to_apply
.safe_push ((ipa_opt_pass
)&pass_ipa_inline
);
3049 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE
);
3051 timevar_push (TV_IPA_LTO_DECL_INIT_IO
);
3053 /* Indicate that the cgraph is built and ready. */
3054 cgraph_function_flags_ready
= true;
3056 timevar_pop (TV_IPA_LTO_DECL_INIT_IO
);
3057 ggc_free (all_file_decl_data
);
3058 all_file_decl_data
= NULL
;
3062 /* Materialize all the bodies for all the nodes in the callgraph. */
3065 materialize_cgraph (void)
3068 struct cgraph_node
*node
;
3070 timevar_id_t lto_timer
;
3074 flag_wpa
? "Materializing decls:" : "Reading function bodies:");
3076 /* Now that we have input the cgraph, we need to clear all of the aux
3077 nodes and read the functions if we are not running in WPA mode. */
3078 timevar_push (TV_IPA_LTO_GIMPLE_IN
);
3080 FOR_EACH_FUNCTION (node
)
3082 if (node
->symbol
.lto_file_data
)
3084 lto_materialize_function (node
);
3085 lto_stats
.num_input_cgraph_nodes
++;
3089 timevar_pop (TV_IPA_LTO_GIMPLE_IN
);
3091 /* Start the appropriate timer depending on the mode that we are
3093 lto_timer
= (flag_wpa
) ? TV_WHOPR_WPA
3094 : (flag_ltrans
) ? TV_WHOPR_LTRANS
3096 timevar_push (lto_timer
);
3098 current_function_decl
= NULL
;
3101 /* Inform the middle end about the global variables we have seen. */
3102 FOR_EACH_VEC_ELT (*lto_global_var_decls
, i
, decl
)
3103 rest_of_decl_compilation (decl
, 1, 0);
3106 fprintf (stderr
, "\n");
3108 timevar_pop (lto_timer
);
3112 /* Show various memory usage statistics related to LTO. */
3114 print_lto_report_1 (void)
3116 const char *pfx
= (flag_lto
) ? "LTO" : (flag_wpa
) ? "WPA" : "LTRANS";
3117 fprintf (stderr
, "%s statistics\n", pfx
);
3120 fprintf (stderr
, "[%s] GIMPLE type table: size %ld, %ld elements, "
3121 "%ld searches, %ld collisions (ratio: %f)\n", pfx
,
3122 (long) htab_size (gimple_types
),
3123 (long) htab_elements (gimple_types
),
3124 (long) gimple_types
->searches
,
3125 (long) gimple_types
->collisions
,
3126 htab_collisions (gimple_types
));
3128 fprintf (stderr
, "[%s] GIMPLE type table is empty\n", pfx
);
3129 if (type_hash_cache
)
3130 fprintf (stderr
, "[%s] GIMPLE type hash cache table: size %ld, %ld elements, "
3131 "%ld searches, %ld collisions (ratio: %f)\n", pfx
,
3132 (long) htab_size (type_hash_cache
),
3133 (long) htab_elements (type_hash_cache
),
3134 (long) type_hash_cache
->searches
,
3135 (long) type_hash_cache
->collisions
,
3136 htab_collisions (type_hash_cache
));
3138 fprintf (stderr
, "[%s] GIMPLE type hash cache table is empty\n", pfx
);
3140 print_gimple_types_stats (pfx
);
3141 print_lto_report (pfx
);
3144 /* Perform whole program analysis (WPA) on the callgraph and write out the
3145 optimization plan. */
3148 do_whole_program_analysis (void)
3152 timevar_start (TV_PHASE_OPT_GEN
);
3154 /* Note that since we are in WPA mode, materialize_cgraph will not
3155 actually read in all the function bodies. It only materializes
3156 the decls and cgraph nodes so that analysis can be performed. */
3157 materialize_cgraph ();
3159 /* Reading in the cgraph uses different timers, start timing WPA now. */
3160 timevar_push (TV_WHOPR_WPA
);
3162 if (pre_ipa_mem_report
)
3164 fprintf (stderr
, "Memory consumption before IPA\n");
3165 dump_memory_report (false);
3168 cgraph_function_flags_ready
= true;
3170 if (cgraph_dump_file
)
3172 dump_cgraph (cgraph_dump_file
);
3173 dump_varpool (cgraph_dump_file
);
3175 bitmap_obstack_initialize (NULL
);
3176 cgraph_state
= CGRAPH_STATE_IPA_SSA
;
3178 execute_ipa_pass_list (all_regular_ipa_passes
);
3179 symtab_remove_unreachable_nodes (false, dump_file
);
3181 if (cgraph_dump_file
)
3183 fprintf (cgraph_dump_file
, "Optimized ");
3184 dump_cgraph (cgraph_dump_file
);
3185 dump_varpool (cgraph_dump_file
);
3187 #ifdef ENABLE_CHECKING
3190 bitmap_obstack_release (NULL
);
3192 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3193 timevar_pop (TV_WHOPR_WPA
);
3195 timevar_push (TV_WHOPR_PARTITIONING
);
3196 if (flag_lto_partition_1to1
)
3198 else if (flag_lto_partition_max
)
3201 lto_balanced_map ();
3203 /* AUX pointers are used by partitioning code to bookkeep number of
3204 partitions symbol is in. This is no longer needed. */
3205 FOR_EACH_SYMBOL (node
)
3206 node
->symbol
.aux
= NULL
;
3208 lto_stats
.num_cgraph_partitions
+= ltrans_partitions
.length ();
3209 timevar_pop (TV_WHOPR_PARTITIONING
);
3211 timevar_stop (TV_PHASE_OPT_GEN
);
3212 timevar_start (TV_PHASE_STREAM_OUT
);
3216 fprintf (stderr
, "\nStreaming out");
3219 lto_wpa_write_files ();
3221 fprintf (stderr
, "\n");
3223 timevar_stop (TV_PHASE_STREAM_OUT
);
3226 if (post_ipa_mem_report
)
3228 fprintf (stderr
, "Memory consumption after IPA\n");
3229 dump_memory_report (false);
3232 /* Show the LTO report before launching LTRANS. */
3233 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
3234 print_lto_report_1 ();
3236 dump_memory_report (true);
3240 static GTY(()) tree lto_eh_personality_decl
;
3242 /* Return the LTO personality function decl. */
3245 lto_eh_personality (void)
3247 if (!lto_eh_personality_decl
)
3249 /* Use the first personality DECL for our personality if we don't
3250 support multiple ones. This ensures that we don't artificially
3251 create the need for them in a single-language program. */
3252 if (first_personality_decl
&& !dwarf2out_do_cfi_asm ())
3253 lto_eh_personality_decl
= first_personality_decl
;
3255 lto_eh_personality_decl
= lhd_gcc_personality ();
3258 return lto_eh_personality_decl
;
3261 /* Set the process name based on the LTO mode. */
3264 lto_process_name (void)
3267 setproctitle ("lto1-lto");
3269 setproctitle ("lto1-wpa");
3271 setproctitle ("lto1-ltrans");
3275 /* Initialize the LTO front end. */
3280 lto_process_name ();
3281 lto_streamer_hooks_init ();
3283 lto_set_in_hooks (NULL
, get_section_data
, free_section_data
);
3284 memset (<o_stats
, 0, sizeof (lto_stats
));
3285 bitmap_obstack_initialize (NULL
);
3286 gimple_register_cfg_hooks ();
3290 /* Main entry point for the GIMPLE front end. This front end has
3291 three main personalities:
3293 - LTO (-flto). All the object files on the command line are
3294 loaded in memory and processed as a single translation unit.
3295 This is the traditional link-time optimization behavior.
3297 - WPA (-fwpa). Only the callgraph and summary information for
3298 files in the command file are loaded. A single callgraph
3299 (without function bodies) is instantiated for the whole set of
3300 files. IPA passes are only allowed to analyze the call graph
3301 and make transformation decisions. The callgraph is
3302 partitioned, each partition is written to a new object file
3303 together with the transformation decisions.
3305 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
3306 summary files from running again. Since WPA computed summary
3307 information and decided what transformations to apply, LTRANS
3308 simply applies them. */
3313 /* LTO is called as a front end, even though it is not a front end.
3314 Because it is called as a front end, TV_PHASE_PARSING and
3315 TV_PARSE_GLOBAL are active, and we need to turn them off while
3316 doing LTO. Later we turn them back on so they are active up in
3318 timevar_pop (TV_PARSE_GLOBAL
);
3319 timevar_stop (TV_PHASE_PARSING
);
3321 timevar_start (TV_PHASE_SETUP
);
3323 /* Initialize the LTO front end. */
3326 timevar_stop (TV_PHASE_SETUP
);
3327 timevar_start (TV_PHASE_STREAM_IN
);
3329 /* Read all the symbols and call graph from all the files in the
3331 read_cgraph_and_symbols (num_in_fnames
, in_fnames
);
3333 timevar_stop (TV_PHASE_STREAM_IN
);
3337 /* If WPA is enabled analyze the whole call graph and create an
3338 optimization plan. Otherwise, read in all the function
3339 bodies and continue with optimization. */
3341 do_whole_program_analysis ();
3344 struct varpool_node
*vnode
;
3346 timevar_start (TV_PHASE_OPT_GEN
);
3348 materialize_cgraph ();
3350 lto_promote_statics_nonwpa ();
3352 /* Let the middle end know that we have read and merged all of
3356 timevar_stop (TV_PHASE_OPT_GEN
);
3358 /* FIXME lto, if the processes spawned by WPA fail, we miss
3359 the chance to print WPA's report, so WPA will call
3360 print_lto_report before launching LTRANS. If LTRANS was
3361 launched directly by the driver we would not need to do
3363 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
3364 print_lto_report_1 ();
3366 /* Record the global variables. */
3367 FOR_EACH_DEFINED_VARIABLE (vnode
)
3368 vec_safe_push (lto_global_var_decls
, vnode
->symbol
.decl
);
3372 /* Here we make LTO pretend to be a parser. */
3373 timevar_start (TV_PHASE_PARSING
);
3374 timevar_push (TV_PARSE_GLOBAL
);
3377 #include "gt-lto-lto.h"