1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
37 #include "pointer-set.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48 #include "data-streamer.h"
50 #include "pass_manager.h"
52 /* Vector to keep track of external variables we've seen so far. */
53 vec
<tree
, va_gc
> *lto_global_var_decls
;
55 static GTY(()) tree first_personality_decl
;
57 /* Returns a hash code for P. */
60 hash_name (const void *p
)
62 const struct lto_section_slot
*ds
= (const struct lto_section_slot
*) p
;
63 return (hashval_t
) htab_hash_string (ds
->name
);
67 /* Returns nonzero if P1 and P2 are equal. */
70 eq_name (const void *p1
, const void *p2
)
72 const struct lto_section_slot
*s1
=
73 (const struct lto_section_slot
*) p1
;
74 const struct lto_section_slot
*s2
=
75 (const struct lto_section_slot
*) p2
;
77 return strcmp (s1
->name
, s2
->name
) == 0;
80 /* Free lto_section_slot */
83 free_with_string (void *arg
)
85 struct lto_section_slot
*s
= (struct lto_section_slot
*)arg
;
87 free (CONST_CAST (char *, s
->name
));
91 /* Create section hash table */
94 lto_obj_create_section_hash_table (void)
96 return htab_create (37, hash_name
, eq_name
, free_with_string
);
99 /* Delete an allocated integer KEY in the splay tree. */
102 lto_splay_tree_delete_id (splay_tree_key key
)
107 /* Compare splay tree node ids A and B. */
110 lto_splay_tree_compare_ids (splay_tree_key a
, splay_tree_key b
)
112 unsigned HOST_WIDE_INT ai
;
113 unsigned HOST_WIDE_INT bi
;
115 ai
= *(unsigned HOST_WIDE_INT
*) a
;
116 bi
= *(unsigned HOST_WIDE_INT
*) b
;
125 /* Look up splay tree node by ID in splay tree T. */
127 static splay_tree_node
128 lto_splay_tree_lookup (splay_tree t
, unsigned HOST_WIDE_INT id
)
130 return splay_tree_lookup (t
, (splay_tree_key
) &id
);
133 /* Check if KEY has ID. */
136 lto_splay_tree_id_equal_p (splay_tree_key key
, unsigned HOST_WIDE_INT id
)
138 return *(unsigned HOST_WIDE_INT
*) key
== id
;
141 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
142 The ID is allocated separately because we need HOST_WIDE_INTs which may
143 be wider than a splay_tree_key. */
146 lto_splay_tree_insert (splay_tree t
, unsigned HOST_WIDE_INT id
,
147 struct lto_file_decl_data
*file_data
)
149 unsigned HOST_WIDE_INT
*idp
= XCNEW (unsigned HOST_WIDE_INT
);
151 splay_tree_insert (t
, (splay_tree_key
) idp
, (splay_tree_value
) file_data
);
154 /* Create a splay tree. */
157 lto_splay_tree_new (void)
159 return splay_tree_new (lto_splay_tree_compare_ids
,
160 lto_splay_tree_delete_id
,
164 /* Return true when NODE has a clone that is analyzed (i.e. we need
165 to load its body even if the node itself is not needed). */
168 has_analyzed_clone_p (struct cgraph_node
*node
)
170 struct cgraph_node
*orig
= node
;
175 if (node
->symbol
.analyzed
)
179 else if (node
->next_sibling_clone
)
180 node
= node
->next_sibling_clone
;
183 while (node
!= orig
&& !node
->next_sibling_clone
)
184 node
= node
->clone_of
;
186 node
= node
->next_sibling_clone
;
192 /* Read the function body for the function associated with NODE. */
195 lto_materialize_function (struct cgraph_node
*node
)
199 decl
= node
->symbol
.decl
;
200 /* Read in functions with body (analyzed nodes)
201 and also functions that are needed to produce virtual clones. */
202 if ((cgraph_function_with_gimple_body_p (node
) && node
->symbol
.analyzed
)
203 || node
->used_as_abstract_origin
204 || has_analyzed_clone_p (node
))
206 /* Clones don't need to be read. */
209 if (DECL_FUNCTION_PERSONALITY (decl
) && !first_personality_decl
)
210 first_personality_decl
= DECL_FUNCTION_PERSONALITY (decl
);
213 /* Let the middle end know about the function. */
214 rest_of_decl_compilation (decl
, 1, 0);
218 /* Decode the content of memory pointed to by DATA in the in decl
219 state object STATE. DATA_IN points to a data_in structure for
220 decoding. Return the address after the decoded object in the
223 static const uint32_t *
224 lto_read_in_decl_state (struct data_in
*data_in
, const uint32_t *data
,
225 struct lto_in_decl_state
*state
)
232 decl
= streamer_tree_cache_get_tree (data_in
->reader_cache
, ix
);
233 if (TREE_CODE (decl
) != FUNCTION_DECL
)
235 gcc_assert (decl
== void_type_node
);
238 state
->fn_decl
= decl
;
240 for (i
= 0; i
< LTO_N_DECL_STREAMS
; i
++)
242 uint32_t size
= *data
++;
243 tree
*decls
= ggc_alloc_vec_tree (size
);
245 for (j
= 0; j
< size
; j
++)
246 decls
[j
] = streamer_tree_cache_get_tree (data_in
->reader_cache
, data
[j
]);
248 state
->streams
[i
].size
= size
;
249 state
->streams
[i
].trees
= decls
;
257 /* Global canonical type table. */
258 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
259 htab_t gimple_canonical_types
;
260 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map
)))
261 htab_t canonical_type_hash_cache
;
263 /* Returning a hash value for gimple type TYPE combined with VAL.
265 The hash value returned is equal for types considered compatible
266 by gimple_canonical_types_compatible_p. */
269 iterative_hash_canonical_type (tree type
, hashval_t val
)
273 struct tree_int_map
*mp
, m
;
276 if ((slot
= htab_find_slot (canonical_type_hash_cache
, &m
, NO_INSERT
)))
277 return iterative_hash_hashval_t (((struct tree_int_map
*) *slot
)->to
, val
);
279 /* Combine a few common features of types so that types are grouped into
280 smaller sets; when searching for existing matching types to merge,
281 only existing types having the same features as the new type will be
283 v
= iterative_hash_hashval_t (TREE_CODE (type
), 0);
284 v
= iterative_hash_hashval_t (TREE_ADDRESSABLE (type
), v
);
285 v
= iterative_hash_hashval_t (TYPE_ALIGN (type
), v
);
286 v
= iterative_hash_hashval_t (TYPE_MODE (type
), v
);
288 /* Incorporate common features of numerical types. */
289 if (INTEGRAL_TYPE_P (type
)
290 || SCALAR_FLOAT_TYPE_P (type
)
291 || FIXED_POINT_TYPE_P (type
)
292 || TREE_CODE (type
) == OFFSET_TYPE
293 || POINTER_TYPE_P (type
))
295 v
= iterative_hash_hashval_t (TYPE_PRECISION (type
), v
);
296 v
= iterative_hash_hashval_t (TYPE_UNSIGNED (type
), v
);
299 if (VECTOR_TYPE_P (type
))
301 v
= iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type
), v
);
302 v
= iterative_hash_hashval_t (TYPE_UNSIGNED (type
), v
);
305 if (TREE_CODE (type
) == COMPLEX_TYPE
)
306 v
= iterative_hash_hashval_t (TYPE_UNSIGNED (type
), v
);
308 /* For pointer and reference types, fold in information about the type
309 pointed to but do not recurse to the pointed-to type. */
310 if (POINTER_TYPE_P (type
))
312 v
= iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type
), v
);
313 v
= iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type
)), v
);
314 v
= iterative_hash_hashval_t (TYPE_RESTRICT (type
), v
);
315 v
= iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type
)), v
);
318 /* For integer types hash only the string flag. */
319 if (TREE_CODE (type
) == INTEGER_TYPE
)
320 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
322 /* For array types hash the domain bounds and the string flag. */
323 if (TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_DOMAIN (type
))
325 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
326 /* OMP lowering can introduce error_mark_node in place of
327 random local decls in types. */
328 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) != error_mark_node
)
329 v
= iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type
)), v
);
330 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) != error_mark_node
)
331 v
= iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), v
);
334 /* Recurse for aggregates with a single element type. */
335 if (TREE_CODE (type
) == ARRAY_TYPE
336 || TREE_CODE (type
) == COMPLEX_TYPE
337 || TREE_CODE (type
) == VECTOR_TYPE
)
338 v
= iterative_hash_canonical_type (TREE_TYPE (type
), v
);
340 /* Incorporate function return and argument types. */
341 if (TREE_CODE (type
) == FUNCTION_TYPE
|| TREE_CODE (type
) == METHOD_TYPE
)
346 /* For method types also incorporate their parent class. */
347 if (TREE_CODE (type
) == METHOD_TYPE
)
348 v
= iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type
), v
);
350 v
= iterative_hash_canonical_type (TREE_TYPE (type
), v
);
352 for (p
= TYPE_ARG_TYPES (type
), na
= 0; p
; p
= TREE_CHAIN (p
))
354 v
= iterative_hash_canonical_type (TREE_VALUE (p
), v
);
358 v
= iterative_hash_hashval_t (na
, v
);
361 if (RECORD_OR_UNION_TYPE_P (type
))
366 for (f
= TYPE_FIELDS (type
), nf
= 0; f
; f
= TREE_CHAIN (f
))
367 if (TREE_CODE (f
) == FIELD_DECL
)
369 v
= iterative_hash_canonical_type (TREE_TYPE (f
), v
);
373 v
= iterative_hash_hashval_t (nf
, v
);
376 /* Cache the just computed hash value. */
377 mp
= ggc_alloc_cleared_tree_int_map ();
378 mp
->base
.from
= type
;
380 /* As we recurse the hashtable may expand between looking up the
381 cached value (and not finding one) and here, so we have to
382 re-lookup the slot. */
383 slot
= htab_find_slot (canonical_type_hash_cache
, &m
, INSERT
);
386 return iterative_hash_hashval_t (v
, val
);
390 gimple_canonical_type_hash (const void *p
)
392 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree
) p
), 0);
396 /* The TYPE_CANONICAL merging machinery. It should closely resemble
397 the middle-end types_compatible_p function. It needs to avoid
398 claiming types are different for types that should be treated
399 the same with respect to TBAA. Canonical types are also used
400 for IL consistency checks via the useless_type_conversion_p
401 predicate which does not handle all type kinds itself but falls
402 back to pointer-comparison of TYPE_CANONICAL for aggregates
405 /* Return true iff T1 and T2 are structurally identical for what
406 TBAA is concerned. */
409 gimple_canonical_types_compatible_p (tree t1
, tree t2
)
411 /* Before starting to set up the SCC machinery handle simple cases. */
413 /* Check first for the obvious case of pointer identity. */
417 /* Check that we have two types to compare. */
418 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
421 /* If the types have been previously registered and found equal
423 if (TYPE_CANONICAL (t1
)
424 && TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
))
427 /* Can't be the same type if the types don't have the same code. */
428 if (TREE_CODE (t1
) != TREE_CODE (t2
))
431 if (TREE_ADDRESSABLE (t1
) != TREE_ADDRESSABLE (t2
))
434 /* Qualifiers do not matter for canonical type comparison purposes. */
436 /* Void types and nullptr types are always the same. */
437 if (TREE_CODE (t1
) == VOID_TYPE
438 || TREE_CODE (t1
) == NULLPTR_TYPE
)
441 /* Can't be the same type if they have different alignment, or mode. */
442 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
)
443 || TYPE_MODE (t1
) != TYPE_MODE (t2
))
446 /* Non-aggregate types can be handled cheaply. */
447 if (INTEGRAL_TYPE_P (t1
)
448 || SCALAR_FLOAT_TYPE_P (t1
)
449 || FIXED_POINT_TYPE_P (t1
)
450 || TREE_CODE (t1
) == VECTOR_TYPE
451 || TREE_CODE (t1
) == COMPLEX_TYPE
452 || TREE_CODE (t1
) == OFFSET_TYPE
453 || POINTER_TYPE_P (t1
))
455 /* Can't be the same type if they have different sign or precision. */
456 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
)
457 || TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
460 if (TREE_CODE (t1
) == INTEGER_TYPE
461 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
464 /* For canonical type comparisons we do not want to build SCCs
465 so we cannot compare pointed-to types. But we can, for now,
466 require the same pointed-to type kind and match what
467 useless_type_conversion_p would do. */
468 if (POINTER_TYPE_P (t1
))
470 /* If the two pointers have different ref-all attributes,
471 they can't be the same type. */
472 if (TYPE_REF_CAN_ALIAS_ALL (t1
) != TYPE_REF_CAN_ALIAS_ALL (t2
))
475 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
476 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
479 if (TYPE_RESTRICT (t1
) != TYPE_RESTRICT (t2
))
482 if (TREE_CODE (TREE_TYPE (t1
)) != TREE_CODE (TREE_TYPE (t2
)))
486 /* Tail-recurse to components. */
487 if (TREE_CODE (t1
) == VECTOR_TYPE
488 || TREE_CODE (t1
) == COMPLEX_TYPE
)
489 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
495 /* Do type-specific comparisons. */
496 switch (TREE_CODE (t1
))
499 /* Array types are the same if the element types are the same and
500 the number of elements are the same. */
501 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
))
502 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
503 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
507 tree i1
= TYPE_DOMAIN (t1
);
508 tree i2
= TYPE_DOMAIN (t2
);
510 /* For an incomplete external array, the type domain can be
511 NULL_TREE. Check this condition also. */
512 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
514 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
518 tree min1
= TYPE_MIN_VALUE (i1
);
519 tree min2
= TYPE_MIN_VALUE (i2
);
520 tree max1
= TYPE_MAX_VALUE (i1
);
521 tree max2
= TYPE_MAX_VALUE (i2
);
523 /* The minimum/maximum values have to be the same. */
526 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
527 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
528 || operand_equal_p (min1
, min2
, 0))))
531 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
532 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
533 || operand_equal_p (max1
, max2
, 0)))))
542 /* Function types are the same if the return type and arguments types
544 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))
547 if (!comp_type_attributes (t1
, t2
))
550 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
556 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
558 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
560 if (!gimple_canonical_types_compatible_p
561 (TREE_VALUE (parms1
), TREE_VALUE (parms2
)))
565 if (parms1
|| parms2
)
573 case QUAL_UNION_TYPE
:
577 /* For aggregate types, all the fields must be the same. */
578 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
580 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
582 /* Skip non-fields. */
583 while (f1
&& TREE_CODE (f1
) != FIELD_DECL
)
584 f1
= TREE_CHAIN (f1
);
585 while (f2
&& TREE_CODE (f2
) != FIELD_DECL
)
586 f2
= TREE_CHAIN (f2
);
589 /* The fields must have the same name, offset and type. */
590 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
591 || !gimple_compare_field_offset (f1
, f2
)
592 || !gimple_canonical_types_compatible_p
593 (TREE_TYPE (f1
), TREE_TYPE (f2
)))
597 /* If one aggregate has more fields than the other, they
611 /* Returns nonzero if P1 and P2 are equal. */
614 gimple_canonical_type_eq (const void *p1
, const void *p2
)
616 const_tree t1
= (const_tree
) p1
;
617 const_tree t2
= (const_tree
) p2
;
618 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1
),
619 CONST_CAST_TREE (t2
));
622 /* Register type T in the global type table gimple_types.
623 If another type T', compatible with T, already existed in
624 gimple_types then return T', otherwise return T. This is used by
625 LTO to merge identical types read from different TUs.
627 ??? This merging does not exactly match how the tree.c middle-end
628 functions will assign TYPE_CANONICAL when new types are created
629 during optimization (which at least happens for pointer and array
633 gimple_register_canonical_type (tree t
)
637 gcc_assert (TYPE_P (t
));
639 if (TYPE_CANONICAL (t
))
640 return TYPE_CANONICAL (t
);
642 slot
= htab_find_slot (gimple_canonical_types
, t
, INSERT
);
644 && *(tree
*)slot
!= t
)
646 tree new_type
= (tree
) *((tree
*) slot
);
648 TYPE_CANONICAL (t
) = new_type
;
653 TYPE_CANONICAL (t
) = t
;
660 /* Re-compute TYPE_CANONICAL for NODE and related types. */
663 lto_register_canonical_types (tree node
)
669 TYPE_CANONICAL (node
) = NULL_TREE
;
670 TYPE_CANONICAL (node
) = gimple_register_canonical_type (node
);
672 if (POINTER_TYPE_P (node
)
673 || TREE_CODE (node
) == COMPLEX_TYPE
674 || TREE_CODE (node
) == ARRAY_TYPE
)
675 lto_register_canonical_types (TREE_TYPE (node
));
679 /* ??? Old hashing and merging code follows, we keep it for statistics
682 /* Global type table. FIXME, it should be possible to re-use some
683 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
684 etc), but those assume that types were built with the various
685 build_*_type routines which is not the case with the streamer. */
686 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
688 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map
)))
689 htab_t type_hash_cache
;
691 static hashval_t
gimple_type_hash (const void *);
693 /* Structure used to maintain a cache of some type pairs compared by
694 gimple_types_compatible_p when comparing aggregate types. There are
695 three possible values for SAME_P:
697 -2: The pair (T1, T2) has just been inserted in the table.
698 0: T1 and T2 are different types.
699 1: T1 and T2 are the same type. */
707 typedef struct type_pair_d
*type_pair_t
;
709 #define GIMPLE_TYPE_PAIR_SIZE 16381
710 struct type_pair_d
*type_pair_cache
;
713 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
714 entry if none existed. */
716 static inline type_pair_t
717 lookup_type_pair (tree t1
, tree t2
)
720 unsigned int uid1
, uid2
;
722 if (TYPE_UID (t1
) < TYPE_UID (t2
))
724 uid1
= TYPE_UID (t1
);
725 uid2
= TYPE_UID (t2
);
729 uid1
= TYPE_UID (t2
);
730 uid2
= TYPE_UID (t1
);
732 gcc_checking_assert (uid1
!= uid2
);
734 /* iterative_hash_hashval_t imply an function calls.
735 We know that UIDS are in limited range. */
736 index
= ((((unsigned HOST_WIDE_INT
)uid1
<< HOST_BITS_PER_WIDE_INT
/ 2) + uid2
)
737 % GIMPLE_TYPE_PAIR_SIZE
);
738 if (type_pair_cache
[index
].uid1
== uid1
739 && type_pair_cache
[index
].uid2
== uid2
)
740 return &type_pair_cache
[index
];
742 type_pair_cache
[index
].uid1
= uid1
;
743 type_pair_cache
[index
].uid2
= uid2
;
744 type_pair_cache
[index
].same_p
= -2;
746 return &type_pair_cache
[index
];
749 /* Per pointer state for the SCC finding. The on_sccstack flag
750 is not strictly required, it is true when there is no hash value
751 recorded for the type and false otherwise. But querying that
765 static unsigned int next_dfs_num
;
766 static unsigned int gtc_next_dfs_num
;
768 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
769 true then if any type has no name return false, otherwise return
770 true if both types have no names. */
773 compare_type_names_p (tree t1
, tree t2
)
775 tree name1
= TYPE_NAME (t1
);
776 tree name2
= TYPE_NAME (t2
);
778 if ((name1
!= NULL_TREE
) != (name2
!= NULL_TREE
))
781 if (name1
== NULL_TREE
)
784 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
785 if (TREE_CODE (name1
) != TREE_CODE (name2
))
788 if (TREE_CODE (name1
) == TYPE_DECL
)
789 name1
= DECL_NAME (name1
);
790 gcc_checking_assert (!name1
|| TREE_CODE (name1
) == IDENTIFIER_NODE
);
792 if (TREE_CODE (name2
) == TYPE_DECL
)
793 name2
= DECL_NAME (name2
);
794 gcc_checking_assert (!name2
|| TREE_CODE (name2
) == IDENTIFIER_NODE
);
796 /* Identifiers can be compared with pointer equality rather
797 than a string comparison. */
805 gimple_types_compatible_p_1 (tree
, tree
, type_pair_t
,
807 struct pointer_map_t
*, struct obstack
*);
809 /* DFS visit the edge from the callers type pair with state *STATE to
810 the pair T1, T2 while operating in FOR_MERGING_P mode.
811 Update the merging status if it is not part of the SCC containing the
812 callers pair and return it.
813 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
816 gtc_visit (tree t1
, tree t2
,
818 vec
<type_pair_t
> *sccstack
,
819 struct pointer_map_t
*sccstate
,
820 struct obstack
*sccstate_obstack
)
822 struct sccs
*cstate
= NULL
;
826 /* Check first for the obvious case of pointer identity. */
830 /* Check that we have two types to compare. */
831 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
834 /* Can't be the same type if the types don't have the same code. */
835 if (TREE_CODE (t1
) != TREE_CODE (t2
))
838 /* Can't be the same type if they have different CV qualifiers. */
839 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
842 if (TREE_ADDRESSABLE (t1
) != TREE_ADDRESSABLE (t2
))
845 /* Void types and nullptr types are always the same. */
846 if (TREE_CODE (t1
) == VOID_TYPE
847 || TREE_CODE (t1
) == NULLPTR_TYPE
)
850 /* Can't be the same type if they have different alignment or mode. */
851 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
)
852 || TYPE_MODE (t1
) != TYPE_MODE (t2
))
855 /* Do some simple checks before doing three hashtable queries. */
856 if (INTEGRAL_TYPE_P (t1
)
857 || SCALAR_FLOAT_TYPE_P (t1
)
858 || FIXED_POINT_TYPE_P (t1
)
859 || TREE_CODE (t1
) == VECTOR_TYPE
860 || TREE_CODE (t1
) == COMPLEX_TYPE
861 || TREE_CODE (t1
) == OFFSET_TYPE
862 || POINTER_TYPE_P (t1
))
864 /* Can't be the same type if they have different sign or precision. */
865 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
)
866 || TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
869 if (TREE_CODE (t1
) == INTEGER_TYPE
870 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
873 /* That's all we need to check for float and fixed-point types. */
874 if (SCALAR_FLOAT_TYPE_P (t1
)
875 || FIXED_POINT_TYPE_P (t1
))
878 /* For other types fall through to more complex checks. */
881 /* If the hash values of t1 and t2 are different the types can't
882 possibly be the same. This helps keeping the type-pair hashtable
883 small, only tracking comparisons for hash collisions. */
884 if (gimple_type_hash (t1
) != gimple_type_hash (t2
))
887 /* Allocate a new cache entry for this comparison. */
888 p
= lookup_type_pair (t1
, t2
);
889 if (p
->same_p
== 0 || p
->same_p
== 1)
891 /* We have already decided whether T1 and T2 are the
892 same, return the cached result. */
893 return p
->same_p
== 1;
896 if ((slot
= pointer_map_contains (sccstate
, p
)) != NULL
)
897 cstate
= (struct sccs
*)*slot
;
898 /* Not yet visited. DFS recurse. */
901 gimple_types_compatible_p_1 (t1
, t2
, p
,
902 sccstack
, sccstate
, sccstate_obstack
);
903 cstate
= (struct sccs
*)* pointer_map_contains (sccstate
, p
);
904 state
->low
= MIN (state
->low
, cstate
->low
);
906 /* If the type is still on the SCC stack adjust the parents low. */
907 if (cstate
->dfsnum
< state
->dfsnum
908 && cstate
->on_sccstack
)
909 state
->low
= MIN (cstate
->dfsnum
, state
->low
);
911 /* Return the current lattice value. We start with an equality
912 assumption so types part of a SCC will be optimistically
913 treated equal unless proven otherwise. */
914 return cstate
->u
.same_p
;
917 /* Worker for gimple_types_compatible.
918 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
921 gimple_types_compatible_p_1 (tree t1
, tree t2
, type_pair_t p
,
922 vec
<type_pair_t
> *sccstack
,
923 struct pointer_map_t
*sccstate
,
924 struct obstack
*sccstate_obstack
)
928 gcc_assert (p
->same_p
== -2);
930 state
= XOBNEW (sccstate_obstack
, struct sccs
);
931 *pointer_map_insert (sccstate
, p
) = state
;
933 sccstack
->safe_push (p
);
934 state
->dfsnum
= gtc_next_dfs_num
++;
935 state
->low
= state
->dfsnum
;
936 state
->on_sccstack
= true;
937 /* Start with an equality assumption. As we DFS recurse into child
938 SCCs this assumption may get revisited. */
941 /* The struct tags shall compare equal. */
942 if (!compare_type_names_p (t1
, t2
))
943 goto different_types
;
945 /* The main variant of both types should compare equal. */
946 if (TYPE_MAIN_VARIANT (t1
) != t1
947 || TYPE_MAIN_VARIANT (t2
) != t2
)
949 if (!gtc_visit (TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
),
950 state
, sccstack
, sccstate
, sccstate_obstack
))
951 goto different_types
;
954 /* We may not merge typedef types to the same type in different
957 && TREE_CODE (TYPE_NAME (t1
)) == TYPE_DECL
958 && DECL_CONTEXT (TYPE_NAME (t1
))
959 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1
))))
961 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1
)),
962 DECL_CONTEXT (TYPE_NAME (t2
)),
963 state
, sccstack
, sccstate
, sccstate_obstack
))
964 goto different_types
;
967 /* If their attributes are not the same they can't be the same type. */
968 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1
), TYPE_ATTRIBUTES (t2
)))
969 goto different_types
;
971 /* Do type-specific comparisons. */
972 switch (TREE_CODE (t1
))
976 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
977 state
, sccstack
, sccstate
, sccstate_obstack
))
978 goto different_types
;
982 /* Array types are the same if the element types are the same and
983 the number of elements are the same. */
984 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
985 state
, sccstack
, sccstate
, sccstate_obstack
)
986 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
987 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
988 goto different_types
;
991 tree i1
= TYPE_DOMAIN (t1
);
992 tree i2
= TYPE_DOMAIN (t2
);
994 /* For an incomplete external array, the type domain can be
995 NULL_TREE. Check this condition also. */
996 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
998 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
999 goto different_types
;
1002 tree min1
= TYPE_MIN_VALUE (i1
);
1003 tree min2
= TYPE_MIN_VALUE (i2
);
1004 tree max1
= TYPE_MAX_VALUE (i1
);
1005 tree max2
= TYPE_MAX_VALUE (i2
);
1007 /* The minimum/maximum values have to be the same. */
1010 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
1011 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
1012 || operand_equal_p (min1
, min2
, 0))))
1015 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
1016 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
1017 || operand_equal_p (max1
, max2
, 0)))))
1020 goto different_types
;
1025 /* Method types should belong to the same class. */
1026 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1
), TYPE_METHOD_BASETYPE (t2
),
1027 state
, sccstack
, sccstate
, sccstate_obstack
))
1028 goto different_types
;
1033 /* Function types are the same if the return type and arguments types
1035 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
1036 state
, sccstack
, sccstate
, sccstate_obstack
))
1037 goto different_types
;
1039 if (!comp_type_attributes (t1
, t2
))
1040 goto different_types
;
1042 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
1046 tree parms1
, parms2
;
1048 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
1050 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
1052 if (!gtc_visit (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
1053 state
, sccstack
, sccstate
, sccstate_obstack
))
1054 goto different_types
;
1057 if (parms1
|| parms2
)
1058 goto different_types
;
1065 if (!gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
1066 state
, sccstack
, sccstate
, sccstate_obstack
)
1067 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1
),
1068 TYPE_OFFSET_BASETYPE (t2
),
1069 state
, sccstack
, sccstate
, sccstate_obstack
))
1070 goto different_types
;
1076 case REFERENCE_TYPE
:
1078 /* If the two pointers have different ref-all attributes,
1079 they can't be the same type. */
1080 if (TYPE_REF_CAN_ALIAS_ALL (t1
) != TYPE_REF_CAN_ALIAS_ALL (t2
))
1081 goto different_types
;
1083 /* Otherwise, pointer and reference types are the same if the
1084 pointed-to types are the same. */
1085 if (gtc_visit (TREE_TYPE (t1
), TREE_TYPE (t2
),
1086 state
, sccstack
, sccstate
, sccstate_obstack
))
1089 goto different_types
;
1095 tree min1
= TYPE_MIN_VALUE (t1
);
1096 tree max1
= TYPE_MAX_VALUE (t1
);
1097 tree min2
= TYPE_MIN_VALUE (t2
);
1098 tree max2
= TYPE_MAX_VALUE (t2
);
1099 bool min_equal_p
= false;
1100 bool max_equal_p
= false;
1102 /* If either type has a minimum value, the other type must
1104 if (min1
== NULL_TREE
&& min2
== NULL_TREE
)
1106 else if (min1
&& min2
&& operand_equal_p (min1
, min2
, 0))
1109 /* Likewise, if either type has a maximum value, the other
1110 type must have the same. */
1111 if (max1
== NULL_TREE
&& max2
== NULL_TREE
)
1113 else if (max1
&& max2
&& operand_equal_p (max1
, max2
, 0))
1116 if (!min_equal_p
|| !max_equal_p
)
1117 goto different_types
;
1124 /* FIXME lto, we cannot check bounds on enumeral types because
1125 different front ends will produce different values.
1126 In C, enumeral types are integers, while in C++ each element
1127 will have its own symbolic value. We should decide how enums
1128 are to be represented in GIMPLE and have each front end lower
1132 /* For enumeral types, all the values must be the same. */
1133 if (TYPE_VALUES (t1
) == TYPE_VALUES (t2
))
1136 for (v1
= TYPE_VALUES (t1
), v2
= TYPE_VALUES (t2
);
1138 v1
= TREE_CHAIN (v1
), v2
= TREE_CHAIN (v2
))
1140 tree c1
= TREE_VALUE (v1
);
1141 tree c2
= TREE_VALUE (v2
);
1143 if (TREE_CODE (c1
) == CONST_DECL
)
1144 c1
= DECL_INITIAL (c1
);
1146 if (TREE_CODE (c2
) == CONST_DECL
)
1147 c2
= DECL_INITIAL (c2
);
1149 if (tree_int_cst_equal (c1
, c2
) != 1)
1150 goto different_types
;
1152 if (TREE_PURPOSE (v1
) != TREE_PURPOSE (v2
))
1153 goto different_types
;
1156 /* If one enumeration has more values than the other, they
1157 are not the same. */
1159 goto different_types
;
1166 case QUAL_UNION_TYPE
:
1170 /* For aggregate types, all the fields must be the same. */
1171 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
1173 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
1175 /* Different field kinds are not compatible. */
1176 if (TREE_CODE (f1
) != TREE_CODE (f2
))
1177 goto different_types
;
1178 /* Field decls must have the same name and offset. */
1179 if (TREE_CODE (f1
) == FIELD_DECL
1180 && (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
1181 || !gimple_compare_field_offset (f1
, f2
)))
1182 goto different_types
;
1183 /* All entities should have the same name and type. */
1184 if (DECL_NAME (f1
) != DECL_NAME (f2
)
1185 || !gtc_visit (TREE_TYPE (f1
), TREE_TYPE (f2
),
1186 state
, sccstack
, sccstate
, sccstate_obstack
))
1187 goto different_types
;
1190 /* If one aggregate has more fields than the other, they
1191 are not the same. */
1193 goto different_types
;
1202 /* Common exit path for types that are not compatible. */
1204 state
->u
.same_p
= 0;
1207 /* Common exit path for types that are compatible. */
1209 gcc_assert (state
->u
.same_p
== 1);
1212 if (state
->low
== state
->dfsnum
)
1216 /* Pop off the SCC and set its cache values to the final
1217 comparison result. */
1220 struct sccs
*cstate
;
1221 x
= sccstack
->pop ();
1222 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
1223 cstate
->on_sccstack
= false;
1224 x
->same_p
= state
->u
.same_p
;
1229 return state
->u
.same_p
;
1232 /* Return true iff T1 and T2 are structurally identical. When
1233 FOR_MERGING_P is true the an incomplete type and a complete type
1234 are considered different, otherwise they are considered compatible. */
1237 gimple_types_compatible_p (tree t1
, tree t2
)
1239 vec
<type_pair_t
> sccstack
= vNULL
;
1240 struct pointer_map_t
*sccstate
;
1241 struct obstack sccstate_obstack
;
1242 type_pair_t p
= NULL
;
1245 /* Before starting to set up the SCC machinery handle simple cases. */
1247 /* Check first for the obvious case of pointer identity. */
1251 /* Check that we have two types to compare. */
1252 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1255 /* Can't be the same type if the types don't have the same code. */
1256 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1259 /* Can't be the same type if they have different CV qualifiers. */
1260 if (TYPE_QUALS (t1
) != TYPE_QUALS (t2
))
1263 if (TREE_ADDRESSABLE (t1
) != TREE_ADDRESSABLE (t2
))
1266 /* Void types and nullptr types are always the same. */
1267 if (TREE_CODE (t1
) == VOID_TYPE
1268 || TREE_CODE (t1
) == NULLPTR_TYPE
)
1271 /* Can't be the same type if they have different alignment or mode. */
1272 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
)
1273 || TYPE_MODE (t1
) != TYPE_MODE (t2
))
1276 /* Do some simple checks before doing three hashtable queries. */
1277 if (INTEGRAL_TYPE_P (t1
)
1278 || SCALAR_FLOAT_TYPE_P (t1
)
1279 || FIXED_POINT_TYPE_P (t1
)
1280 || TREE_CODE (t1
) == VECTOR_TYPE
1281 || TREE_CODE (t1
) == COMPLEX_TYPE
1282 || TREE_CODE (t1
) == OFFSET_TYPE
1283 || POINTER_TYPE_P (t1
))
1285 /* Can't be the same type if they have different sign or precision. */
1286 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
)
1287 || TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
))
1290 if (TREE_CODE (t1
) == INTEGER_TYPE
1291 && TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
))
1294 /* That's all we need to check for float and fixed-point types. */
1295 if (SCALAR_FLOAT_TYPE_P (t1
)
1296 || FIXED_POINT_TYPE_P (t1
))
1299 /* For other types fall through to more complex checks. */
1302 /* If the hash values of t1 and t2 are different the types can't
1303 possibly be the same. This helps keeping the type-pair hashtable
1304 small, only tracking comparisons for hash collisions. */
1305 if (gimple_type_hash (t1
) != gimple_type_hash (t2
))
1308 /* If we've visited this type pair before (in the case of aggregates
1309 with self-referential types), and we made a decision, return it. */
1310 p
= lookup_type_pair (t1
, t2
);
1311 if (p
->same_p
== 0 || p
->same_p
== 1)
1313 /* We have already decided whether T1 and T2 are the
1314 same, return the cached result. */
1315 return p
->same_p
== 1;
1318 /* Now set up the SCC machinery for the comparison. */
1319 gtc_next_dfs_num
= 1;
1320 sccstate
= pointer_map_create ();
1321 gcc_obstack_init (&sccstate_obstack
);
1322 res
= gimple_types_compatible_p_1 (t1
, t2
, p
,
1323 &sccstack
, sccstate
, &sccstate_obstack
);
1324 sccstack
.release ();
1325 pointer_map_destroy (sccstate
);
1326 obstack_free (&sccstate_obstack
, NULL
);
1332 iterative_hash_gimple_type (tree
, hashval_t
, vec
<tree
> *,
1333 struct pointer_map_t
*, struct obstack
*);
1335 /* DFS visit the edge from the callers type with state *STATE to T.
1336 Update the callers type hash V with the hash for T if it is not part
1337 of the SCC containing the callers type and return it.
1338 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
1341 visit (tree t
, struct sccs
*state
, hashval_t v
,
1342 vec
<tree
> *sccstack
,
1343 struct pointer_map_t
*sccstate
,
1344 struct obstack
*sccstate_obstack
)
1346 struct sccs
*cstate
= NULL
;
1347 struct tree_int_map m
;
1350 /* If there is a hash value recorded for this type then it can't
1351 possibly be part of our parent SCC. Simply mix in its hash. */
1353 if ((slot
= htab_find_slot (type_hash_cache
, &m
, NO_INSERT
))
1355 return iterative_hash_hashval_t (((struct tree_int_map
*) *slot
)->to
, v
);
1357 if ((slot
= pointer_map_contains (sccstate
, t
)) != NULL
)
1358 cstate
= (struct sccs
*)*slot
;
1362 /* Not yet visited. DFS recurse. */
1363 tem
= iterative_hash_gimple_type (t
, v
,
1364 sccstack
, sccstate
, sccstate_obstack
);
1366 cstate
= (struct sccs
*)* pointer_map_contains (sccstate
, t
);
1367 state
->low
= MIN (state
->low
, cstate
->low
);
1368 /* If the type is no longer on the SCC stack and thus is not part
1369 of the parents SCC mix in its hash value. Otherwise we will
1370 ignore the type for hashing purposes and return the unaltered
1372 if (!cstate
->on_sccstack
)
1375 if (cstate
->dfsnum
< state
->dfsnum
1376 && cstate
->on_sccstack
)
1377 state
->low
= MIN (cstate
->dfsnum
, state
->low
);
1379 /* We are part of our parents SCC, skip this type during hashing
1380 and return the unaltered hash value. */
1384 /* Hash NAME with the previous hash value V and return it. */
1387 iterative_hash_name (tree name
, hashval_t v
)
1391 v
= iterative_hash_hashval_t (TREE_CODE (name
), v
);
1392 if (TREE_CODE (name
) == TYPE_DECL
)
1393 name
= DECL_NAME (name
);
1396 gcc_assert (TREE_CODE (name
) == IDENTIFIER_NODE
);
1397 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name
), v
);
1400 /* A type, hashvalue pair for sorting SCC members. */
1402 struct type_hash_pair
{
1407 /* Compare two type, hashvalue pairs. */
1410 type_hash_pair_compare (const void *p1_
, const void *p2_
)
1412 const struct type_hash_pair
*p1
= (const struct type_hash_pair
*) p1_
;
1413 const struct type_hash_pair
*p2
= (const struct type_hash_pair
*) p2_
;
1414 if (p1
->hash
< p2
->hash
)
1416 else if (p1
->hash
> p2
->hash
)
1421 /* Returning a hash value for gimple type TYPE combined with VAL.
1422 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1424 To hash a type we end up hashing in types that are reachable.
1425 Through pointers we can end up with cycles which messes up the
1426 required property that we need to compute the same hash value
1427 for structurally equivalent types. To avoid this we have to
1428 hash all types in a cycle (the SCC) in a commutative way. The
1429 easiest way is to not mix in the hashes of the SCC members at
1430 all. To make this work we have to delay setting the hash
1431 values of the SCC until it is complete. */
1434 iterative_hash_gimple_type (tree type
, hashval_t val
,
1435 vec
<tree
> *sccstack
,
1436 struct pointer_map_t
*sccstate
,
1437 struct obstack
*sccstate_obstack
)
1443 /* Not visited during this DFS walk. */
1444 gcc_checking_assert (!pointer_map_contains (sccstate
, type
));
1445 state
= XOBNEW (sccstate_obstack
, struct sccs
);
1446 *pointer_map_insert (sccstate
, type
) = state
;
1448 sccstack
->safe_push (type
);
1449 state
->dfsnum
= next_dfs_num
++;
1450 state
->low
= state
->dfsnum
;
1451 state
->on_sccstack
= true;
1453 /* Combine a few common features of types so that types are grouped into
1454 smaller sets; when searching for existing matching types to merge,
1455 only existing types having the same features as the new type will be
1457 v
= iterative_hash_name (TYPE_NAME (type
), 0);
1458 if (TYPE_NAME (type
)
1459 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1460 && DECL_CONTEXT (TYPE_NAME (type
))
1461 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type
))))
1462 v
= visit (DECL_CONTEXT (TYPE_NAME (type
)), state
, v
,
1463 sccstack
, sccstate
, sccstate_obstack
);
1465 /* Factor in the variant structure. */
1466 if (TYPE_MAIN_VARIANT (type
) != type
)
1467 v
= visit (TYPE_MAIN_VARIANT (type
), state
, v
,
1468 sccstack
, sccstate
, sccstate_obstack
);
1470 v
= iterative_hash_hashval_t (TREE_CODE (type
), v
);
1471 v
= iterative_hash_hashval_t (TYPE_QUALS (type
), v
);
1472 v
= iterative_hash_hashval_t (TREE_ADDRESSABLE (type
), v
);
1474 /* Do not hash the types size as this will cause differences in
1475 hash values for the complete vs. the incomplete type variant. */
1477 /* Incorporate common features of numerical types. */
1478 if (INTEGRAL_TYPE_P (type
)
1479 || SCALAR_FLOAT_TYPE_P (type
)
1480 || FIXED_POINT_TYPE_P (type
))
1482 v
= iterative_hash_hashval_t (TYPE_PRECISION (type
), v
);
1483 v
= iterative_hash_hashval_t (TYPE_MODE (type
), v
);
1484 v
= iterative_hash_hashval_t (TYPE_UNSIGNED (type
), v
);
1487 /* For pointer and reference types, fold in information about the type
1489 if (POINTER_TYPE_P (type
))
1490 v
= visit (TREE_TYPE (type
), state
, v
,
1491 sccstack
, sccstate
, sccstate_obstack
);
1493 /* For integer types hash the types min/max values and the string flag. */
1494 if (TREE_CODE (type
) == INTEGER_TYPE
)
1496 /* OMP lowering can introduce error_mark_node in place of
1497 random local decls in types. */
1498 if (TYPE_MIN_VALUE (type
) != error_mark_node
)
1499 v
= iterative_hash_expr (TYPE_MIN_VALUE (type
), v
);
1500 if (TYPE_MAX_VALUE (type
) != error_mark_node
)
1501 v
= iterative_hash_expr (TYPE_MAX_VALUE (type
), v
);
1502 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
1505 /* For array types hash the domain and the string flag. */
1506 if (TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_DOMAIN (type
))
1508 v
= iterative_hash_hashval_t (TYPE_STRING_FLAG (type
), v
);
1509 v
= visit (TYPE_DOMAIN (type
), state
, v
,
1510 sccstack
, sccstate
, sccstate_obstack
);
1513 /* Recurse for aggregates with a single element type. */
1514 if (TREE_CODE (type
) == ARRAY_TYPE
1515 || TREE_CODE (type
) == COMPLEX_TYPE
1516 || TREE_CODE (type
) == VECTOR_TYPE
)
1517 v
= visit (TREE_TYPE (type
), state
, v
,
1518 sccstack
, sccstate
, sccstate_obstack
);
1520 /* Incorporate function return and argument types. */
1521 if (TREE_CODE (type
) == FUNCTION_TYPE
|| TREE_CODE (type
) == METHOD_TYPE
)
1526 /* For method types also incorporate their parent class. */
1527 if (TREE_CODE (type
) == METHOD_TYPE
)
1528 v
= visit (TYPE_METHOD_BASETYPE (type
), state
, v
,
1529 sccstack
, sccstate
, sccstate_obstack
);
1531 /* Check result and argument types. */
1532 v
= visit (TREE_TYPE (type
), state
, v
,
1533 sccstack
, sccstate
, sccstate_obstack
);
1534 for (p
= TYPE_ARG_TYPES (type
), na
= 0; p
; p
= TREE_CHAIN (p
))
1536 v
= visit (TREE_VALUE (p
), state
, v
,
1537 sccstack
, sccstate
, sccstate_obstack
);
1541 v
= iterative_hash_hashval_t (na
, v
);
1544 if (RECORD_OR_UNION_TYPE_P (type
))
1549 for (f
= TYPE_FIELDS (type
), nf
= 0; f
; f
= TREE_CHAIN (f
))
1551 v
= iterative_hash_name (DECL_NAME (f
), v
);
1552 v
= visit (TREE_TYPE (f
), state
, v
,
1553 sccstack
, sccstate
, sccstate_obstack
);
1557 v
= iterative_hash_hashval_t (nf
, v
);
1560 /* Record hash for us. */
1563 /* See if we found an SCC. */
1564 if (state
->low
== state
->dfsnum
)
1567 struct tree_int_map
*m
;
1569 /* Pop off the SCC and set its hash values. */
1570 x
= sccstack
->pop ();
1571 /* Optimize SCC size one. */
1574 state
->on_sccstack
= false;
1575 m
= ggc_alloc_cleared_tree_int_map ();
1578 slot
= htab_find_slot (type_hash_cache
, m
, INSERT
);
1579 gcc_assert (!*slot
);
1584 struct sccs
*cstate
;
1585 unsigned first
, i
, size
, j
;
1586 struct type_hash_pair
*pairs
;
1587 /* Pop off the SCC and build an array of type, hash pairs. */
1588 first
= sccstack
->length () - 1;
1589 while ((*sccstack
)[first
] != type
)
1591 size
= sccstack
->length () - first
+ 1;
1592 pairs
= XALLOCAVEC (struct type_hash_pair
, size
);
1594 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
1595 cstate
->on_sccstack
= false;
1597 pairs
[i
].hash
= cstate
->u
.hash
;
1600 x
= sccstack
->pop ();
1601 cstate
= (struct sccs
*)*pointer_map_contains (sccstate
, x
);
1602 cstate
->on_sccstack
= false;
1605 pairs
[i
].hash
= cstate
->u
.hash
;
1608 gcc_assert (i
+ 1 == size
);
1609 /* Sort the arrays of type, hash pairs so that when we mix in
1610 all members of the SCC the hash value becomes independent on
1611 the order we visited the SCC. Disregard hashes equal to
1612 the hash of the type we mix into because we cannot guarantee
1613 a stable sort for those across different TUs. */
1614 qsort (pairs
, size
, sizeof (struct type_hash_pair
),
1615 type_hash_pair_compare
);
1616 for (i
= 0; i
< size
; ++i
)
1619 m
= ggc_alloc_cleared_tree_int_map ();
1620 m
->base
.from
= pairs
[i
].type
;
1621 hash
= pairs
[i
].hash
;
1622 /* Skip same hashes. */
1623 for (j
= i
+ 1; j
< size
&& pairs
[j
].hash
== pairs
[i
].hash
; ++j
)
1625 for (; j
< size
; ++j
)
1626 hash
= iterative_hash_hashval_t (pairs
[j
].hash
, hash
);
1627 for (j
= 0; pairs
[j
].hash
!= pairs
[i
].hash
; ++j
)
1628 hash
= iterative_hash_hashval_t (pairs
[j
].hash
, hash
);
1630 if (pairs
[i
].type
== type
)
1632 slot
= htab_find_slot (type_hash_cache
, m
, INSERT
);
1633 gcc_assert (!*slot
);
1639 return iterative_hash_hashval_t (v
, val
);
1642 /* Returns a hash value for P (assumed to be a type). The hash value
1643 is computed using some distinguishing features of the type. Note
1644 that we cannot use pointer hashing here as we may be dealing with
1645 two distinct instances of the same type.
1647 This function should produce the same hash value for two compatible
1648 types according to gimple_types_compatible_p. */
1651 gimple_type_hash (const void *p
)
1653 const_tree t
= (const_tree
) p
;
1654 vec
<tree
> sccstack
= vNULL
;
1655 struct pointer_map_t
*sccstate
;
1656 struct obstack sccstate_obstack
;
1659 struct tree_int_map m
;
1661 m
.base
.from
= CONST_CAST_TREE (t
);
1662 if ((slot
= htab_find_slot (type_hash_cache
, &m
, NO_INSERT
))
1664 return iterative_hash_hashval_t (((struct tree_int_map
*) *slot
)->to
, 0);
1666 /* Perform a DFS walk and pre-hash all reachable types. */
1668 sccstate
= pointer_map_create ();
1669 gcc_obstack_init (&sccstate_obstack
);
1670 val
= iterative_hash_gimple_type (CONST_CAST_TREE (t
), 0,
1671 &sccstack
, sccstate
, &sccstate_obstack
);
1672 sccstack
.release ();
1673 pointer_map_destroy (sccstate
);
1674 obstack_free (&sccstate_obstack
, NULL
);
1679 /* Returns nonzero if P1 and P2 are equal. */
1682 gimple_type_eq (const void *p1
, const void *p2
)
1684 const_tree t1
= (const_tree
) p1
;
1685 const_tree t2
= (const_tree
) p2
;
1686 return gimple_types_compatible_p (CONST_CAST_TREE (t1
),
1687 CONST_CAST_TREE (t2
));
1690 /* Register type T in the global type table gimple_types. */
1693 gimple_register_type (tree t
)
1697 /* See if we already have an equivalent type registered. */
1698 slot
= htab_find_slot (gimple_types
, t
, INSERT
);
1700 && *(tree
*)slot
!= t
)
1701 return (tree
) *((tree
*) slot
);
1703 /* If not, insert it to the cache and the hash. */
1708 /* End of old merging code. */
1710 /* Remember trees that contains references to declarations. */
1711 static GTY(()) vec
<tree
, va_gc
> *tree_with_vars
;
1713 #define CHECK_VAR(tt) \
1716 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
1717 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
1721 #define CHECK_NO_VAR(tt) \
1722 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
1724 /* Check presence of pointers to decls in fields of a tree_typed T. */
1727 mentions_vars_p_typed (tree t
)
1729 CHECK_NO_VAR (TREE_TYPE (t
));
1733 /* Check presence of pointers to decls in fields of a tree_common T. */
1736 mentions_vars_p_common (tree t
)
1738 if (mentions_vars_p_typed (t
))
1740 CHECK_NO_VAR (TREE_CHAIN (t
));
1744 /* Check presence of pointers to decls in fields of a decl_minimal T. */
1747 mentions_vars_p_decl_minimal (tree t
)
1749 if (mentions_vars_p_common (t
))
1751 CHECK_NO_VAR (DECL_NAME (t
));
1752 CHECK_VAR (DECL_CONTEXT (t
));
1756 /* Check presence of pointers to decls in fields of a decl_common T. */
1759 mentions_vars_p_decl_common (tree t
)
1761 if (mentions_vars_p_decl_minimal (t
))
1763 CHECK_VAR (DECL_SIZE (t
));
1764 CHECK_VAR (DECL_SIZE_UNIT (t
));
1765 CHECK_VAR (DECL_INITIAL (t
));
1766 CHECK_NO_VAR (DECL_ATTRIBUTES (t
));
1767 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t
));
1771 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
1774 mentions_vars_p_decl_with_vis (tree t
)
1776 if (mentions_vars_p_decl_common (t
))
1779 /* Accessor macro has side-effects, use field-name here. */
1780 CHECK_NO_VAR (t
->decl_with_vis
.assembler_name
);
1781 CHECK_NO_VAR (DECL_SECTION_NAME (t
));
1785 /* Check presence of pointers to decls in fields of a decl_non_common T. */
1788 mentions_vars_p_decl_non_common (tree t
)
1790 if (mentions_vars_p_decl_with_vis (t
))
1792 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t
));
1793 CHECK_NO_VAR (DECL_RESULT_FLD (t
));
1794 CHECK_NO_VAR (DECL_VINDEX (t
));
1798 /* Check presence of pointers to decls in fields of a decl_non_common T. */
1801 mentions_vars_p_function (tree t
)
1803 if (mentions_vars_p_decl_non_common (t
))
1805 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t
));
1809 /* Check presence of pointers to decls in fields of a field_decl T. */
1812 mentions_vars_p_field_decl (tree t
)
1814 if (mentions_vars_p_decl_common (t
))
1816 CHECK_VAR (DECL_FIELD_OFFSET (t
));
1817 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t
));
1818 CHECK_NO_VAR (DECL_QUALIFIER (t
));
1819 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t
));
1820 CHECK_NO_VAR (DECL_FCONTEXT (t
));
1824 /* Check presence of pointers to decls in fields of a type T. */
1827 mentions_vars_p_type (tree t
)
1829 if (mentions_vars_p_common (t
))
1831 CHECK_NO_VAR (TYPE_CACHED_VALUES (t
));
1832 CHECK_VAR (TYPE_SIZE (t
));
1833 CHECK_VAR (TYPE_SIZE_UNIT (t
));
1834 CHECK_NO_VAR (TYPE_ATTRIBUTES (t
));
1835 CHECK_NO_VAR (TYPE_NAME (t
));
1837 CHECK_VAR (TYPE_MINVAL (t
));
1838 CHECK_VAR (TYPE_MAXVAL (t
));
1840 /* Accessor is for derived node types only. */
1841 CHECK_NO_VAR (t
->type_non_common
.binfo
);
1843 CHECK_VAR (TYPE_CONTEXT (t
));
1844 CHECK_NO_VAR (TYPE_CANONICAL (t
));
1845 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t
));
1846 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t
));
1850 /* Check presence of pointers to decls in fields of a BINFO T. */
1853 mentions_vars_p_binfo (tree t
)
1855 unsigned HOST_WIDE_INT i
, n
;
1857 if (mentions_vars_p_common (t
))
1859 CHECK_VAR (BINFO_VTABLE (t
));
1860 CHECK_NO_VAR (BINFO_OFFSET (t
));
1861 CHECK_NO_VAR (BINFO_VIRTUALS (t
));
1862 CHECK_NO_VAR (BINFO_VPTR_FIELD (t
));
1863 n
= vec_safe_length (BINFO_BASE_ACCESSES (t
));
1864 for (i
= 0; i
< n
; i
++)
1865 CHECK_NO_VAR (BINFO_BASE_ACCESS (t
, i
));
1866 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1867 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1868 n
= BINFO_N_BASE_BINFOS (t
);
1869 for (i
= 0; i
< n
; i
++)
1870 CHECK_NO_VAR (BINFO_BASE_BINFO (t
, i
));
1874 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
1877 mentions_vars_p_constructor (tree t
)
1879 unsigned HOST_WIDE_INT idx
;
1880 constructor_elt
*ce
;
1882 if (mentions_vars_p_typed (t
))
1885 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t
), idx
, &ce
); idx
++)
1887 CHECK_NO_VAR (ce
->index
);
1888 CHECK_VAR (ce
->value
);
1893 /* Check presence of pointers to decls in fields of an expression tree T. */
1896 mentions_vars_p_expr (tree t
)
1899 if (mentions_vars_p_typed (t
))
1901 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
1902 CHECK_VAR (TREE_OPERAND (t
, i
));
1906 /* Check presence of pointers to decls that needs later fixup in T. */
1909 mentions_vars_p (tree t
)
1911 switch (TREE_CODE (t
))
1913 case IDENTIFIER_NODE
:
1917 CHECK_VAR (TREE_VALUE (t
));
1918 CHECK_VAR (TREE_PURPOSE (t
));
1919 CHECK_NO_VAR (TREE_CHAIN (t
));
1923 return mentions_vars_p_field_decl (t
);
1931 case NAMESPACE_DECL
:
1932 return mentions_vars_p_decl_common (t
);
1936 return mentions_vars_p_decl_with_vis (t
);
1940 return mentions_vars_p_decl_non_common (t
);
1944 return mentions_vars_p_function (t
);
1948 return mentions_vars_p_binfo (t
);
1951 case PLACEHOLDER_EXPR
:
1952 return mentions_vars_p_common (t
);
1956 case TRANSLATION_UNIT_DECL
:
1957 case OPTIMIZATION_NODE
:
1958 case TARGET_OPTION_NODE
:
1962 return mentions_vars_p_constructor (t
);
1968 if (mentions_vars_p_type (t
))
1971 else if (EXPR_P (t
))
1973 if (mentions_vars_p_expr (t
))
1976 else if (CONSTANT_CLASS_P (t
))
1977 CHECK_NO_VAR (TREE_TYPE (t
));
1985 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1987 static enum ld_plugin_symbol_resolution
1988 get_resolution (struct data_in
*data_in
, unsigned index
)
1990 if (data_in
->globals_resolution
.exists ())
1992 ld_plugin_symbol_resolution_t ret
;
1993 /* We can have references to not emitted functions in
1994 DECL_FUNCTION_PERSONALITY at least. So we can and have
1995 to indeed return LDPR_UNKNOWN in some cases. */
1996 if (data_in
->globals_resolution
.length () <= index
)
1997 return LDPR_UNKNOWN
;
1998 ret
= data_in
->globals_resolution
[index
];
2002 /* Delay resolution finding until decl merging. */
2003 return LDPR_UNKNOWN
;
2006 /* We need to record resolutions until symbol table is read. */
2008 register_resolution (struct lto_file_decl_data
*file_data
, tree decl
,
2009 enum ld_plugin_symbol_resolution resolution
)
2011 if (resolution
== LDPR_UNKNOWN
)
2013 if (!file_data
->resolution_map
)
2014 file_data
->resolution_map
= pointer_map_create ();
2015 *pointer_map_insert (file_data
->resolution_map
, decl
) = (void *)(size_t)resolution
;
2018 /* Register DECL with the global symbol table and change its
2019 name if necessary to avoid name clashes for static globals across
2023 lto_register_var_decl_in_symtab (struct data_in
*data_in
, tree decl
,
2028 /* Variable has file scope, not local. */
2029 if (!TREE_PUBLIC (decl
)
2030 && !((context
= decl_function_context (decl
))
2031 && auto_var_in_fn_p (decl
, context
)))
2032 rest_of_decl_compilation (decl
, 1, 0);
2034 /* If this variable has already been declared, queue the
2035 declaration for merging. */
2036 if (TREE_PUBLIC (decl
))
2037 register_resolution (data_in
->file_data
,
2038 decl
, get_resolution (data_in
, ix
));
2042 /* Register DECL with the global symbol table and change its
2043 name if necessary to avoid name clashes for static globals across
2044 different files. DATA_IN contains descriptors and tables for the
2048 lto_register_function_decl_in_symtab (struct data_in
*data_in
, tree decl
,
2051 /* If this variable has already been declared, queue the
2052 declaration for merging. */
2053 if (TREE_PUBLIC (decl
) && !DECL_ABSTRACT (decl
))
2054 register_resolution (data_in
->file_data
,
2055 decl
, get_resolution (data_in
, ix
));
2059 /* For the type T re-materialize it in the type variant list and
2060 the pointer/reference-to chains. */
2063 lto_fixup_prevailing_type (tree t
)
2065 /* The following re-creates proper variant lists while fixing up
2066 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
2067 variant list state before fixup is broken. */
2069 /* If we are not our own variant leader link us into our new leaders
2071 if (TYPE_MAIN_VARIANT (t
) != t
)
2073 tree mv
= TYPE_MAIN_VARIANT (t
);
2074 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
2075 TYPE_NEXT_VARIANT (mv
) = t
;
2078 /* The following reconstructs the pointer chains
2079 of the new pointed-to type if we are a main variant. We do
2080 not stream those so they are broken before fixup. */
2081 if (TREE_CODE (t
) == POINTER_TYPE
2082 && TYPE_MAIN_VARIANT (t
) == t
)
2084 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (TREE_TYPE (t
));
2085 TYPE_POINTER_TO (TREE_TYPE (t
)) = t
;
2087 else if (TREE_CODE (t
) == REFERENCE_TYPE
2088 && TYPE_MAIN_VARIANT (t
) == t
)
2090 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (TREE_TYPE (t
));
2091 TYPE_REFERENCE_TO (TREE_TYPE (t
)) = t
;
2096 /* We keep prevailing tree SCCs in a hashtable with manual collision
2097 handling (in case all hashes compare the same) and keep the colliding
2098 entries in the tree_scc->next chain. */
2103 /* Hash of the whole SCC. */
2105 /* Number of trees in the SCC. */
2107 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
2108 which share the same individual tree hash). */
2110 /* The members of the SCC.
2111 We only need to remember the first entry node candidate for prevailing
2112 SCCs (but of course have access to all entries for SCCs we are
2114 ??? For prevailing SCCs we really only need hash and the first
2115 entry candidate, but that's too awkward to implement. */
2119 struct tree_scc_hasher
: typed_noop_remove
<tree_scc
>
2121 typedef tree_scc value_type
;
2122 typedef tree_scc compare_type
;
2123 static inline hashval_t
hash (const value_type
*);
2124 static inline bool equal (const value_type
*, const compare_type
*);
2128 tree_scc_hasher::hash (const value_type
*scc
)
2134 tree_scc_hasher::equal (const value_type
*scc1
, const compare_type
*scc2
)
2136 if (scc1
->hash
!= scc2
->hash
2137 || scc1
->len
!= scc2
->len
2138 || scc1
->entry_len
!= scc2
->entry_len
)
2143 static hash_table
<tree_scc_hasher
> tree_scc_hash
;
2144 static struct obstack tree_scc_hash_obstack
;
2146 static unsigned long num_merged_types
;
2147 static unsigned long num_prevailing_types
;
2148 static unsigned long num_not_merged_types
;
2149 static unsigned long num_not_merged_types_in_same_scc
;
2150 static unsigned long num_not_merged_types_trees
;
2151 static unsigned long num_not_merged_types_in_same_scc_trees
;
2152 static unsigned long num_type_scc_trees
;
2153 static unsigned long total_scc_size
;
2154 static unsigned long num_sccs_read
;
2155 static unsigned long total_scc_size_merged
;
2156 static unsigned long num_sccs_merged
;
2157 static unsigned long num_scc_compares
;
2158 static unsigned long num_scc_compare_collisions
;
2161 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
2162 recursing through in-SCC tree edges. Returns true if the SCCs entered
2163 through T1 and T2 are equal and fills in *MAP with the pairs of
2164 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
2167 compare_tree_sccs_1 (tree t1
, tree t2
, tree
**map
)
2169 enum tree_code code
;
2171 /* Mark already visited nodes. */
2172 TREE_ASM_WRITTEN (t2
) = 1;
2174 /* Push the pair onto map. */
2179 /* Compare value-fields. */
2180 #define compare_values(X) \
2182 if (X(t1) != X(t2)) \
2186 compare_values (TREE_CODE
);
2187 code
= TREE_CODE (t1
);
2191 compare_values (TREE_SIDE_EFFECTS
);
2192 compare_values (TREE_CONSTANT
);
2193 compare_values (TREE_READONLY
);
2194 compare_values (TREE_PUBLIC
);
2196 compare_values (TREE_ADDRESSABLE
);
2197 compare_values (TREE_THIS_VOLATILE
);
2199 compare_values (DECL_UNSIGNED
);
2200 else if (TYPE_P (t1
))
2201 compare_values (TYPE_UNSIGNED
);
2203 compare_values (TYPE_ARTIFICIAL
);
2205 compare_values (TREE_NO_WARNING
);
2206 compare_values (TREE_NOTHROW
);
2207 compare_values (TREE_STATIC
);
2208 if (code
!= TREE_BINFO
)
2209 compare_values (TREE_PRIVATE
);
2210 compare_values (TREE_PROTECTED
);
2211 compare_values (TREE_DEPRECATED
);
2214 compare_values (TYPE_SATURATING
);
2215 compare_values (TYPE_ADDR_SPACE
);
2217 else if (code
== SSA_NAME
)
2218 compare_values (SSA_NAME_IS_DEFAULT_DEF
);
2220 if (CODE_CONTAINS_STRUCT (code
, TS_INT_CST
))
2222 compare_values (TREE_INT_CST_LOW
);
2223 compare_values (TREE_INT_CST_HIGH
);
2226 if (CODE_CONTAINS_STRUCT (code
, TS_REAL_CST
))
2228 /* ??? No suitable compare routine available. */
2229 REAL_VALUE_TYPE r1
= TREE_REAL_CST (t1
);
2230 REAL_VALUE_TYPE r2
= TREE_REAL_CST (t2
);
2232 || r1
.decimal
!= r2
.decimal
2233 || r1
.sign
!= r2
.sign
2234 || r1
.signalling
!= r2
.signalling
2235 || r1
.canonical
!= r2
.canonical
2236 || r1
.uexp
!= r2
.uexp
)
2238 for (unsigned i
= 0; i
< SIGSZ
; ++i
)
2239 if (r1
.sig
[i
] != r2
.sig
[i
])
2243 if (CODE_CONTAINS_STRUCT (code
, TS_FIXED_CST
))
2244 if (!fixed_compare (EQ_EXPR
,
2245 TREE_FIXED_CST_PTR (t1
), TREE_FIXED_CST_PTR (t2
)))
2249 /* We don't want to compare locations, so there is nothing do compare
2250 for TS_DECL_MINIMAL. */
2252 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
2254 compare_values (DECL_MODE
);
2255 compare_values (DECL_NONLOCAL
);
2256 compare_values (DECL_VIRTUAL_P
);
2257 compare_values (DECL_IGNORED_P
);
2258 compare_values (DECL_ABSTRACT
);
2259 compare_values (DECL_ARTIFICIAL
);
2260 compare_values (DECL_USER_ALIGN
);
2261 compare_values (DECL_PRESERVE_P
);
2262 compare_values (DECL_EXTERNAL
);
2263 compare_values (DECL_GIMPLE_REG_P
);
2264 compare_values (DECL_ALIGN
);
2265 if (code
== LABEL_DECL
)
2267 compare_values (EH_LANDING_PAD_NR
);
2268 compare_values (LABEL_DECL_UID
);
2270 else if (code
== FIELD_DECL
)
2272 compare_values (DECL_PACKED
);
2273 compare_values (DECL_NONADDRESSABLE_P
);
2274 compare_values (DECL_OFFSET_ALIGN
);
2276 else if (code
== VAR_DECL
)
2278 compare_values (DECL_HAS_DEBUG_EXPR_P
);
2279 compare_values (DECL_NONLOCAL_FRAME
);
2281 if (code
== RESULT_DECL
2282 || code
== PARM_DECL
2283 || code
== VAR_DECL
)
2285 compare_values (DECL_BY_REFERENCE
);
2286 if (code
== VAR_DECL
2287 || code
== PARM_DECL
)
2288 compare_values (DECL_HAS_VALUE_EXPR_P
);
2292 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_WRTL
))
2293 compare_values (DECL_REGISTER
);
2295 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_WITH_VIS
))
2297 compare_values (DECL_COMMON
);
2298 compare_values (DECL_DLLIMPORT_P
);
2299 compare_values (DECL_WEAK
);
2300 compare_values (DECL_SEEN_IN_BIND_EXPR_P
);
2301 compare_values (DECL_COMDAT
);
2302 compare_values (DECL_VISIBILITY
);
2303 compare_values (DECL_VISIBILITY_SPECIFIED
);
2304 if (code
== VAR_DECL
)
2306 compare_values (DECL_HARD_REGISTER
);
2307 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
2308 compare_values (DECL_IN_CONSTANT_POOL
);
2309 compare_values (DECL_TLS_MODEL
);
2311 if (VAR_OR_FUNCTION_DECL_P (t1
))
2312 compare_values (DECL_INIT_PRIORITY
);
2315 if (CODE_CONTAINS_STRUCT (code
, TS_FUNCTION_DECL
))
2317 compare_values (DECL_BUILT_IN_CLASS
);
2318 compare_values (DECL_STATIC_CONSTRUCTOR
);
2319 compare_values (DECL_STATIC_DESTRUCTOR
);
2320 compare_values (DECL_UNINLINABLE
);
2321 compare_values (DECL_POSSIBLY_INLINED
);
2322 compare_values (DECL_IS_NOVOPS
);
2323 compare_values (DECL_IS_RETURNS_TWICE
);
2324 compare_values (DECL_IS_MALLOC
);
2325 compare_values (DECL_IS_OPERATOR_NEW
);
2326 compare_values (DECL_DECLARED_INLINE_P
);
2327 compare_values (DECL_STATIC_CHAIN
);
2328 compare_values (DECL_NO_INLINE_WARNING_P
);
2329 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT
);
2330 compare_values (DECL_NO_LIMIT_STACK
);
2331 compare_values (DECL_DISREGARD_INLINE_LIMITS
);
2332 compare_values (DECL_PURE_P
);
2333 compare_values (DECL_LOOPING_CONST_OR_PURE_P
);
2334 compare_values (DECL_FINAL_P
);
2335 compare_values (DECL_CXX_CONSTRUCTOR_P
);
2336 compare_values (DECL_CXX_DESTRUCTOR_P
);
2337 if (DECL_BUILT_IN_CLASS (t1
) != NOT_BUILT_IN
)
2338 compare_values (DECL_FUNCTION_CODE
);
2339 if (DECL_STATIC_DESTRUCTOR (t1
))
2340 compare_values (DECL_FINI_PRIORITY
);
2343 if (CODE_CONTAINS_STRUCT (code
, TS_TYPE_COMMON
))
2345 compare_values (TYPE_MODE
);
2346 compare_values (TYPE_STRING_FLAG
);
2347 compare_values (TYPE_NO_FORCE_BLK
);
2348 compare_values (TYPE_NEEDS_CONSTRUCTING
);
2349 if (RECORD_OR_UNION_TYPE_P (t1
))
2351 compare_values (TYPE_TRANSPARENT_AGGR
);
2352 compare_values (TYPE_FINAL_P
);
2354 else if (code
== ARRAY_TYPE
)
2355 compare_values (TYPE_NONALIASED_COMPONENT
);
2356 compare_values (TYPE_PACKED
);
2357 compare_values (TYPE_RESTRICT
);
2358 compare_values (TYPE_USER_ALIGN
);
2359 compare_values (TYPE_READONLY
);
2360 compare_values (TYPE_PRECISION
);
2361 compare_values (TYPE_ALIGN
);
2362 compare_values (TYPE_ALIAS_SET
);
2365 /* We don't want to compare locations, so there is nothing do compare
2368 /* BLOCKs are function local and we don't merge anything there, so
2369 simply refuse to merge. */
2370 if (CODE_CONTAINS_STRUCT (code
, TS_BLOCK
))
2373 if (CODE_CONTAINS_STRUCT (code
, TS_TRANSLATION_UNIT_DECL
))
2374 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1
),
2375 TRANSLATION_UNIT_LANGUAGE (t2
)) != 0)
2378 if (CODE_CONTAINS_STRUCT (code
, TS_TARGET_OPTION
))
2379 if (memcmp (TREE_TARGET_OPTION (t1
), TREE_TARGET_OPTION (t2
),
2380 sizeof (struct cl_target_option
)) != 0)
2383 if (CODE_CONTAINS_STRUCT (code
, TS_OPTIMIZATION
))
2384 if (memcmp (TREE_OPTIMIZATION (t1
), TREE_OPTIMIZATION (t2
),
2385 sizeof (struct cl_optimization
)) != 0)
2388 if (CODE_CONTAINS_STRUCT (code
, TS_BINFO
))
2389 if (vec_safe_length (BINFO_BASE_ACCESSES (t1
))
2390 != vec_safe_length (BINFO_BASE_ACCESSES (t2
)))
2393 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
2394 compare_values (CONSTRUCTOR_NELTS
);
2396 if (CODE_CONTAINS_STRUCT (code
, TS_IDENTIFIER
))
2397 if (IDENTIFIER_LENGTH (t1
) != IDENTIFIER_LENGTH (t2
)
2398 || memcmp (IDENTIFIER_POINTER (t1
), IDENTIFIER_POINTER (t2
),
2399 IDENTIFIER_LENGTH (t1
)) != 0)
2402 if (CODE_CONTAINS_STRUCT (code
, TS_STRING
))
2403 if (TREE_STRING_LENGTH (t1
) != TREE_STRING_LENGTH (t2
)
2404 || memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
2405 TREE_STRING_LENGTH (t1
)) != 0)
2408 #undef compare_values
2411 /* Compare pointer fields. */
2413 /* Recurse. Search & Replaced from DFS_write_tree_body.
2414 Folding the early checks into the compare_tree_edges recursion
2415 macro makes debugging way quicker as you are able to break on
2416 compare_tree_sccs_1 and simply finish until a call returns false
2417 to spot the SCC members with the difference. */
2418 #define compare_tree_edges(E1, E2) \
2420 tree t1_ = (E1), t2_ = (E2); \
2423 || !TREE_VISITED (t2_) \
2424 || (!TREE_ASM_WRITTEN (t2_) \
2425 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
2427 /* Only non-NULL trees outside of the SCC may compare equal. */ \
2428 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
2431 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
2433 if (code
!= IDENTIFIER_NODE
)
2434 compare_tree_edges (TREE_TYPE (t1
), TREE_TYPE (t2
));
2437 if (CODE_CONTAINS_STRUCT (code
, TS_VECTOR
))
2440 /* Note that the number of elements for EXPR has already been emitted
2441 in EXPR's header (see streamer_write_tree_header). */
2442 for (i
= 0; i
< VECTOR_CST_NELTS (t1
); ++i
)
2443 compare_tree_edges (VECTOR_CST_ELT (t1
, i
), VECTOR_CST_ELT (t2
, i
));
2446 if (CODE_CONTAINS_STRUCT (code
, TS_COMPLEX
))
2448 compare_tree_edges (TREE_REALPART (t1
), TREE_REALPART (t2
));
2449 compare_tree_edges (TREE_IMAGPART (t1
), TREE_IMAGPART (t2
));
2452 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_MINIMAL
))
2454 compare_tree_edges (DECL_NAME (t1
), DECL_NAME (t2
));
2455 /* ??? Global decls from different TUs have non-matching
2456 TRANSLATION_UNIT_DECLs. Only consider a small set of
2457 decls equivalent, we should not end up merging others. */
2458 if ((code
== TYPE_DECL
2459 || code
== NAMESPACE_DECL
2460 || code
== IMPORTED_DECL
2461 || code
== CONST_DECL
2462 || (VAR_OR_FUNCTION_DECL_P (t1
)
2463 && (TREE_PUBLIC (t1
) || DECL_EXTERNAL (t1
))))
2464 && DECL_FILE_SCOPE_P (t1
) && DECL_FILE_SCOPE_P (t2
))
2467 compare_tree_edges (DECL_CONTEXT (t1
), DECL_CONTEXT (t2
));
2470 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
2472 compare_tree_edges (DECL_SIZE (t1
), DECL_SIZE (t2
));
2473 compare_tree_edges (DECL_SIZE_UNIT (t1
), DECL_SIZE_UNIT (t2
));
2474 compare_tree_edges (DECL_ATTRIBUTES (t1
), DECL_ATTRIBUTES (t2
));
2475 if ((code
== VAR_DECL
2476 || code
== PARM_DECL
)
2477 && DECL_HAS_VALUE_EXPR_P (t1
))
2478 compare_tree_edges (DECL_VALUE_EXPR (t1
), DECL_VALUE_EXPR (t2
));
2479 if (code
== VAR_DECL
2480 && DECL_HAS_DEBUG_EXPR_P (t1
))
2481 compare_tree_edges (DECL_DEBUG_EXPR (t1
), DECL_DEBUG_EXPR (t2
));
2482 /* LTO specific edges. */
2483 if (code
!= FUNCTION_DECL
2484 && code
!= TRANSLATION_UNIT_DECL
)
2485 compare_tree_edges (DECL_INITIAL (t1
), DECL_INITIAL (t2
));
2488 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_NON_COMMON
))
2490 if (code
== FUNCTION_DECL
)
2493 for (a1
= DECL_ARGUMENTS (t1
), a2
= DECL_ARGUMENTS (t2
);
2495 a1
= TREE_CHAIN (a1
), a2
= TREE_CHAIN (a2
))
2496 compare_tree_edges (a1
, a2
);
2497 compare_tree_edges (DECL_RESULT (t1
), DECL_RESULT (t2
));
2499 else if (code
== TYPE_DECL
)
2500 compare_tree_edges (DECL_ORIGINAL_TYPE (t1
), DECL_ORIGINAL_TYPE (t2
));
2501 compare_tree_edges (DECL_VINDEX (t1
), DECL_VINDEX (t2
));
2504 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_WITH_VIS
))
2506 /* Make sure we don't inadvertently set the assembler name. */
2507 if (DECL_ASSEMBLER_NAME_SET_P (t1
))
2508 compare_tree_edges (DECL_ASSEMBLER_NAME (t1
),
2509 DECL_ASSEMBLER_NAME (t2
));
2510 compare_tree_edges (DECL_SECTION_NAME (t1
), DECL_SECTION_NAME (t2
));
2511 compare_tree_edges (DECL_COMDAT_GROUP (t1
), DECL_COMDAT_GROUP (t2
));
2514 if (CODE_CONTAINS_STRUCT (code
, TS_FIELD_DECL
))
2516 compare_tree_edges (DECL_FIELD_OFFSET (t1
), DECL_FIELD_OFFSET (t2
));
2517 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1
), DECL_BIT_FIELD_TYPE (t2
));
2518 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1
),
2519 DECL_BIT_FIELD_REPRESENTATIVE (t2
));
2520 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1
),
2521 DECL_FIELD_BIT_OFFSET (t2
));
2522 compare_tree_edges (DECL_FCONTEXT (t1
), DECL_FCONTEXT (t2
));
2525 if (CODE_CONTAINS_STRUCT (code
, TS_FUNCTION_DECL
))
2527 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1
),
2528 DECL_FUNCTION_PERSONALITY (t2
));
2529 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1
),
2530 DECL_FUNCTION_SPECIFIC_TARGET (t2
));
2531 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1
),
2532 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2
));
2535 if (CODE_CONTAINS_STRUCT (code
, TS_TYPE_COMMON
))
2537 compare_tree_edges (TYPE_SIZE (t1
), TYPE_SIZE (t2
));
2538 compare_tree_edges (TYPE_SIZE_UNIT (t1
), TYPE_SIZE_UNIT (t2
));
2539 compare_tree_edges (TYPE_ATTRIBUTES (t1
), TYPE_ATTRIBUTES (t2
));
2540 compare_tree_edges (TYPE_NAME (t1
), TYPE_NAME (t2
));
2541 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
2542 reconstructed during fixup. */
2543 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
2545 compare_tree_edges (TYPE_MAIN_VARIANT (t1
), TYPE_MAIN_VARIANT (t2
));
2546 /* ??? Global types from different TUs have non-matching
2547 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
2549 if (TYPE_FILE_SCOPE_P (t1
) && TYPE_FILE_SCOPE_P (t2
))
2552 compare_tree_edges (TYPE_CONTEXT (t1
), TYPE_CONTEXT (t2
));
2553 /* TYPE_CANONICAL is re-computed during type merging, so do not
2555 compare_tree_edges (TYPE_STUB_DECL (t1
), TYPE_STUB_DECL (t2
));
2558 if (CODE_CONTAINS_STRUCT (code
, TS_TYPE_NON_COMMON
))
2560 if (code
== ENUMERAL_TYPE
)
2561 compare_tree_edges (TYPE_VALUES (t1
), TYPE_VALUES (t2
));
2562 else if (code
== ARRAY_TYPE
)
2563 compare_tree_edges (TYPE_DOMAIN (t1
), TYPE_DOMAIN (t2
));
2564 else if (RECORD_OR_UNION_TYPE_P (t1
))
2567 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
2569 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
2570 compare_tree_edges (f1
, f2
);
2571 compare_tree_edges (TYPE_BINFO (t1
), TYPE_BINFO (t2
));
2573 else if (code
== FUNCTION_TYPE
2574 || code
== METHOD_TYPE
)
2575 compare_tree_edges (TYPE_ARG_TYPES (t1
), TYPE_ARG_TYPES (t2
));
2576 if (!POINTER_TYPE_P (t1
))
2577 compare_tree_edges (TYPE_MINVAL (t1
), TYPE_MINVAL (t2
));
2578 compare_tree_edges (TYPE_MAXVAL (t1
), TYPE_MAXVAL (t2
));
2581 if (CODE_CONTAINS_STRUCT (code
, TS_LIST
))
2583 compare_tree_edges (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
));
2584 compare_tree_edges (TREE_VALUE (t1
), TREE_VALUE (t2
));
2585 compare_tree_edges (TREE_CHAIN (t1
), TREE_CHAIN (t2
));
2588 if (CODE_CONTAINS_STRUCT (code
, TS_VEC
))
2589 for (int i
= 0; i
< TREE_VEC_LENGTH (t1
); i
++)
2590 compare_tree_edges (TREE_VEC_ELT (t1
, i
), TREE_VEC_ELT (t2
, i
));
2592 if (CODE_CONTAINS_STRUCT (code
, TS_EXP
))
2594 for (int i
= 0; i
< TREE_OPERAND_LENGTH (t1
); i
++)
2595 compare_tree_edges (TREE_OPERAND (t1
, i
),
2596 TREE_OPERAND (t2
, i
));
2598 /* BLOCKs are function local and we don't merge anything there. */
2599 if (TREE_BLOCK (t1
) || TREE_BLOCK (t2
))
2603 if (CODE_CONTAINS_STRUCT (code
, TS_BINFO
))
2607 /* Lengths have already been compared above. */
2608 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1
), i
, t
)
2609 compare_tree_edges (t
, BINFO_BASE_BINFO (t2
, i
));
2610 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1
), i
, t
)
2611 compare_tree_edges (t
, BINFO_BASE_ACCESS (t2
, i
));
2612 compare_tree_edges (BINFO_OFFSET (t1
), BINFO_OFFSET (t2
));
2613 compare_tree_edges (BINFO_VTABLE (t1
), BINFO_VTABLE (t2
));
2614 compare_tree_edges (BINFO_VPTR_FIELD (t1
), BINFO_VPTR_FIELD (t2
));
2615 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
2616 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
2619 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
2623 /* Lengths have already been compared above. */
2624 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1
), i
, index
, value
)
2626 compare_tree_edges (index
, CONSTRUCTOR_ELT (t2
, i
)->index
);
2627 compare_tree_edges (value
, CONSTRUCTOR_ELT (t2
, i
)->value
);
2631 #undef compare_tree_edges
2636 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
2637 out MAP if they are equal. */
2640 compare_tree_sccs (tree_scc
*pscc
, tree_scc
*scc
,
2643 /* Assume SCC entry hashes are sorted after their cardinality. Which
2644 means we can simply take the first n-tuple of equal hashes
2645 (which is recorded as entry_len) and do n SCC entry candidate
2647 for (unsigned i
= 0; i
< pscc
->entry_len
; ++i
)
2650 num_scc_compare_collisions
++;
2651 if (compare_tree_sccs_1 (pscc
->entries
[0], scc
->entries
[i
], &mapp
))
2653 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
2654 on the scc as all trees will be freed. */
2657 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
2658 the SCC prevails. */
2659 for (unsigned j
= 0; j
< scc
->len
; ++j
)
2660 TREE_ASM_WRITTEN (scc
->entries
[j
]) = 0;
2666 /* QSort sort function to sort a map of two pointers after the 2nd
2670 cmp_tree (const void *p1_
, const void *p2_
)
2672 tree
*p1
= (tree
*)(const_cast<void *>(p1_
));
2673 tree
*p2
= (tree
*)(const_cast<void *>(p2_
));
2676 return ((uintptr_t)p1
[1] < (uintptr_t)p2
[1]) ? -1 : 1;
2679 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
2680 hash value SCC_HASH with an already recorded SCC. Return true if
2681 that was successful, otherwise return false. */
2684 unify_scc (struct streamer_tree_cache_d
*cache
, unsigned from
,
2685 unsigned len
, unsigned scc_entry_len
, hashval_t scc_hash
)
2687 bool unified_p
= false;
2689 = (tree_scc
*) alloca (sizeof (tree_scc
) + (len
- 1) * sizeof (tree
));
2691 scc
->hash
= scc_hash
;
2693 scc
->entry_len
= scc_entry_len
;
2694 for (unsigned i
= 0; i
< len
; ++i
)
2696 tree t
= streamer_tree_cache_get_tree (cache
, from
+ i
);
2697 scc
->entries
[i
] = t
;
2698 /* Do not merge SCCs with local entities inside them. Also do
2699 not merge TRANSLATION_UNIT_DECLs. */
2700 if (TREE_CODE (t
) == TRANSLATION_UNIT_DECL
2701 || (VAR_OR_FUNCTION_DECL_P (t
)
2702 && !(TREE_PUBLIC (t
) || DECL_EXTERNAL (t
)))
2703 || TREE_CODE (t
) == LABEL_DECL
)
2705 /* Avoid doing any work for these cases and do not worry to
2706 record the SCCs for further merging. */
2711 /* Look for the list of candidate SCCs to compare against. */
2713 slot
= tree_scc_hash
.find_slot_with_hash (scc
, scc_hash
, INSERT
);
2716 /* Try unifying against each candidate. */
2719 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
2720 outside of the scc when following tree edges. Make sure
2721 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
2722 to track whether we visited the SCC member during the compare.
2723 We cannot use TREE_VISITED on the pscc members as the extended
2724 scc and pscc can overlap. */
2725 for (unsigned i
= 0; i
< scc
->len
; ++i
)
2727 TREE_VISITED (scc
->entries
[i
]) = 1;
2728 gcc_checking_assert (!TREE_ASM_WRITTEN (scc
->entries
[i
]));
2731 tree
*map
= XALLOCAVEC (tree
, 2 * len
);
2732 for (tree_scc
*pscc
= *slot
; pscc
; pscc
= pscc
->next
)
2734 if (!compare_tree_sccs (pscc
, scc
, map
))
2737 /* Found an equal SCC. */
2739 num_scc_compare_collisions
--;
2741 total_scc_size_merged
+= len
;
2743 #ifdef ENABLE_CHECKING
2744 for (unsigned i
= 0; i
< len
; ++i
)
2746 tree t
= map
[2*i
+1];
2747 enum tree_code code
= TREE_CODE (t
);
2748 /* IDENTIFIER_NODEs should be singletons and are merged by the
2749 streamer. The others should be singletons, too, and we
2750 should not merge them in any way. */
2751 gcc_assert (code
!= TRANSLATION_UNIT_DECL
2752 && code
!= IDENTIFIER_NODE
2753 && !streamer_handle_as_builtin_p (t
));
2757 /* Fixup the streamer cache with the prevailing nodes according
2758 to the tree node mapping computed by compare_tree_sccs. */
2760 streamer_tree_cache_replace_tree (cache
, pscc
->entries
[0], from
);
2763 tree
*map2
= XALLOCAVEC (tree
, 2 * len
);
2764 for (unsigned i
= 0; i
< len
; ++i
)
2766 map2
[i
*2] = (tree
)(uintptr_t)(from
+ i
);
2767 map2
[i
*2+1] = scc
->entries
[i
];
2769 qsort (map2
, len
, 2 * sizeof (tree
), cmp_tree
);
2770 qsort (map
, len
, 2 * sizeof (tree
), cmp_tree
);
2771 for (unsigned i
= 0; i
< len
; ++i
)
2772 streamer_tree_cache_replace_tree (cache
, map
[2*i
],
2773 (uintptr_t)map2
[2*i
]);
2776 /* Free the tree nodes from the read SCC. */
2777 for (unsigned i
= 0; i
< len
; ++i
)
2779 if (TYPE_P (scc
->entries
[i
]))
2781 ggc_free (scc
->entries
[i
]);
2787 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
2789 for (unsigned i
= 0; i
< scc
->len
; ++i
)
2790 TREE_VISITED (scc
->entries
[i
]) = 0;
2793 /* If we didn't unify it to any candidate duplicate the relevant
2794 pieces to permanent storage and link it into the chain. */
2798 = XOBNEWVAR (&tree_scc_hash_obstack
, tree_scc
, sizeof (tree_scc
));
2799 memcpy (pscc
, scc
, sizeof (tree_scc
));
2800 pscc
->next
= (*slot
);
2807 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
2808 RESOLUTIONS is the set of symbols picked by the linker (read from the
2809 resolution file when the linker plugin is being used). */
2812 lto_read_decls (struct lto_file_decl_data
*decl_data
, const void *data
,
2813 vec
<ld_plugin_symbol_resolution_t
> resolutions
)
2815 const struct lto_decl_header
*header
= (const struct lto_decl_header
*) data
;
2816 const int decl_offset
= sizeof (struct lto_decl_header
);
2817 const int main_offset
= decl_offset
+ header
->decl_state_size
;
2818 const int string_offset
= main_offset
+ header
->main_size
;
2819 struct lto_input_block ib_main
;
2820 struct data_in
*data_in
;
2822 const uint32_t *data_ptr
, *data_end
;
2823 uint32_t num_decl_states
;
2825 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
2828 data_in
= lto_data_in_create (decl_data
, (const char *) data
+ string_offset
,
2829 header
->string_size
, resolutions
);
2831 /* We do not uniquify the pre-loaded cache entries, those are middle-end
2832 internal types that should not be merged. */
2834 /* Read the global declarations and types. */
2835 while (ib_main
.p
< ib_main
.len
)
2838 unsigned from
= data_in
->reader_cache
->nodes
.length ();
2839 /* Read and uniquify SCCs as in the input stream. */
2840 enum LTO_tags tag
= streamer_read_record_start (&ib_main
);
2841 if (tag
== LTO_tree_scc
)
2844 unsigned scc_entry_len
;
2845 hashval_t scc_hash
= lto_input_scc (&ib_main
, data_in
, &len_
,
2847 unsigned len
= data_in
->reader_cache
->nodes
.length () - from
;
2848 gcc_assert (len
== len_
);
2850 total_scc_size
+= len
;
2853 /* We have the special case of size-1 SCCs that are pre-merged
2854 by means of identifier and string sharing for example.
2855 ??? Maybe we should avoid streaming those as SCCs. */
2856 tree first
= streamer_tree_cache_get_tree (data_in
->reader_cache
,
2859 && (TREE_CODE (first
) == IDENTIFIER_NODE
2860 || TREE_CODE (first
) == INTEGER_CST
2861 || TREE_CODE (first
) == TRANSLATION_UNIT_DECL
2862 || streamer_handle_as_builtin_p (first
)))
2865 /* Try to unify the SCC with already existing ones. */
2867 && unify_scc (data_in
->reader_cache
, from
,
2868 len
, scc_entry_len
, scc_hash
))
2871 /* Do remaining fixup tasks for prevailing nodes. */
2872 bool seen_type
= false;
2873 bool not_merged_type_same_scc
= false;
2874 bool not_merged_type_not_same_scc
= false;
2875 for (unsigned i
= 0; i
< len
; ++i
)
2877 tree t
= streamer_tree_cache_get_tree (data_in
->reader_cache
,
2879 /* For statistics, see if the old code would have merged
2882 && (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
)))
2884 tree newt
= gimple_register_type (t
);
2887 num_not_merged_types
++;
2889 /* Check if we can never merge the types because
2890 they are in the same SCC and thus the old
2892 for (j
= 0; j
< len
; ++j
)
2894 && streamer_tree_cache_get_tree
2895 (data_in
->reader_cache
, from
+ j
) == newt
)
2897 num_not_merged_types_in_same_scc
++;
2898 not_merged_type_same_scc
= true;
2902 not_merged_type_not_same_scc
= true;
2905 /* Reconstruct the type variant and pointer-to/reference-to
2910 num_prevailing_types
++;
2911 lto_fixup_prevailing_type (t
);
2913 /* Compute the canonical type of all types.
2914 ??? Should be able to assert that !TYPE_CANONICAL. */
2915 if (TYPE_P (t
) && !TYPE_CANONICAL (t
))
2916 TYPE_CANONICAL (t
) = gimple_register_canonical_type (t
);
2917 /* Link shared INTEGER_CSTs into TYPE_CACHED_VALUEs of its
2918 type which is also member of this SCC. */
2919 if (TREE_CODE (t
) == INTEGER_CST
2920 && !TREE_OVERFLOW (t
))
2921 cache_integer_cst (t
);
2922 /* Register TYPE_DECLs with the debuginfo machinery. */
2924 && TREE_CODE (t
) == TYPE_DECL
)
2925 debug_hooks
->type_decl (t
, !DECL_FILE_SCOPE_P (t
));
2928 /* Register variables and functions with the
2930 if (TREE_CODE (t
) == VAR_DECL
)
2931 lto_register_var_decl_in_symtab (data_in
, t
, from
+ i
);
2932 else if (TREE_CODE (t
) == FUNCTION_DECL
2933 && !DECL_BUILT_IN (t
))
2934 lto_register_function_decl_in_symtab (data_in
, t
, from
+ i
);
2935 /* Scan the tree for references to global functions or
2936 variables and record those for later fixup. */
2937 if (mentions_vars_p (t
))
2938 vec_safe_push (tree_with_vars
, t
);
2941 if (not_merged_type_same_scc
)
2943 num_not_merged_types_in_same_scc_trees
+= len
;
2944 num_not_merged_types_trees
+= len
;
2946 else if (not_merged_type_not_same_scc
)
2947 num_not_merged_types_trees
+= len
;
2949 num_type_scc_trees
+= len
;
2953 /* Pickle stray references. */
2954 t
= lto_input_tree_1 (&ib_main
, data_in
, tag
, 0);
2955 gcc_assert (t
&& data_in
->reader_cache
->nodes
.length () == from
);
2959 /* Read in lto_in_decl_state objects. */
2960 data_ptr
= (const uint32_t *) ((const char*) data
+ decl_offset
);
2962 (const uint32_t *) ((const char*) data_ptr
+ header
->decl_state_size
);
2963 num_decl_states
= *data_ptr
++;
2965 gcc_assert (num_decl_states
> 0);
2966 decl_data
->global_decl_state
= lto_new_in_decl_state ();
2967 data_ptr
= lto_read_in_decl_state (data_in
, data_ptr
,
2968 decl_data
->global_decl_state
);
2970 /* Read in per-function decl states and enter them in hash table. */
2971 decl_data
->function_decl_states
=
2972 htab_create_ggc (37, lto_hash_in_decl_state
, lto_eq_in_decl_state
, NULL
);
2974 for (i
= 1; i
< num_decl_states
; i
++)
2976 struct lto_in_decl_state
*state
= lto_new_in_decl_state ();
2979 data_ptr
= lto_read_in_decl_state (data_in
, data_ptr
, state
);
2980 slot
= htab_find_slot (decl_data
->function_decl_states
, state
, INSERT
);
2981 gcc_assert (*slot
== NULL
);
2985 if (data_ptr
!= data_end
)
2986 internal_error ("bytecode stream: garbage at the end of symbols section");
2988 /* Set the current decl state to be the global state. */
2989 decl_data
->current_decl_state
= decl_data
->global_decl_state
;
2991 lto_data_in_delete (data_in
);
2994 /* Custom version of strtoll, which is not portable. */
2996 static HOST_WIDEST_INT
2997 lto_parse_hex (const char *p
)
2999 HOST_WIDEST_INT ret
= 0;
3001 for (; *p
!= '\0'; ++p
)
3006 if (c
>= '0' && c
<= '9')
3008 else if (c
>= 'a' && c
<= 'f')
3009 part
= c
- 'a' + 10;
3010 else if (c
>= 'A' && c
<= 'F')
3011 part
= c
- 'A' + 10;
3013 internal_error ("could not parse hex number");
3020 /* Read resolution for file named FILE_NAME. The resolution is read from
3024 lto_resolution_read (splay_tree file_ids
, FILE *resolution
, lto_file
*file
)
3026 /* We require that objects in the resolution file are in the same
3027 order as the lto1 command line. */
3028 unsigned int name_len
;
3030 unsigned int num_symbols
;
3032 struct lto_file_decl_data
*file_data
;
3033 splay_tree_node nd
= NULL
;
3038 name_len
= strlen (file
->filename
);
3039 obj_name
= XNEWVEC (char, name_len
+ 1);
3040 fscanf (resolution
, " "); /* Read white space. */
3042 fread (obj_name
, sizeof (char), name_len
, resolution
);
3043 obj_name
[name_len
] = '\0';
3044 if (filename_cmp (obj_name
, file
->filename
) != 0)
3045 internal_error ("unexpected file name %s in linker resolution file. "
3046 "Expected %s", obj_name
, file
->filename
);
3047 if (file
->offset
!= 0)
3051 HOST_WIDEST_INT offset
;
3052 t
= fscanf (resolution
, "@0x%16s", offset_p
);
3054 internal_error ("could not parse file offset");
3055 offset
= lto_parse_hex (offset_p
);
3056 if (offset
!= file
->offset
)
3057 internal_error ("unexpected offset");
3062 fscanf (resolution
, "%u", &num_symbols
);
3064 for (i
= 0; i
< num_symbols
; i
++)
3068 unsigned HOST_WIDE_INT id
;
3070 enum ld_plugin_symbol_resolution r
= (enum ld_plugin_symbol_resolution
) 0;
3072 unsigned int lto_resolution_str_len
=
3073 sizeof (lto_resolution_str
) / sizeof (char *);
3076 t
= fscanf (resolution
, "%u " HOST_WIDE_INT_PRINT_HEX_PURE
" %26s %*[^\n]\n",
3077 &index
, &id
, r_str
);
3079 internal_error ("invalid line in the resolution file");
3081 for (j
= 0; j
< lto_resolution_str_len
; j
++)
3083 if (strcmp (lto_resolution_str
[j
], r_str
) == 0)
3085 r
= (enum ld_plugin_symbol_resolution
) j
;
3089 if (j
== lto_resolution_str_len
)
3090 internal_error ("invalid resolution in the resolution file");
3092 if (!(nd
&& lto_splay_tree_id_equal_p (nd
->key
, id
)))
3094 nd
= lto_splay_tree_lookup (file_ids
, id
);
3096 internal_error ("resolution sub id %wx not in object file", id
);
3099 file_data
= (struct lto_file_decl_data
*)nd
->value
;
3100 /* The indexes are very sparse. To save memory save them in a compact
3101 format that is only unpacked later when the subfile is processed. */
3104 file_data
->respairs
.safe_push (rp
);
3105 if (file_data
->max_index
< index
)
3106 file_data
->max_index
= index
;
3110 /* List of file_decl_datas */
3111 struct file_data_list
3113 struct lto_file_decl_data
*first
, *last
;
3116 /* Is the name for a id'ed LTO section? */
3119 lto_section_with_id (const char *name
, unsigned HOST_WIDE_INT
*id
)
3123 if (strncmp (name
, LTO_SECTION_NAME_PREFIX
, strlen (LTO_SECTION_NAME_PREFIX
)))
3125 s
= strrchr (name
, '.');
3126 return s
&& sscanf (s
, "." HOST_WIDE_INT_PRINT_HEX_PURE
, id
) == 1;
3129 /* Create file_data of each sub file id */
3132 create_subid_section_table (struct lto_section_slot
*ls
, splay_tree file_ids
,
3133 struct file_data_list
*list
)
3135 struct lto_section_slot s_slot
, *new_slot
;
3136 unsigned HOST_WIDE_INT id
;
3140 struct lto_file_decl_data
*file_data
;
3142 if (!lto_section_with_id (ls
->name
, &id
))
3145 /* Find hash table of sub module id */
3146 nd
= lto_splay_tree_lookup (file_ids
, id
);
3149 file_data
= (struct lto_file_decl_data
*)nd
->value
;
3153 file_data
= ggc_alloc_lto_file_decl_data ();
3154 memset(file_data
, 0, sizeof (struct lto_file_decl_data
));
3156 file_data
->section_hash_table
= lto_obj_create_section_hash_table ();;
3157 lto_splay_tree_insert (file_ids
, id
, file_data
);
3159 /* Maintain list in linker order */
3161 list
->first
= file_data
;
3163 list
->last
->next
= file_data
;
3164 list
->last
= file_data
;
3167 /* Copy section into sub module hash table */
3168 new_name
= XDUPVEC (char, ls
->name
, strlen (ls
->name
) + 1);
3169 s_slot
.name
= new_name
;
3170 hash_slot
= htab_find_slot (file_data
->section_hash_table
, &s_slot
, INSERT
);
3171 gcc_assert (*hash_slot
== NULL
);
3173 new_slot
= XDUP (struct lto_section_slot
, ls
);
3174 new_slot
->name
= new_name
;
3175 *hash_slot
= new_slot
;
3179 /* Read declarations and other initializations for a FILE_DATA. */
3182 lto_file_finalize (struct lto_file_decl_data
*file_data
, lto_file
*file
)
3186 vec
<ld_plugin_symbol_resolution_t
>
3187 resolutions
= vNULL
;
3191 /* Create vector for fast access of resolution. We do this lazily
3193 resolutions
.safe_grow_cleared (file_data
->max_index
+ 1);
3194 for (i
= 0; file_data
->respairs
.iterate (i
, &rp
); i
++)
3195 resolutions
[rp
->index
] = rp
->res
;
3196 file_data
->respairs
.release ();
3198 file_data
->renaming_hash_table
= lto_create_renaming_table ();
3199 file_data
->file_name
= file
->filename
;
3200 data
= lto_get_section_data (file_data
, LTO_section_decls
, NULL
, &len
);
3203 internal_error ("cannot read LTO decls from %s", file_data
->file_name
);
3206 /* Frees resolutions */
3207 lto_read_decls (file_data
, data
, resolutions
);
3208 lto_free_section_data (file_data
, LTO_section_decls
, NULL
, data
, len
);
3211 /* Finalize FILE_DATA in FILE and increase COUNT. */
3214 lto_create_files_from_ids (lto_file
*file
, struct lto_file_decl_data
*file_data
,
3217 lto_file_finalize (file_data
, file
);
3218 if (cgraph_dump_file
)
3219 fprintf (cgraph_dump_file
, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX
"\n",
3220 file_data
->file_name
, file_data
->id
);
3225 /* Generate a TREE representation for all types and external decls
3228 Read all of the globals out of the file. Then read the cgraph
3229 and process the .o index into the cgraph nodes so that it can open
3230 the .o file to load the functions and ipa information. */
3232 static struct lto_file_decl_data
*
3233 lto_file_read (lto_file
*file
, FILE *resolution_file
, int *count
)
3235 struct lto_file_decl_data
*file_data
= NULL
;
3236 splay_tree file_ids
;
3237 htab_t section_hash_table
;
3238 struct lto_section_slot
*section
;
3239 struct file_data_list file_list
;
3240 struct lto_section_list section_list
;
3242 memset (§ion_list
, 0, sizeof (struct lto_section_list
));
3243 section_hash_table
= lto_obj_build_section_table (file
, §ion_list
);
3245 /* Find all sub modules in the object and put their sections into new hash
3246 tables in a splay tree. */
3247 file_ids
= lto_splay_tree_new ();
3248 memset (&file_list
, 0, sizeof (struct file_data_list
));
3249 for (section
= section_list
.first
; section
!= NULL
; section
= section
->next
)
3250 create_subid_section_table (section
, file_ids
, &file_list
);
3252 /* Add resolutions to file ids */
3253 lto_resolution_read (file_ids
, resolution_file
, file
);
3255 /* Finalize each lto file for each submodule in the merged object */
3256 for (file_data
= file_list
.first
; file_data
!= NULL
; file_data
= file_data
->next
)
3257 lto_create_files_from_ids (file
, file_data
, count
);
3259 splay_tree_delete (file_ids
);
3260 htab_delete (section_hash_table
);
3262 return file_list
.first
;
3265 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
3266 #define LTO_MMAP_IO 1
3270 /* Page size of machine is used for mmap and munmap calls. */
3271 static size_t page_mask
;
3274 /* Get the section data of length LEN from FILENAME starting at
3275 OFFSET. The data segment must be freed by the caller when the
3276 caller is finished. Returns NULL if all was not well. */
3279 lto_read_section_data (struct lto_file_decl_data
*file_data
,
3280 intptr_t offset
, size_t len
)
3284 static char *fd_name
;
3286 intptr_t computed_len
;
3287 intptr_t computed_offset
;
3291 /* Keep a single-entry file-descriptor cache. The last file we
3292 touched will get closed at exit.
3293 ??? Eventually we want to add a more sophisticated larger cache
3294 or rather fix function body streaming to not stream them in
3295 practically random order. */
3297 && filename_cmp (fd_name
, file_data
->file_name
) != 0)
3305 fd
= open (file_data
->file_name
, O_RDONLY
|O_BINARY
);
3308 fatal_error ("Cannot open %s", file_data
->file_name
);
3311 fd_name
= xstrdup (file_data
->file_name
);
3317 size_t page_size
= sysconf (_SC_PAGE_SIZE
);
3318 page_mask
= ~(page_size
- 1);
3321 computed_offset
= offset
& page_mask
;
3322 diff
= offset
- computed_offset
;
3323 computed_len
= len
+ diff
;
3325 result
= (char *) mmap (NULL
, computed_len
, PROT_READ
, MAP_PRIVATE
,
3326 fd
, computed_offset
);
3327 if (result
== MAP_FAILED
)
3329 fatal_error ("Cannot map %s", file_data
->file_name
);
3333 return result
+ diff
;
3335 result
= (char *) xmalloc (len
);
3336 if (lseek (fd
, offset
, SEEK_SET
) != offset
3337 || read (fd
, result
, len
) != (ssize_t
) len
)
3340 fatal_error ("Cannot read %s", file_data
->file_name
);
3344 /* Native windows doesn't supports delayed unlink on opened file. So
3345 we close file here again. This produces higher I/O load, but at least
3346 it prevents to have dangling file handles preventing unlink. */
3357 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
3358 NAME will be NULL unless the section type is for a function
3362 get_section_data (struct lto_file_decl_data
*file_data
,
3363 enum lto_section_type section_type
,
3367 htab_t section_hash_table
= file_data
->section_hash_table
;
3368 struct lto_section_slot
*f_slot
;
3369 struct lto_section_slot s_slot
;
3370 const char *section_name
= lto_get_section_name (section_type
, name
, file_data
);
3374 s_slot
.name
= section_name
;
3375 f_slot
= (struct lto_section_slot
*) htab_find (section_hash_table
, &s_slot
);
3378 data
= lto_read_section_data (file_data
, f_slot
->start
, f_slot
->len
);
3382 free (CONST_CAST (char *, section_name
));
3387 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
3388 starts at OFFSET and has LEN bytes. */
3391 free_section_data (struct lto_file_decl_data
*file_data ATTRIBUTE_UNUSED
,
3392 enum lto_section_type section_type ATTRIBUTE_UNUSED
,
3393 const char *name ATTRIBUTE_UNUSED
,
3394 const char *offset
, size_t len ATTRIBUTE_UNUSED
)
3397 intptr_t computed_len
;
3398 intptr_t computed_offset
;
3403 computed_offset
= ((intptr_t) offset
) & page_mask
;
3404 diff
= (intptr_t) offset
- computed_offset
;
3405 computed_len
= len
+ diff
;
3407 munmap ((caddr_t
) computed_offset
, computed_len
);
3409 free (CONST_CAST(char *, offset
));
3413 static lto_file
*current_lto_file
;
3415 /* Helper for qsort; compare partitions and return one with smaller size.
3416 We sort from greatest to smallest so parallel build doesn't stale on the
3417 longest compilation being executed too late. */
3420 cmp_partitions_size (const void *a
, const void *b
)
3422 const struct ltrans_partition_def
*pa
3423 = *(struct ltrans_partition_def
*const *)a
;
3424 const struct ltrans_partition_def
*pb
3425 = *(struct ltrans_partition_def
*const *)b
;
3426 return pb
->insns
- pa
->insns
;
3429 /* Helper for qsort; compare partitions and return one with smaller order. */
3432 cmp_partitions_order (const void *a
, const void *b
)
3434 const struct ltrans_partition_def
*pa
3435 = *(struct ltrans_partition_def
*const *)a
;
3436 const struct ltrans_partition_def
*pb
3437 = *(struct ltrans_partition_def
*const *)b
;
3438 int ordera
= -1, orderb
= -1;
3440 if (lto_symtab_encoder_size (pa
->encoder
))
3441 ordera
= lto_symtab_encoder_deref (pa
->encoder
, 0)->symbol
.order
;
3442 if (lto_symtab_encoder_size (pb
->encoder
))
3443 orderb
= lto_symtab_encoder_deref (pb
->encoder
, 0)->symbol
.order
;
3444 return orderb
- ordera
;
3447 /* Write all output files in WPA mode and the file with the list of
3451 lto_wpa_write_files (void)
3455 ltrans_partition part
;
3456 FILE *ltrans_output_list_stream
;
3457 char *temp_filename
;
3460 /* Open the LTRANS output list. */
3461 if (!ltrans_output_list
)
3462 fatal_error ("no LTRANS output list filename provided");
3463 ltrans_output_list_stream
= fopen (ltrans_output_list
, "w");
3464 if (ltrans_output_list_stream
== NULL
)
3465 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list
);
3467 timevar_push (TV_WHOPR_WPA
);
3469 FOR_EACH_VEC_ELT (ltrans_partitions
, i
, part
)
3470 lto_stats
.num_output_symtab_nodes
+= lto_symtab_encoder_size (part
->encoder
);
3472 /* Find out statics that need to be promoted
3473 to globals with hidden visibility because they are accessed from multiple
3475 lto_promote_cross_file_statics ();
3477 timevar_pop (TV_WHOPR_WPA
);
3479 timevar_push (TV_WHOPR_WPA_IO
);
3481 /* Generate a prefix for the LTRANS unit files. */
3482 blen
= strlen (ltrans_output_list
);
3483 temp_filename
= (char *) xmalloc (blen
+ sizeof ("2147483648.o"));
3484 strcpy (temp_filename
, ltrans_output_list
);
3485 if (blen
> sizeof (".out")
3486 && strcmp (temp_filename
+ blen
- sizeof (".out") + 1,
3488 temp_filename
[blen
- sizeof (".out") + 1] = '\0';
3489 blen
= strlen (temp_filename
);
3491 n_sets
= ltrans_partitions
.length ();
3493 /* Sort partitions by size so small ones are compiled last.
3494 FIXME: Even when not reordering we may want to output one list for parallel make
3495 and other for final link command. */
3496 ltrans_partitions
.qsort (flag_toplevel_reorder
3497 ? cmp_partitions_size
3498 : cmp_partitions_order
);
3499 for (i
= 0; i
< n_sets
; i
++)
3502 ltrans_partition part
= ltrans_partitions
[i
];
3504 /* Write all the nodes in SET. */
3505 sprintf (temp_filename
+ blen
, "%u.o", i
);
3506 file
= lto_obj_file_open (temp_filename
, true);
3508 fatal_error ("lto_obj_file_open() failed");
3511 fprintf (stderr
, " %s (%s %i insns)", temp_filename
, part
->name
, part
->insns
);
3512 if (cgraph_dump_file
)
3514 lto_symtab_encoder_iterator lsei
;
3516 fprintf (cgraph_dump_file
, "Writing partition %s to file %s, %i insns\n",
3517 part
->name
, temp_filename
, part
->insns
);
3518 fprintf (cgraph_dump_file
, " Symbols in partition: ");
3519 for (lsei
= lsei_start_in_partition (part
->encoder
); !lsei_end_p (lsei
);
3520 lsei_next_in_partition (&lsei
))
3522 symtab_node node
= lsei_node (lsei
);
3523 fprintf (cgraph_dump_file
, "%s ", symtab_node_asm_name (node
));
3525 fprintf (cgraph_dump_file
, "\n Symbols in boundary: ");
3526 for (lsei
= lsei_start (part
->encoder
); !lsei_end_p (lsei
);
3529 symtab_node node
= lsei_node (lsei
);
3530 if (!lto_symtab_encoder_in_partition_p (part
->encoder
, node
))
3532 fprintf (cgraph_dump_file
, "%s ", symtab_node_asm_name (node
));
3533 cgraph_node
*cnode
= dyn_cast
<cgraph_node
> (node
);
3535 && lto_symtab_encoder_encode_body_p (part
->encoder
, cnode
))
3536 fprintf (cgraph_dump_file
, "(body included)");
3539 varpool_node
*vnode
= dyn_cast
<varpool_node
> (node
);
3541 && lto_symtab_encoder_encode_initializer_p (part
->encoder
, vnode
))
3542 fprintf (cgraph_dump_file
, "(initializer included)");
3546 fprintf (cgraph_dump_file
, "\n");
3548 gcc_checking_assert (lto_symtab_encoder_size (part
->encoder
) || !i
);
3550 lto_set_current_out_file (file
);
3552 ipa_write_optimization_summaries (part
->encoder
);
3554 lto_set_current_out_file (NULL
);
3555 lto_obj_file_close (file
);
3557 part
->encoder
= NULL
;
3559 len
= strlen (temp_filename
);
3560 if (fwrite (temp_filename
, 1, len
, ltrans_output_list_stream
) < len
3561 || fwrite ("\n", 1, 1, ltrans_output_list_stream
) < 1)
3562 fatal_error ("writing to LTRANS output list %s: %m",
3563 ltrans_output_list
);
3566 lto_stats
.num_output_files
+= n_sets
;
3568 /* Close the LTRANS output list. */
3569 if (fclose (ltrans_output_list_stream
))
3570 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list
);
3572 free_ltrans_partitions();
3573 free (temp_filename
);
3575 timevar_pop (TV_WHOPR_WPA_IO
);
3579 /* If TT is a variable or function decl replace it with its
3580 prevailing variant. */
3581 #define LTO_SET_PREVAIL(tt) \
3583 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
3584 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
3586 tt = lto_symtab_prevailing_decl (tt); \
3591 /* Ensure that TT isn't a replacable var of function decl. */
3592 #define LTO_NO_PREVAIL(tt) \
3593 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
3595 /* Given a tree T replace all fields referring to variables or functions
3596 with their prevailing variant. */
3598 lto_fixup_prevailing_decls (tree t
)
3600 enum tree_code code
= TREE_CODE (t
);
3603 gcc_checking_assert (code
!= CONSTRUCTOR
&& code
!= TREE_BINFO
);
3604 LTO_NO_PREVAIL (TREE_TYPE (t
));
3605 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
3606 LTO_NO_PREVAIL (TREE_CHAIN (t
));
3609 LTO_NO_PREVAIL (DECL_NAME (t
));
3610 LTO_SET_PREVAIL (DECL_CONTEXT (t
));
3611 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
3613 LTO_SET_PREVAIL (DECL_SIZE (t
));
3614 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t
));
3615 LTO_SET_PREVAIL (DECL_INITIAL (t
));
3616 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t
));
3617 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t
));
3619 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_WITH_VIS
))
3621 LTO_NO_PREVAIL (t
->decl_with_vis
.assembler_name
);
3622 LTO_NO_PREVAIL (DECL_SECTION_NAME (t
));
3624 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_NON_COMMON
))
3626 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t
));
3627 LTO_NO_PREVAIL (DECL_RESULT_FLD (t
));
3628 LTO_NO_PREVAIL (DECL_VINDEX (t
));
3630 if (CODE_CONTAINS_STRUCT (code
, TS_FUNCTION_DECL
))
3631 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t
));
3632 if (CODE_CONTAINS_STRUCT (code
, TS_FIELD_DECL
))
3634 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t
));
3635 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t
));
3636 LTO_NO_PREVAIL (DECL_QUALIFIER (t
));
3637 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t
));
3638 LTO_NO_PREVAIL (DECL_FCONTEXT (t
));
3641 else if (TYPE_P (t
))
3643 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t
));
3644 LTO_SET_PREVAIL (TYPE_SIZE (t
));
3645 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t
));
3646 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t
));
3647 LTO_NO_PREVAIL (TYPE_NAME (t
));
3649 LTO_SET_PREVAIL (TYPE_MINVAL (t
));
3650 LTO_SET_PREVAIL (TYPE_MAXVAL (t
));
3651 LTO_NO_PREVAIL (t
->type_non_common
.binfo
);
3653 LTO_SET_PREVAIL (TYPE_CONTEXT (t
));
3655 LTO_NO_PREVAIL (TYPE_CANONICAL (t
));
3656 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t
));
3657 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t
));
3659 else if (EXPR_P (t
))
3662 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
3663 LTO_SET_PREVAIL (TREE_OPERAND (t
, i
));
3670 LTO_SET_PREVAIL (TREE_VALUE (t
));
3671 LTO_SET_PREVAIL (TREE_PURPOSE (t
));
3672 LTO_NO_PREVAIL (TREE_PURPOSE (t
));
3678 /* If we fixed nothing, then we missed something seen by
3680 gcc_checking_assert (fixed
);
3682 #undef LTO_SET_PREVAIL
3683 #undef LTO_NO_PREVAIL
3685 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
3686 replaces var and function decls with the corresponding prevailing def. */
3689 lto_fixup_state (struct lto_in_decl_state
*state
)
3692 struct lto_tree_ref_table
*table
;
3694 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
3695 we still need to walk from all DECLs to find the reachable
3696 FUNCTION_DECLs and VAR_DECLs. */
3697 for (si
= 0; si
< LTO_N_DECL_STREAMS
; si
++)
3699 table
= &state
->streams
[si
];
3700 for (i
= 0; i
< table
->size
; i
++)
3702 tree
*tp
= table
->trees
+ i
;
3703 if (VAR_OR_FUNCTION_DECL_P (*tp
)
3704 && (TREE_PUBLIC (*tp
) || DECL_EXTERNAL (*tp
)))
3705 *tp
= lto_symtab_prevailing_decl (*tp
);
3710 /* A callback of htab_traverse. Just extracts a state from SLOT
3711 and calls lto_fixup_state. */
3714 lto_fixup_state_aux (void **slot
, void *aux ATTRIBUTE_UNUSED
)
3716 struct lto_in_decl_state
*state
= (struct lto_in_decl_state
*) *slot
;
3717 lto_fixup_state (state
);
3721 /* Fix the decls from all FILES. Replaces each decl with the corresponding
3725 lto_fixup_decls (struct lto_file_decl_data
**files
)
3731 FOR_EACH_VEC_ELT ((*tree_with_vars
), i
, t
)
3732 lto_fixup_prevailing_decls (t
);
3734 for (i
= 0; files
[i
]; i
++)
3736 struct lto_file_decl_data
*file
= files
[i
];
3737 struct lto_in_decl_state
*state
= file
->global_decl_state
;
3738 lto_fixup_state (state
);
3740 htab_traverse (file
->function_decl_states
, lto_fixup_state_aux
, NULL
);
3744 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data
**all_file_decl_data
;
3746 /* Turn file datas for sub files into a single array, so that they look
3747 like separate files for further passes. */
3750 lto_flatten_files (struct lto_file_decl_data
**orig
, int count
, int last_file_ix
)
3752 struct lto_file_decl_data
*n
, *next
;
3755 lto_stats
.num_input_files
= count
;
3757 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count
+ 1);
3758 /* Set the hooks so that all of the ipa passes can read in their data. */
3759 lto_set_in_hooks (all_file_decl_data
, get_section_data
, free_section_data
);
3760 for (i
= 0, k
= 0; i
< last_file_ix
; i
++)
3762 for (n
= orig
[i
]; n
!= NULL
; n
= next
)
3764 all_file_decl_data
[k
++] = n
;
3769 all_file_decl_data
[k
] = NULL
;
3770 gcc_assert (k
== count
);
3773 /* Input file data before flattening (i.e. splitting them to subfiles to support
3774 incremental linking. */
3775 static int real_file_count
;
3776 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data
**real_file_decl_data
;
3778 static void print_lto_report_1 (void);
3780 /* Read all the symbols from the input files FNAMES. NFILES is the
3781 number of files requested in the command line. Instantiate a
3782 global call graph by aggregating all the sub-graphs found in each
3786 read_cgraph_and_symbols (unsigned nfiles
, const char **fnames
)
3788 unsigned int i
, last_file_ix
;
3791 struct lto_file_decl_data
**decl_data
;
3797 timevar_push (TV_IPA_LTO_DECL_IN
);
3800 = decl_data
= ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles
+ 1);
3801 real_file_count
= nfiles
;
3803 /* Read the resolution file. */
3805 if (resolution_file_name
)
3808 unsigned num_objects
;
3810 resolution
= fopen (resolution_file_name
, "r");
3811 if (resolution
== NULL
)
3812 fatal_error ("could not open symbol resolution file: %m");
3814 t
= fscanf (resolution
, "%u", &num_objects
);
3815 gcc_assert (t
== 1);
3817 /* True, since the plugin splits the archives. */
3818 gcc_assert (num_objects
== nfiles
);
3820 cgraph_state
= CGRAPH_LTO_STREAMING
;
3822 canonical_type_hash_cache
= htab_create_ggc (512, tree_int_map_hash
,
3823 tree_int_map_eq
, NULL
);
3824 gimple_canonical_types
= htab_create_ggc (16381, gimple_canonical_type_hash
,
3825 gimple_canonical_type_eq
, 0);
3826 type_hash_cache
= htab_create_ggc (512, tree_int_map_hash
,
3827 tree_int_map_eq
, NULL
);
3828 type_pair_cache
= XCNEWVEC (struct type_pair_d
, GIMPLE_TYPE_PAIR_SIZE
);
3829 gimple_types
= htab_create_ggc (16381, gimple_type_hash
, gimple_type_eq
, 0);
3830 gcc_obstack_init (&tree_scc_hash_obstack
);
3831 tree_scc_hash
.create (4096);
3833 /* Register the common node types with the canonical type machinery so
3834 we properly share alias-sets across languages and TUs. Do not
3835 expose the common nodes as type merge target - those that should be
3836 are already exposed so by pre-loading the LTO streamer caches. */
3837 for (i
= 0; i
< itk_none
; ++i
)
3838 lto_register_canonical_types (integer_types
[i
]);
3839 /* The sizetypes are not used to access data so we do not need to
3840 do anything about them. */
3841 for (i
= 0; i
< TI_MAX
; ++i
)
3842 lto_register_canonical_types (global_trees
[i
]);
3845 fprintf (stderr
, "Reading object files:");
3847 /* Read all of the object files specified on the command line. */
3848 for (i
= 0, last_file_ix
= 0; i
< nfiles
; ++i
)
3850 struct lto_file_decl_data
*file_data
= NULL
;
3853 fprintf (stderr
, " %s", fnames
[i
]);
3857 current_lto_file
= lto_obj_file_open (fnames
[i
], false);
3858 if (!current_lto_file
)
3861 file_data
= lto_file_read (current_lto_file
, resolution
, &count
);
3864 lto_obj_file_close (current_lto_file
);
3865 free (current_lto_file
);
3866 current_lto_file
= NULL
;
3870 decl_data
[last_file_ix
++] = file_data
;
3872 lto_obj_file_close (current_lto_file
);
3873 free (current_lto_file
);
3874 current_lto_file
= NULL
;
3877 lto_flatten_files (decl_data
, count
, last_file_ix
);
3878 lto_stats
.num_input_files
= count
;
3879 ggc_free(decl_data
);
3880 real_file_decl_data
= NULL
;
3882 if (resolution_file_name
)
3883 fclose (resolution
);
3885 /* Show the LTO report before launching LTRANS. */
3886 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
3887 print_lto_report_1 ();
3889 /* Free gimple type merging datastructures. */
3890 htab_delete (gimple_types
);
3891 gimple_types
= NULL
;
3892 htab_delete (type_hash_cache
);
3893 type_hash_cache
= NULL
;
3894 free (type_pair_cache
);
3895 type_pair_cache
= NULL
;
3896 tree_scc_hash
.dispose ();
3897 obstack_free (&tree_scc_hash_obstack
, NULL
);
3898 htab_delete (gimple_canonical_types
);
3899 gimple_canonical_types
= NULL
;
3900 htab_delete (canonical_type_hash_cache
);
3901 canonical_type_hash_cache
= NULL
;
3904 /* Set the hooks so that all of the ipa passes can read in their data. */
3905 lto_set_in_hooks (all_file_decl_data
, get_section_data
, free_section_data
);
3907 timevar_pop (TV_IPA_LTO_DECL_IN
);
3910 fprintf (stderr
, "\nReading the callgraph\n");
3912 timevar_push (TV_IPA_LTO_CGRAPH_IO
);
3913 /* Read the symtab. */
3916 /* Store resolutions into the symbol table. */
3918 FOR_EACH_SYMBOL (snode
)
3919 if (symtab_real_symbol_p (snode
)
3920 && snode
->symbol
.lto_file_data
3921 && snode
->symbol
.lto_file_data
->resolution_map
3922 && (res
= pointer_map_contains (snode
->symbol
.lto_file_data
->resolution_map
,
3923 snode
->symbol
.decl
)))
3924 snode
->symbol
.resolution
3925 = (enum ld_plugin_symbol_resolution
)(size_t)*res
;
3926 for (i
= 0; all_file_decl_data
[i
]; i
++)
3927 if (all_file_decl_data
[i
]->resolution_map
)
3929 pointer_map_destroy (all_file_decl_data
[i
]->resolution_map
);
3930 all_file_decl_data
[i
]->resolution_map
= NULL
;
3933 timevar_pop (TV_IPA_LTO_CGRAPH_IO
);
3936 fprintf (stderr
, "Merging declarations\n");
3938 timevar_push (TV_IPA_LTO_DECL_MERGE
);
3939 /* Merge global decls. In ltrans mode we read merged cgraph, we do not
3940 need to care about resolving symbols again, we only need to replace
3941 duplicated declarations read from the callgraph and from function
3945 lto_symtab_merge_decls ();
3947 /* If there were errors during symbol merging bail out, we have no
3948 good way to recover here. */
3950 fatal_error ("errors during merging of translation units");
3952 /* Fixup all decls. */
3953 lto_fixup_decls (all_file_decl_data
);
3956 ggc_free (tree_with_vars
);
3957 tree_with_vars
= NULL
;
3960 timevar_pop (TV_IPA_LTO_DECL_MERGE
);
3961 /* Each pass will set the appropriate timer. */
3964 fprintf (stderr
, "Reading summaries\n");
3966 /* Read the IPA summary data. */
3968 ipa_read_optimization_summaries ();
3970 ipa_read_summaries ();
3972 for (i
= 0; all_file_decl_data
[i
]; i
++)
3974 gcc_assert (all_file_decl_data
[i
]->symtab_node_encoder
);
3975 lto_symtab_encoder_delete (all_file_decl_data
[i
]->symtab_node_encoder
);
3976 all_file_decl_data
[i
]->symtab_node_encoder
= NULL
;
3977 lto_free_function_in_decl_state (all_file_decl_data
[i
]->global_decl_state
);
3978 all_file_decl_data
[i
]->global_decl_state
= NULL
;
3979 all_file_decl_data
[i
]->current_decl_state
= NULL
;
3982 /* Finally merge the cgraph according to the decl merging decisions. */
3983 timevar_push (TV_IPA_LTO_CGRAPH_MERGE
);
3984 if (cgraph_dump_file
)
3986 fprintf (cgraph_dump_file
, "Before merging:\n");
3987 dump_symtab (cgraph_dump_file
);
3989 lto_symtab_merge_symbols ();
3991 cgraph_state
= CGRAPH_STATE_IPA_SSA
;
3993 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE
);
3995 timevar_push (TV_IPA_LTO_DECL_INIT_IO
);
3997 /* Indicate that the cgraph is built and ready. */
3998 cgraph_function_flags_ready
= true;
4000 timevar_pop (TV_IPA_LTO_DECL_INIT_IO
);
4001 ggc_free (all_file_decl_data
);
4002 all_file_decl_data
= NULL
;
4006 /* Materialize all the bodies for all the nodes in the callgraph. */
4009 materialize_cgraph (void)
4012 struct cgraph_node
*node
;
4014 timevar_id_t lto_timer
;
4018 flag_wpa
? "Materializing decls:" : "Reading function bodies:");
4020 /* Now that we have input the cgraph, we need to clear all of the aux
4021 nodes and read the functions if we are not running in WPA mode. */
4022 timevar_push (TV_IPA_LTO_GIMPLE_IN
);
4024 FOR_EACH_FUNCTION (node
)
4026 if (node
->symbol
.lto_file_data
)
4028 lto_materialize_function (node
);
4029 lto_stats
.num_input_cgraph_nodes
++;
4033 timevar_pop (TV_IPA_LTO_GIMPLE_IN
);
4035 /* Start the appropriate timer depending on the mode that we are
4037 lto_timer
= (flag_wpa
) ? TV_WHOPR_WPA
4038 : (flag_ltrans
) ? TV_WHOPR_LTRANS
4040 timevar_push (lto_timer
);
4042 current_function_decl
= NULL
;
4045 /* Inform the middle end about the global variables we have seen. */
4046 FOR_EACH_VEC_ELT (*lto_global_var_decls
, i
, decl
)
4047 rest_of_decl_compilation (decl
, 1, 0);
4050 fprintf (stderr
, "\n");
4052 timevar_pop (lto_timer
);
4056 /* Show various memory usage statistics related to LTO. */
4058 print_lto_report_1 (void)
4060 const char *pfx
= (flag_lto
) ? "LTO" : (flag_wpa
) ? "WPA" : "LTRANS";
4061 fprintf (stderr
, "%s statistics\n", pfx
);
4063 fprintf (stderr
, "[%s] read %lu SCCs of average size %f\n",
4064 pfx
, num_sccs_read
, total_scc_size
/ (double)num_sccs_read
);
4065 fprintf (stderr
, "[%s] %lu tree bodies read in total\n", pfx
, total_scc_size
);
4066 if (flag_wpa
&& tree_scc_hash
.is_created ())
4068 fprintf (stderr
, "[%s] tree SCC table: size %ld, %ld elements, "
4069 "collision ratio: %f\n", pfx
,
4070 (long) tree_scc_hash
.size (),
4071 (long) tree_scc_hash
.elements (),
4072 tree_scc_hash
.collisions ());
4073 hash_table
<tree_scc_hasher
>::iterator hiter
;
4074 tree_scc
*scc
, *max_scc
= NULL
;
4075 unsigned max_length
= 0;
4076 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash
, scc
, x
, hiter
)
4078 unsigned length
= 0;
4080 for (; s
; s
= s
->next
)
4082 if (length
> max_length
)
4084 max_length
= length
;
4088 fprintf (stderr
, "[%s] tree SCC max chain length %u (size %u)\n",
4089 pfx
, max_length
, max_scc
->len
);
4090 fprintf (stderr
, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx
,
4091 num_scc_compares
, num_scc_compare_collisions
,
4092 num_scc_compare_collisions
/ (double) num_scc_compares
);
4093 fprintf (stderr
, "[%s] Merged %lu SCCs\n", pfx
, num_sccs_merged
);
4094 fprintf (stderr
, "[%s] Merged %lu tree bodies\n", pfx
,
4095 total_scc_size_merged
);
4096 fprintf (stderr
, "[%s] Merged %lu types\n", pfx
, num_merged_types
);
4097 fprintf (stderr
, "[%s] %lu types prevailed (%lu associated trees)\n",
4098 pfx
, num_prevailing_types
, num_type_scc_trees
);
4099 fprintf (stderr
, "[%s] Old merging code merges an additional %lu types"
4100 " of which %lu are in the same SCC with their "
4101 "prevailing variant (%lu and %lu associated trees)\n",
4102 pfx
, num_not_merged_types
, num_not_merged_types_in_same_scc
,
4103 num_not_merged_types_trees
,
4104 num_not_merged_types_in_same_scc_trees
);
4105 fprintf (stderr
, "[%s] GIMPLE canonical type table: size %ld, "
4106 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx
,
4107 (long) htab_size (gimple_canonical_types
),
4108 (long) htab_elements (gimple_canonical_types
),
4109 (long) gimple_canonical_types
->searches
,
4110 (long) gimple_canonical_types
->collisions
,
4111 htab_collisions (gimple_canonical_types
));
4112 fprintf (stderr
, "[%s] GIMPLE canonical type hash table: size %ld, "
4113 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx
,
4114 (long) htab_size (canonical_type_hash_cache
),
4115 (long) htab_elements (canonical_type_hash_cache
),
4116 (long) canonical_type_hash_cache
->searches
,
4117 (long) canonical_type_hash_cache
->collisions
,
4118 htab_collisions (canonical_type_hash_cache
));
4121 print_lto_report (pfx
);
4124 /* Perform whole program analysis (WPA) on the callgraph and write out the
4125 optimization plan. */
4128 do_whole_program_analysis (void)
4132 timevar_start (TV_PHASE_OPT_GEN
);
4134 /* Note that since we are in WPA mode, materialize_cgraph will not
4135 actually read in all the function bodies. It only materializes
4136 the decls and cgraph nodes so that analysis can be performed. */
4137 materialize_cgraph ();
4139 /* Reading in the cgraph uses different timers, start timing WPA now. */
4140 timevar_push (TV_WHOPR_WPA
);
4142 if (pre_ipa_mem_report
)
4144 fprintf (stderr
, "Memory consumption before IPA\n");
4145 dump_memory_report (false);
4148 cgraph_function_flags_ready
= true;
4150 if (cgraph_dump_file
)
4151 dump_symtab (cgraph_dump_file
);
4152 bitmap_obstack_initialize (NULL
);
4153 cgraph_state
= CGRAPH_STATE_IPA_SSA
;
4155 execute_ipa_pass_list (g
->get_passes ()->all_regular_ipa_passes
);
4156 symtab_remove_unreachable_nodes (false, dump_file
);
4158 if (cgraph_dump_file
)
4160 fprintf (cgraph_dump_file
, "Optimized ");
4161 dump_symtab (cgraph_dump_file
);
4163 #ifdef ENABLE_CHECKING
4166 bitmap_obstack_release (NULL
);
4168 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
4169 timevar_pop (TV_WHOPR_WPA
);
4171 timevar_push (TV_WHOPR_PARTITIONING
);
4172 if (flag_lto_partition_1to1
)
4174 else if (flag_lto_partition_max
)
4177 lto_balanced_map ();
4179 /* AUX pointers are used by partitioning code to bookkeep number of
4180 partitions symbol is in. This is no longer needed. */
4181 FOR_EACH_SYMBOL (node
)
4182 node
->symbol
.aux
= NULL
;
4184 lto_stats
.num_cgraph_partitions
+= ltrans_partitions
.length ();
4185 timevar_pop (TV_WHOPR_PARTITIONING
);
4187 timevar_stop (TV_PHASE_OPT_GEN
);
4188 timevar_start (TV_PHASE_STREAM_OUT
);
4192 fprintf (stderr
, "\nStreaming out");
4195 lto_wpa_write_files ();
4197 fprintf (stderr
, "\n");
4199 timevar_stop (TV_PHASE_STREAM_OUT
);
4202 if (post_ipa_mem_report
)
4204 fprintf (stderr
, "Memory consumption after IPA\n");
4205 dump_memory_report (false);
4208 /* Show the LTO report before launching LTRANS. */
4209 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
4210 print_lto_report_1 ();
4212 dump_memory_report (true);
4216 static GTY(()) tree lto_eh_personality_decl
;
4218 /* Return the LTO personality function decl. */
4221 lto_eh_personality (void)
4223 if (!lto_eh_personality_decl
)
4225 /* Use the first personality DECL for our personality if we don't
4226 support multiple ones. This ensures that we don't artificially
4227 create the need for them in a single-language program. */
4228 if (first_personality_decl
&& !dwarf2out_do_cfi_asm ())
4229 lto_eh_personality_decl
= first_personality_decl
;
4231 lto_eh_personality_decl
= lhd_gcc_personality ();
4234 return lto_eh_personality_decl
;
4237 /* Set the process name based on the LTO mode. */
4240 lto_process_name (void)
4243 setproctitle ("lto1-lto");
4245 setproctitle ("lto1-wpa");
4247 setproctitle ("lto1-ltrans");
4251 /* Initialize the LTO front end. */
4256 lto_process_name ();
4257 lto_streamer_hooks_init ();
4259 lto_set_in_hooks (NULL
, get_section_data
, free_section_data
);
4260 memset (<o_stats
, 0, sizeof (lto_stats
));
4261 bitmap_obstack_initialize (NULL
);
4262 gimple_register_cfg_hooks ();
4266 /* Main entry point for the GIMPLE front end. This front end has
4267 three main personalities:
4269 - LTO (-flto). All the object files on the command line are
4270 loaded in memory and processed as a single translation unit.
4271 This is the traditional link-time optimization behavior.
4273 - WPA (-fwpa). Only the callgraph and summary information for
4274 files in the command file are loaded. A single callgraph
4275 (without function bodies) is instantiated for the whole set of
4276 files. IPA passes are only allowed to analyze the call graph
4277 and make transformation decisions. The callgraph is
4278 partitioned, each partition is written to a new object file
4279 together with the transformation decisions.
4281 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
4282 summary files from running again. Since WPA computed summary
4283 information and decided what transformations to apply, LTRANS
4284 simply applies them. */
4289 /* LTO is called as a front end, even though it is not a front end.
4290 Because it is called as a front end, TV_PHASE_PARSING and
4291 TV_PARSE_GLOBAL are active, and we need to turn them off while
4292 doing LTO. Later we turn them back on so they are active up in
4294 timevar_pop (TV_PARSE_GLOBAL
);
4295 timevar_stop (TV_PHASE_PARSING
);
4297 timevar_start (TV_PHASE_SETUP
);
4299 /* Initialize the LTO front end. */
4302 timevar_stop (TV_PHASE_SETUP
);
4303 timevar_start (TV_PHASE_STREAM_IN
);
4305 /* Read all the symbols and call graph from all the files in the
4307 read_cgraph_and_symbols (num_in_fnames
, in_fnames
);
4309 timevar_stop (TV_PHASE_STREAM_IN
);
4313 /* If WPA is enabled analyze the whole call graph and create an
4314 optimization plan. Otherwise, read in all the function
4315 bodies and continue with optimization. */
4317 do_whole_program_analysis ();
4320 struct varpool_node
*vnode
;
4322 timevar_start (TV_PHASE_OPT_GEN
);
4324 materialize_cgraph ();
4326 lto_promote_statics_nonwpa ();
4328 /* Let the middle end know that we have read and merged all of
4332 timevar_stop (TV_PHASE_OPT_GEN
);
4334 /* FIXME lto, if the processes spawned by WPA fail, we miss
4335 the chance to print WPA's report, so WPA will call
4336 print_lto_report before launching LTRANS. If LTRANS was
4337 launched directly by the driver we would not need to do
4339 if (flag_lto_report
|| (flag_wpa
&& flag_lto_report_wpa
))
4340 print_lto_report_1 ();
4342 /* Record the global variables. */
4343 FOR_EACH_DEFINED_VARIABLE (vnode
)
4344 vec_safe_push (lto_global_var_decls
, vnode
->symbol
.decl
);
4348 /* Here we make LTO pretend to be a parser. */
4349 timevar_start (TV_PHASE_PARSING
);
4350 timevar_push (TV_PARSE_GLOBAL
);
4353 #include "gt-lto-lto.h"