lto-symtab.c (lto_cgraph_replace_node): Free decl_in_state.
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48 #include "data-streamer.h"
49 #include "context.h"
50 #include "pass_manager.h"
51
/* Personality routine of the first function materialized that has one;
   recorded by lto_materialize_function.  GTY-marked so the GC keeps the
   decl alive.  */
static GTY(()) tree first_personality_decl;
53
54 /* Returns a hash code for P. */
55
56 static hashval_t
57 hash_name (const void *p)
58 {
59 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
60 return (hashval_t) htab_hash_string (ds->name);
61 }
62
63
64 /* Returns nonzero if P1 and P2 are equal. */
65
66 static int
67 eq_name (const void *p1, const void *p2)
68 {
69 const struct lto_section_slot *s1 =
70 (const struct lto_section_slot *) p1;
71 const struct lto_section_slot *s2 =
72 (const struct lto_section_slot *) p2;
73
74 return strcmp (s1->name, s2->name) == 0;
75 }
76
77 /* Free lto_section_slot */
78
79 static void
80 free_with_string (void *arg)
81 {
82 struct lto_section_slot *s = (struct lto_section_slot *)arg;
83
84 free (CONST_CAST (char *, s->name));
85 free (arg);
86 }
87
/* Create the hash table mapping section names to lto_section_slot
   entries.  Slots (and the name strings they own) are released by
   free_with_string when entries or the table are deleted.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  return htab_create (37, hash_name, eq_name, free_with_string);
}
95
/* Splay-tree key deletion callback.  KEY is a heap-allocated
   unsigned HOST_WIDE_INT (see lto_splay_tree_insert); free it.  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
103
104 /* Compare splay tree node ids A and B. */
105
106 static int
107 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
108 {
109 unsigned HOST_WIDE_INT ai;
110 unsigned HOST_WIDE_INT bi;
111
112 ai = *(unsigned HOST_WIDE_INT *) a;
113 bi = *(unsigned HOST_WIDE_INT *) b;
114
115 if (ai < bi)
116 return -1;
117 else if (ai > bi)
118 return 1;
119 return 0;
120 }
121
/* Look up splay tree node by ID in splay tree T.  The address of the
   local ID is passed as the key; the comparison callback dereferences
   it, so lookups need no heap allocation.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
129
130 /* Check if KEY has ID. */
131
132 static bool
133 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
134 {
135 return *(unsigned HOST_WIDE_INT *) key == id;
136 }
137
/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  Ownership of the allocation passes to
   the tree; lto_splay_tree_delete_id frees it.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
		       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}
150
/* Create a splay tree keyed by heap-allocated unsigned HOST_WIDE_INT ids.
   Keys are freed on node deletion; values are not owned by the tree
   (NULL value-delete function).  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
			 lto_splay_tree_delete_id,
			 NULL);
}
160
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).

   Non-recursive pre-order walk over the whole clone tree rooted at
   NODE, using the clones / next_sibling_clone / clone_of links.  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->symbol.analyzed)
	  return true;
	/* Descend into this clone's own clones first ...  */
	if (node->clones)
	  node = node->clones;
	/* ... otherwise advance to the next sibling ...  */
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* ... and when a subtree is exhausted, climb to the nearest
	       ancestor that still has an unvisited sibling.  Reaching
	       ORIG terminates the walk.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
188
/* Read the function body for the function associated with NODE.

   Only functions with gimple bodies that were analyzed, functions used
   as abstract origins, and functions with analyzed clones need their
   bodies; clones themselves share the body of their clone_of origin
   and are skipped.  Finally registers DECL with the middle end.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if ((cgraph_function_with_gimple_body_p (node) && node->symbol.analyzed)
      || node->used_as_abstract_origin
      || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;
      /* Remember the first personality routine encountered; see
	 first_personality_decl.  */
      if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
213
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   On-disk layout of DATA: one tree-cache index identifying the
   function context (the index of void_type_node stands for the global
   decl state), then for each of the LTO_N_DECL_STREAMS streams a
   length word followed by that many tree-cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  /* First word: the owning FUNCTION_DECL, or void_type_node for the
     file-global state (recorded as NULL_TREE).  */
  ix = *data++;
  decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  /* Resolve each stream's cache indices into GC-allocated tree
     vectors.  */
  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
252
/* ??? Old hashing and merging code follows, we keep it for statistics
   purposes for now.  */

/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
/* Cache mapping a type to its gimple type hash value (tree_int_map
   entries), filled lazily by the hashing walk below.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

     -2: The pair (T1, T2) has just been inserted in the table.
      0: T1 and T2 are different types.
      1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;	/* Smaller TYPE_UID of the pair.  */
  unsigned int uid2;	/* Larger TYPE_UID of the pair.  */
  signed char same_p;	/* Comparison verdict, see above.  */
};
typedef struct type_pair_d *type_pair_t;

/* Fixed-size direct-mapped cache of pair comparison results; colliding
   entries are simply evicted (see lookup_type_pair).  */
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
288
/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert a
   new entry (evicting any colliding one) if none existed; a freshly
   inserted entry has SAME_P == -2.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Canonicalize the order so (T1, T2) and (T2, T1) map to the same
     cache entry.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Cheap ad-hoc hash; iterative_hash_hashval_t would imply function
     calls and we know UIDs are in a limited range.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  /* Miss: claim the slot for this pair and mark it undecided.  */
  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
324
/* Per pointer state for the SCC finding (Tarjan-style DFS).  The
   on_sccstack flag is not strictly required, it is true when there is
   no hash value recorded for the type and false otherwise.  But
   querying that is slower.  */

struct sccs
{
  unsigned int dfsnum;		/* DFS pre-order number.  */
  unsigned int low;		/* Lowest dfsnum reachable from here.  */
  bool on_sccstack;		/* Still on the SCC stack?  */
  union {
    hashval_t hash;		/* Used by the type-hashing walk.  */
    signed char same_p;		/* Used by the type-comparison walk.  */
  } u;
};

/* DFS counter for the hashing walk (iterative_hash_gimple_type).  */
static unsigned int next_dfs_num;
/* DFS counter for the comparison walk (gtc_visit).  */
static unsigned int gtc_next_dfs_num;
343
/* Return true if T1 and T2 have the same name.  Two types with no name
   at all compare equal; a named and an unnamed type compare unequal.
   (An older version took a FOR_COMPLETION_P argument; the comment
   referring to it was stale.)  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  /* Reduce TYPE_DECLs to their identifiers.  */
  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
			     vec<type_pair_t> *,
			     struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
   (A stale comment used to mention a FOR_MERGING_P mode; there is no
   such parameter.)  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   vec<type_pair_t> *sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
492
/* Worker for gimple_types_compatible_p.  P is the cache entry for the
   pair (T1, T2); its SAME_P must be -2 (undecided) on entry.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     vec<type_pair_t> *sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p == -2);

  /* Push a fresh DFS state for this pair onto the SCC stack.  */
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  sccstack->safe_push (p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.
		 PLACEHOLDER_EXPR bounds compare equal to each other.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* Differing argument counts (or varargs vs. not) mean
	     different types.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  /* Tarjan SCC root check: if no back edge escaped this subtree, the
     nodes above P on the stack form a complete SCC sharing P's verdict.  */
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
	 comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = sccstack->pop ();
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
807
/* Return true iff T1 and T2 are structurally identical.  Handles the
   cheap early-out checks inline, then consults the pair cache and
   finally runs the SCC-based deep comparison.  (A stale comment used
   to describe a FOR_MERGING_P parameter that does not exist.)  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
906
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
			    struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       vec<tree> *sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      /* NOTE(review): cstate is necessarily NULL here, so this inner
	 check is redundant -- kept as-is.  */
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
959
960 /* Hash NAME with the previous hash value V and return it. */
961
962 static hashval_t
963 iterative_hash_name (tree name, hashval_t v)
964 {
965 if (!name)
966 return v;
967 v = iterative_hash_hashval_t (TREE_CODE (name), v);
968 if (TREE_CODE (name) == TYPE_DECL)
969 name = DECL_NAME (name);
970 if (!name)
971 return v;
972 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
973 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
974 }
975
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;	    /* The SCC member type.  */
  hashval_t hash;   /* Its interim hash value.  */
};
982
983 /* Compare two type, hashvalue pairs. */
984
985 static int
986 type_hash_pair_compare (const void *p1_, const void *p2_)
987 {
988 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
989 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
990 if (p1->hash < p2->hash)
991 return -1;
992 else if (p1->hash > p2->hash)
993 return 1;
994 return 0;
995 }
996
997 /* Returning a hash value for gimple type TYPE combined with VAL.
998 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
999
1000 To hash a type we end up hashing in types that are reachable.
1001 Through pointers we can end up with cycles which messes up the
1002 required property that we need to compute the same hash value
1003 for structurally equivalent types. To avoid this we have to
1004 hash all types in a cycle (the SCC) in a commutative way. The
1005 easiest way is to not mix in the hashes of the SCC members at
1006 all. To make this work we have to delay setting the hash
1007 values of the SCC until it is complete. */
1008
/* Worker for gimple_type_hash: compute a hash value for TYPE, mixing VAL
   in at the very end, while performing a DFS walk (Tarjan SCC discovery)
   over all types reachable from TYPE.  The resulting per-type hashes are
   cached in type_hash_cache for every member of each completed SCC, so
   that structurally equivalent types hash identically regardless of the
   entry point into a type cycle.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    vec<tree> *sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  /* Push TYPE and initialize its Tarjan DFS bookkeeping.  */
  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      /* Mix in the argument count so differing arities hash apart.  */
      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      /* Hash in the field names, types and the field count.  */
      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = sccstack->length () - 1;
	  while ((*sccstack)[first] != type)
	    --first;
	  /* +1 accounts for the member already popped into X above.  */
	  size = sccstack->length () - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = sccstack->pop ();
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
1217
1218 /* Returns a hash value for P (assumed to be a type). The hash value
1219 is computed using some distinguishing features of the type. Note
1220 that we cannot use pointer hashing here as we may be dealing with
1221 two distinct instances of the same type.
1222
1223 This function should produce the same hash value for two compatible
1224 types according to gimple_types_compatible_p. */
1225
static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  vec<tree> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  /* Fast path: a previous SCC walk already cached the hash of T.  */
  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  /* The per-walk scratch state is not needed once the hashes are in
     type_hash_cache.  */
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
1254
1255 /* Returns nonzero if P1 and P2 are equal. */
1256
1257 static int
1258 gimple_type_eq (const void *p1, const void *p2)
1259 {
1260 const_tree t1 = (const_tree) p1;
1261 const_tree t2 = (const_tree) p2;
1262 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1263 CONST_CAST_TREE (t2));
1264 }
1265
1266 /* Register type T in the global type table gimple_types. */
1267
1268 static tree
1269 gimple_register_type (tree t)
1270 {
1271 void **slot;
1272
1273 /* See if we already have an equivalent type registered. */
1274 slot = htab_find_slot (gimple_types, t, INSERT);
1275 if (*slot
1276 && *(tree *)slot != t)
1277 return (tree) *((tree *) slot);
1278
1279 /* If not, insert it to the cache and the hash. */
1280 *slot = (void *) t;
1281 return t;
1282 }
1283
1284 /* End of old merging code. */
1285
1286
1287
1288 /* A hashtable of trees that potentially refer to variables or functions
1289 that must be replaced with their prevailing variant. */
1290 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node))) htab_t
1291 tree_with_vars;
1292
1293 /* Remember that T is a tree that (potentially) refers to a variable
1294 or function decl that may be replaced with its prevailing variant. */
1295 static void
1296 remember_with_vars (tree t)
1297 {
1298 *(tree *) htab_find_slot (tree_with_vars, t, INSERT) = t;
1299 }
1300
/* If TT is a public VAR_DECL or FUNCTION_DECL, record the *enclosing*
   tree for later fixup — note that it passes the variable `t' from the
   expansion context (not TT) to remember_with_vars, so every user below
   must name its argument `t'.  */
#define MAYBE_REMEMBER_WITH_VARS(tt) \
  do \
    { \
      if (tt) \
	{ \
	  if (VAR_OR_FUNCTION_DECL_P (tt) && TREE_PUBLIC (tt)) \
	    remember_with_vars (t); \
	} \
    } while (0)
1310
/* Record T for fixup if any tree_typed field of T refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_typed (tree t)
{
  MAYBE_REMEMBER_WITH_VARS (TREE_TYPE (t));
}
1318
/* Record T for fixup if any tree_common field of T refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_common (tree t)
{
  maybe_remember_with_vars_typed (t);
  MAYBE_REMEMBER_WITH_VARS (TREE_CHAIN (t));
}
1327
/* Record T for fixup if any decl_minimal field of T refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_decl_minimal (tree t)
{
  maybe_remember_with_vars_common (t);
  MAYBE_REMEMBER_WITH_VARS (DECL_NAME (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_CONTEXT (t));
}
1337
/* Record T for fixup if any decl_common field of T refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_decl_common (tree t)
{
  maybe_remember_with_vars_decl_minimal (t);
  MAYBE_REMEMBER_WITH_VARS (DECL_SIZE (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_SIZE_UNIT (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_INITIAL (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_ATTRIBUTES (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_ABSTRACT_ORIGIN (t));
}
1350
/* Record T for fixup if any decl_with_vis field of T refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_decl_with_vis (tree t)
{
  maybe_remember_with_vars_decl_common (t);

  /* Accessor macro has side-effects, use field-name here.  */
  MAYBE_REMEMBER_WITH_VARS (t->decl_with_vis.assembler_name);
  MAYBE_REMEMBER_WITH_VARS (DECL_SECTION_NAME (t));
}
1362
/* Record T for fixup if any decl_non_common field of T refers to a
   public variable or function decl.  */

static void
maybe_remember_with_vars_decl_non_common (tree t)
{
  maybe_remember_with_vars_decl_with_vis (t);
  MAYBE_REMEMBER_WITH_VARS (DECL_ARGUMENT_FLD (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_RESULT_FLD (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_VINDEX (t));
}
1373
/* Record a FUNCTION_DECL T for fixup if any of its fields refers to a
   public variable or function decl.  */

static void
maybe_remember_with_vars_function (tree t)
{
  maybe_remember_with_vars_decl_non_common (t);
  MAYBE_REMEMBER_WITH_VARS (DECL_FUNCTION_PERSONALITY (t));
}
1382
/* Record a FIELD_DECL T for fixup if any of its fields refers to a
   public variable or function decl.  */

static void
maybe_remember_with_vars_field_decl (tree t)
{
  maybe_remember_with_vars_decl_common (t);
  MAYBE_REMEMBER_WITH_VARS (DECL_FIELD_OFFSET (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_BIT_FIELD_TYPE (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_QUALIFIER (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_FIELD_BIT_OFFSET (t));
  MAYBE_REMEMBER_WITH_VARS (DECL_FCONTEXT (t));
}
1395
/* Record a type T for fixup if any of its fields refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_type (tree t)
{
  maybe_remember_with_vars_common (t);
  MAYBE_REMEMBER_WITH_VARS (TYPE_CACHED_VALUES (t));
  MAYBE_REMEMBER_WITH_VARS (TYPE_SIZE (t));
  MAYBE_REMEMBER_WITH_VARS (TYPE_SIZE_UNIT (t));
  MAYBE_REMEMBER_WITH_VARS (TYPE_ATTRIBUTES (t));
  MAYBE_REMEMBER_WITH_VARS (TYPE_NAME (t));

  /* Accessors are for derived node types only.
     NOTE(review): only the MINVAL walk is guarded against pointer
     types here; TYPE_MAXVAL is walked unconditionally — confirm this
     asymmetry is intended.  */
  if (!POINTER_TYPE_P (t))
    MAYBE_REMEMBER_WITH_VARS (TYPE_MINVAL (t));
  MAYBE_REMEMBER_WITH_VARS (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  MAYBE_REMEMBER_WITH_VARS (t->type_non_common.binfo);

  MAYBE_REMEMBER_WITH_VARS (TYPE_CONTEXT (t));
}
1418
/* Record a BINFO T for fixup if any of its fields refers to a public
   variable or function decl.  */

static void
maybe_remember_with_vars_binfo (tree t)
{
  unsigned HOST_WIDE_INT i, n;

  maybe_remember_with_vars_common (t);
  MAYBE_REMEMBER_WITH_VARS (BINFO_VTABLE (t));
  MAYBE_REMEMBER_WITH_VARS (BINFO_OFFSET (t));
  MAYBE_REMEMBER_WITH_VARS (BINFO_VIRTUALS (t));
  MAYBE_REMEMBER_WITH_VARS (BINFO_VPTR_FIELD (t));
  n = vec_safe_length (BINFO_BASE_ACCESSES (t));
  for (i = 0; i < n; i++)
    MAYBE_REMEMBER_WITH_VARS (BINFO_BASE_ACCESS (t, i));
  /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
     and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
  n = BINFO_N_BASE_BINFOS (t);
  for (i = 0; i < n; i++)
    MAYBE_REMEMBER_WITH_VARS (BINFO_BASE_BINFO (t, i));
}
1440
/* Record a CONSTRUCTOR T for fixup if any of its elements refers to a
   public variable or function decl.  */

static void
maybe_remember_with_vars_constructor (tree t)
{
  unsigned HOST_WIDE_INT idx;
  constructor_elt *ce;

  maybe_remember_with_vars_typed (t);

  /* Walk both the index and the value of every element.  */
  for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
    {
      MAYBE_REMEMBER_WITH_VARS (ce->index);
      MAYBE_REMEMBER_WITH_VARS (ce->value);
    }
}
1457
/* Record an expression tree T for fixup if any of its operands refers
   to a public variable or function decl.  */

static void
maybe_remember_with_vars_expr (tree t)
{
  int i;
  maybe_remember_with_vars_typed (t);
  /* Walk operands last-to-first; the recorded set is order-independent.  */
  for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
    MAYBE_REMEMBER_WITH_VARS (TREE_OPERAND (t, i));
}
1468
/* Dispatch on the code of tree T and record T in tree_with_vars when
   any of its fields refers to a public variable or function decl that
   may later be replaced with its prevailing variant.  Codes with no
   such fields (identifiers, blocks, option nodes, ...) are skipped.  */

static void
maybe_remember_with_vars (tree t)
{
  switch (TREE_CODE (t))
    {
    case IDENTIFIER_NODE:
      break;

    case TREE_LIST:
      MAYBE_REMEMBER_WITH_VARS (TREE_VALUE (t));
      MAYBE_REMEMBER_WITH_VARS (TREE_PURPOSE (t));
      MAYBE_REMEMBER_WITH_VARS (TREE_CHAIN (t));
      break;

    case FIELD_DECL:
      maybe_remember_with_vars_field_decl (t);
      break;

    case LABEL_DECL:
    case CONST_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case IMPORTED_DECL:
      maybe_remember_with_vars_decl_common (t);
      break;

    case VAR_DECL:
      maybe_remember_with_vars_decl_with_vis (t);
      break;

    case TYPE_DECL:
      maybe_remember_with_vars_decl_non_common (t);
      break;

    case FUNCTION_DECL:
      maybe_remember_with_vars_function (t);
      break;

    case TREE_BINFO:
      maybe_remember_with_vars_binfo (t);
      break;

    case PLACEHOLDER_EXPR:
      maybe_remember_with_vars_common (t);
      break;

    case BLOCK:
    case TRANSLATION_UNIT_DECL:
    case OPTIMIZATION_NODE:
    case TARGET_OPTION_NODE:
      break;

    case CONSTRUCTOR:
      maybe_remember_with_vars_constructor (t);
      break;

    default:
      if (TYPE_P (t))
	maybe_remember_with_vars_type (t);
      else if (CONSTANT_CLASS_P (t))
	MAYBE_REMEMBER_WITH_VARS (TREE_TYPE (t));
      else if (EXPR_P (t))
	maybe_remember_with_vars_expr (t);
      else
	/* Anything else (e.g. SSA names) is recorded unconditionally.  */
	remember_with_vars (t);
    }
}
1541
1542
1543 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1544
1545 static enum ld_plugin_symbol_resolution
1546 get_resolution (struct data_in *data_in, unsigned index)
1547 {
1548 if (data_in->globals_resolution.exists ())
1549 {
1550 ld_plugin_symbol_resolution_t ret;
1551 /* We can have references to not emitted functions in
1552 DECL_FUNCTION_PERSONALITY at least. So we can and have
1553 to indeed return LDPR_UNKNOWN in some cases. */
1554 if (data_in->globals_resolution.length () <= index)
1555 return LDPR_UNKNOWN;
1556 ret = data_in->globals_resolution[index];
1557 return ret;
1558 }
1559 else
1560 /* Delay resolution finding until decl merging. */
1561 return LDPR_UNKNOWN;
1562 }
1563
1564 /* We need to record resolutions until symbol table is read. */
1565 static void
1566 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1567 enum ld_plugin_symbol_resolution resolution)
1568 {
1569 if (resolution == LDPR_UNKNOWN)
1570 return;
1571 if (!file_data->resolution_map)
1572 file_data->resolution_map = pointer_map_create ();
1573 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1574 }
1575
1576 /* Register DECL with the global symbol table and change its
1577 name if necessary to avoid name clashes for static globals across
1578 different files. */
1579
1580 static void
1581 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1582 unsigned ix)
1583 {
1584 tree context;
1585
1586 /* Variable has file scope, not local. */
1587 if (!TREE_PUBLIC (decl)
1588 && !((context = decl_function_context (decl))
1589 && auto_var_in_fn_p (decl, context)))
1590 rest_of_decl_compilation (decl, 1, 0);
1591
1592 /* If this variable has already been declared, queue the
1593 declaration for merging. */
1594 if (TREE_PUBLIC (decl))
1595 register_resolution (data_in->file_data,
1596 decl, get_resolution (data_in, ix));
1597 }
1598
1599
1600 /* Register DECL with the global symbol table and change its
1601 name if necessary to avoid name clashes for static globals across
1602 different files. DATA_IN contains descriptors and tables for the
1603 file being read. */
1604
1605 static void
1606 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1607 unsigned ix)
1608 {
1609 /* If this variable has already been declared, queue the
1610 declaration for merging. */
1611 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1612 register_resolution (data_in->file_data,
1613 decl, get_resolution (data_in, ix));
1614 }
1615
1616
1617 /* For the type T re-materialize it in the type variant list and
1618 the pointer/reference-to chains. */
1619
1620 static void
1621 lto_fixup_prevailing_type (tree t)
1622 {
1623 /* The following re-creates proper variant lists while fixing up
1624 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1625 variant list state before fixup is broken. */
1626
1627 /* If we are not our own variant leader link us into our new leaders
1628 variant list. */
1629 if (TYPE_MAIN_VARIANT (t) != t)
1630 {
1631 tree mv = TYPE_MAIN_VARIANT (t);
1632 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1633 TYPE_NEXT_VARIANT (mv) = t;
1634 }
1635
1636 /* The following reconstructs the pointer chains
1637 of the new pointed-to type if we are a main variant. We do
1638 not stream those so they are broken before fixup. */
1639 if (TREE_CODE (t) == POINTER_TYPE
1640 && TYPE_MAIN_VARIANT (t) == t)
1641 {
1642 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1643 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1644 }
1645 else if (TREE_CODE (t) == REFERENCE_TYPE
1646 && TYPE_MAIN_VARIANT (t) == t)
1647 {
1648 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1649 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1650 }
1651 }
1652
1653
/* We keep prevailing tree SCCs in a hashtable with manual collision
   handling (in case all hashes compare the same) and keep the colliding
   entries in the tree_scc->next chain.  */

struct tree_scc
{
  /* Next SCC on the collision chain for this hash bucket.  */
  tree_scc *next;
  /* Hash of the whole SCC.  */
  hashval_t hash;
  /* Number of trees in the SCC.  */
  unsigned len;
  /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
     which share the same individual tree hash).  */
  unsigned entry_len;
  /* The members of the SCC.
     We only need to remember the first entry node candidate for prevailing
     SCCs (but of course have access to all entries for SCCs we are
     processing).
     ??? For prevailing SCCs we really only need hash and the first
     entry candidate, but that's too awkward to implement.
     Trailing flexible member: the struct is over-allocated to LEN
     entries.  */
  tree entries[1];
};
1676
/* Hash-table traits for tree_scc entries; removal is a no-op because
   entries live on tree_scc_hash_obstack.  */
struct tree_scc_hasher : typed_noop_remove <tree_scc>
{
  typedef tree_scc value_type;
  typedef tree_scc compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
1684
/* Return the cached whole-SCC hash of SCC.  */
hashval_t
tree_scc_hasher::hash (const value_type *scc)
{
  return scc->hash;
}
1690
1691 bool
1692 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1693 {
1694 if (scc1->hash != scc2->hash
1695 || scc1->len != scc2->len
1696 || scc1->entry_len != scc2->entry_len)
1697 return false;
1698 return true;
1699 }
1700
/* Table of prevailing tree SCCs and the obstack its entries live on.  */
static hash_table <tree_scc_hasher> tree_scc_hash;
static struct obstack tree_scc_hash_obstack;

/* Statistics counters for SCC-based tree merging, accumulated during
   read-in.  */
static unsigned long num_merged_types;
static unsigned long num_prevailing_types;
static unsigned long num_not_merged_types;
static unsigned long num_not_merged_types_in_same_scc;
static unsigned long num_not_merged_types_trees;
static unsigned long num_not_merged_types_in_same_scc_trees;
static unsigned long num_type_scc_trees;
static unsigned long total_scc_size;
static unsigned long num_sccs_read;
static unsigned long total_scc_size_merged;
static unsigned long num_sccs_merged;
static unsigned long num_scc_compares;
static unsigned long num_scc_compare_collisions;
1717
1718
1719 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1720 recursing through in-SCC tree edges. Returns true if the SCCs entered
1721 through T1 and T2 are equal and fills in *MAP with the pairs of
1722 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1723
1724 static bool
1725 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1726 {
1727 enum tree_code code;
1728
1729 /* Mark already visited nodes. */
1730 TREE_ASM_WRITTEN (t2) = 1;
1731
1732 /* Push the pair onto map. */
1733 (*map)[0] = t1;
1734 (*map)[1] = t2;
1735 *map = *map + 2;
1736
1737 /* Compare value-fields. */
1738 #define compare_values(X) \
1739 do { \
1740 if (X(t1) != X(t2)) \
1741 return false; \
1742 } while (0)
1743
1744 compare_values (TREE_CODE);
1745 code = TREE_CODE (t1);
1746
1747 if (!TYPE_P (t1))
1748 {
1749 compare_values (TREE_SIDE_EFFECTS);
1750 compare_values (TREE_CONSTANT);
1751 compare_values (TREE_READONLY);
1752 compare_values (TREE_PUBLIC);
1753 }
1754 compare_values (TREE_ADDRESSABLE);
1755 compare_values (TREE_THIS_VOLATILE);
1756 if (DECL_P (t1))
1757 compare_values (DECL_UNSIGNED);
1758 else if (TYPE_P (t1))
1759 compare_values (TYPE_UNSIGNED);
1760 if (TYPE_P (t1))
1761 compare_values (TYPE_ARTIFICIAL);
1762 else
1763 compare_values (TREE_NO_WARNING);
1764 compare_values (TREE_NOTHROW);
1765 compare_values (TREE_STATIC);
1766 if (code != TREE_BINFO)
1767 compare_values (TREE_PRIVATE);
1768 compare_values (TREE_PROTECTED);
1769 compare_values (TREE_DEPRECATED);
1770 if (TYPE_P (t1))
1771 {
1772 compare_values (TYPE_SATURATING);
1773 compare_values (TYPE_ADDR_SPACE);
1774 }
1775 else if (code == SSA_NAME)
1776 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1777
1778 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1779 {
1780 compare_values (TREE_INT_CST_LOW);
1781 compare_values (TREE_INT_CST_HIGH);
1782 }
1783
1784 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1785 {
1786 /* ??? No suitable compare routine available. */
1787 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1788 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1789 if (r1.cl != r2.cl
1790 || r1.decimal != r2.decimal
1791 || r1.sign != r2.sign
1792 || r1.signalling != r2.signalling
1793 || r1.canonical != r2.canonical
1794 || r1.uexp != r2.uexp)
1795 return false;
1796 for (unsigned i = 0; i < SIGSZ; ++i)
1797 if (r1.sig[i] != r2.sig[i])
1798 return false;
1799 }
1800
1801 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1802 if (!fixed_compare (EQ_EXPR,
1803 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1804 return false;
1805
1806
1807 /* We don't want to compare locations, so there is nothing do compare
1808 for TS_DECL_MINIMAL. */
1809
1810 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1811 {
1812 compare_values (DECL_MODE);
1813 compare_values (DECL_NONLOCAL);
1814 compare_values (DECL_VIRTUAL_P);
1815 compare_values (DECL_IGNORED_P);
1816 compare_values (DECL_ABSTRACT);
1817 compare_values (DECL_ARTIFICIAL);
1818 compare_values (DECL_USER_ALIGN);
1819 compare_values (DECL_PRESERVE_P);
1820 compare_values (DECL_EXTERNAL);
1821 compare_values (DECL_GIMPLE_REG_P);
1822 compare_values (DECL_ALIGN);
1823 if (code == LABEL_DECL)
1824 {
1825 compare_values (EH_LANDING_PAD_NR);
1826 compare_values (LABEL_DECL_UID);
1827 }
1828 else if (code == FIELD_DECL)
1829 {
1830 compare_values (DECL_PACKED);
1831 compare_values (DECL_NONADDRESSABLE_P);
1832 compare_values (DECL_OFFSET_ALIGN);
1833 }
1834 else if (code == VAR_DECL)
1835 {
1836 compare_values (DECL_HAS_DEBUG_EXPR_P);
1837 compare_values (DECL_NONLOCAL_FRAME);
1838 }
1839 if (code == RESULT_DECL
1840 || code == PARM_DECL
1841 || code == VAR_DECL)
1842 {
1843 compare_values (DECL_BY_REFERENCE);
1844 if (code == VAR_DECL
1845 || code == PARM_DECL)
1846 compare_values (DECL_HAS_VALUE_EXPR_P);
1847 }
1848 }
1849
1850 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1851 compare_values (DECL_REGISTER);
1852
1853 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1854 {
1855 compare_values (DECL_COMMON);
1856 compare_values (DECL_DLLIMPORT_P);
1857 compare_values (DECL_WEAK);
1858 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1859 compare_values (DECL_COMDAT);
1860 compare_values (DECL_VISIBILITY);
1861 compare_values (DECL_VISIBILITY_SPECIFIED);
1862 if (code == VAR_DECL)
1863 {
1864 compare_values (DECL_HARD_REGISTER);
1865 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1866 compare_values (DECL_IN_CONSTANT_POOL);
1867 compare_values (DECL_TLS_MODEL);
1868 }
1869 if (VAR_OR_FUNCTION_DECL_P (t1))
1870 compare_values (DECL_INIT_PRIORITY);
1871 }
1872
1873 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1874 {
1875 compare_values (DECL_BUILT_IN_CLASS);
1876 compare_values (DECL_STATIC_CONSTRUCTOR);
1877 compare_values (DECL_STATIC_DESTRUCTOR);
1878 compare_values (DECL_UNINLINABLE);
1879 compare_values (DECL_POSSIBLY_INLINED);
1880 compare_values (DECL_IS_NOVOPS);
1881 compare_values (DECL_IS_RETURNS_TWICE);
1882 compare_values (DECL_IS_MALLOC);
1883 compare_values (DECL_IS_OPERATOR_NEW);
1884 compare_values (DECL_DECLARED_INLINE_P);
1885 compare_values (DECL_STATIC_CHAIN);
1886 compare_values (DECL_NO_INLINE_WARNING_P);
1887 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1888 compare_values (DECL_NO_LIMIT_STACK);
1889 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1890 compare_values (DECL_PURE_P);
1891 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1892 compare_values (DECL_FINAL_P);
1893 compare_values (DECL_CXX_CONSTRUCTOR_P);
1894 compare_values (DECL_CXX_DESTRUCTOR_P);
1895 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1896 compare_values (DECL_FUNCTION_CODE);
1897 if (DECL_STATIC_DESTRUCTOR (t1))
1898 compare_values (DECL_FINI_PRIORITY);
1899 }
1900
1901 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1902 {
1903 compare_values (TYPE_MODE);
1904 compare_values (TYPE_STRING_FLAG);
1905 compare_values (TYPE_NO_FORCE_BLK);
1906 compare_values (TYPE_NEEDS_CONSTRUCTING);
1907 if (RECORD_OR_UNION_TYPE_P (t1))
1908 {
1909 compare_values (TYPE_TRANSPARENT_AGGR);
1910 compare_values (TYPE_FINAL_P);
1911 }
1912 else if (code == ARRAY_TYPE)
1913 compare_values (TYPE_NONALIASED_COMPONENT);
1914 compare_values (TYPE_PACKED);
1915 compare_values (TYPE_RESTRICT);
1916 compare_values (TYPE_USER_ALIGN);
1917 compare_values (TYPE_READONLY);
1918 compare_values (TYPE_PRECISION);
1919 compare_values (TYPE_ALIGN);
1920 compare_values (TYPE_ALIAS_SET);
1921 }
1922
1923 /* We don't want to compare locations, so there is nothing do compare
1924 for TS_EXP. */
1925
1926 /* BLOCKs are function local and we don't merge anything there, so
1927 simply refuse to merge. */
1928 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1929 return false;
1930
1931 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1932 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1933 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1934 return false;
1935
1936 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1937 if (memcmp (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2),
1938 sizeof (struct cl_target_option)) != 0)
1939 return false;
1940
1941 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1942 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1943 sizeof (struct cl_optimization)) != 0)
1944 return false;
1945
1946 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1947 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1948 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1949 return false;
1950
1951 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1952 compare_values (CONSTRUCTOR_NELTS);
1953
1954 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1955 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1956 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1957 IDENTIFIER_LENGTH (t1)) != 0)
1958 return false;
1959
1960 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1961 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1962 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1963 TREE_STRING_LENGTH (t1)) != 0)
1964 return false;
1965
1966 #undef compare_values
1967
1968
1969 /* Compare pointer fields. */
1970
1971 /* Recurse. Search & Replaced from DFS_write_tree_body.
1972 Folding the early checks into the compare_tree_edges recursion
1973 macro makes debugging way quicker as you are able to break on
1974 compare_tree_sccs_1 and simply finish until a call returns false
1975 to spot the SCC members with the difference. */
1976 #define compare_tree_edges(E1, E2) \
1977 do { \
1978 tree t1_ = (E1), t2_ = (E2); \
1979 if (t1_ != t2_ \
1980 && (!t1_ || !t2_ \
1981 || !TREE_VISITED (t2_) \
1982 || (!TREE_ASM_WRITTEN (t2_) \
1983 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1984 return false; \
1985 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1986 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1987 } while (0)
1988
1989 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1990 {
1991 if (code != IDENTIFIER_NODE)
1992 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1993 }
1994
1995 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1996 {
1997 unsigned i;
1998 /* Note that the number of elements for EXPR has already been emitted
1999 in EXPR's header (see streamer_write_tree_header). */
2000 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
2001 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
2002 }
2003
2004 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
2005 {
2006 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
2007 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
2008 }
2009
2010 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
2011 {
2012 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
2013 /* ??? Global decls from different TUs have non-matching
2014 TRANSLATION_UNIT_DECLs. Only consider a small set of
2015 decls equivalent, we should not end up merging others. */
2016 if ((code == TYPE_DECL
2017 || code == NAMESPACE_DECL
2018 || code == IMPORTED_DECL
2019 || code == CONST_DECL
2020 || (VAR_OR_FUNCTION_DECL_P (t1)
2021 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
2022 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
2023 ;
2024 else
2025 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
2026 }
2027
2028 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2029 {
2030 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
2031 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
2032 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
2033 if ((code == VAR_DECL
2034 || code == PARM_DECL)
2035 && DECL_HAS_VALUE_EXPR_P (t1))
2036 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
2037 if (code == VAR_DECL
2038 && DECL_HAS_DEBUG_EXPR_P (t1))
2039 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
2040 /* LTO specific edges. */
2041 if (code != FUNCTION_DECL
2042 && code != TRANSLATION_UNIT_DECL)
2043 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
2044 }
2045
2046 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2047 {
2048 if (code == FUNCTION_DECL)
2049 {
2050 tree a1, a2;
2051 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
2052 a1 || a2;
2053 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
2054 compare_tree_edges (a1, a2);
2055 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
2056 }
2057 else if (code == TYPE_DECL)
2058 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
2059 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
2060 }
2061
2062 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2063 {
2064 /* Make sure we don't inadvertently set the assembler name. */
2065 if (DECL_ASSEMBLER_NAME_SET_P (t1))
2066 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
2067 DECL_ASSEMBLER_NAME (t2));
2068 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
2069 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
2070 }
2071
2072 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2073 {
2074 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
2075 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
2076 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
2077 DECL_BIT_FIELD_REPRESENTATIVE (t2));
2078 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
2079 DECL_FIELD_BIT_OFFSET (t2));
2080 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
2081 }
2082
2083 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2084 {
2085 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
2086 DECL_FUNCTION_PERSONALITY (t2));
2087 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
2088 DECL_FUNCTION_SPECIFIC_TARGET (t2));
2089 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
2090 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
2091 }
2092
2093 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
2094 {
2095 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
2096 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
2097 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
2098 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
2099 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
2100 reconstructed during fixup. */
2101 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
2102 during fixup. */
2103 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
2104 /* ??? Global types from different TUs have non-matching
2105 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
2106 equal. */
2107 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
2108 ;
2109 else
2110 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
2111 /* TYPE_CANONICAL is re-computed during type merging, so do not
2112 compare it here. */
2113 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
2114 }
2115
2116 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
2117 {
2118 if (code == ENUMERAL_TYPE)
2119 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
2120 else if (code == ARRAY_TYPE)
2121 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
2122 else if (RECORD_OR_UNION_TYPE_P (t1))
2123 {
2124 tree f1, f2;
2125 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
2126 f1 || f2;
2127 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
2128 compare_tree_edges (f1, f2);
2129 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
2130 }
2131 else if (code == FUNCTION_TYPE
2132 || code == METHOD_TYPE)
2133 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
2134 if (!POINTER_TYPE_P (t1))
2135 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
2136 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
2137 }
2138
2139 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
2140 {
2141 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2142 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
2143 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
2144 }
2145
2146 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
2147 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
2148 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
2149
2150 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
2151 {
2152 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
2153 compare_tree_edges (TREE_OPERAND (t1, i),
2154 TREE_OPERAND (t2, i));
2155
2156 /* BLOCKs are function local and we don't merge anything there. */
2157 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
2158 return false;
2159 }
2160
2161 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
2162 {
2163 unsigned i;
2164 tree t;
2165 /* Lengths have already been compared above. */
2166 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
2167 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
2168 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
2169 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
2170 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
2171 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
2172 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
2173 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
2174 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
2175 }
2176
2177 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
2178 {
2179 unsigned i;
2180 tree index, value;
2181 /* Lengths have already been compared above. */
2182 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
2183 {
2184 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
2185 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
2186 }
2187 }
2188
2189 #undef compare_tree_edges
2190
2191 return true;
2192 }
2193
/* Compare the tree scc SCC to the prevailing candidate PSCC, filling
   out MAP if they are equal.  MAP is scratch storage of 2 * SCC-length
   tree slots that compare_tree_sccs_1 fills with the (prevailing, read)
   tree pairing on success.  */

static bool
compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
		   tree *map)
{
  /* Assume SCC entry hashes are sorted after their cardinality.  Which
     means we can simply take the first n-tuple of equal hashes
     (which is recorded as entry_len) and do n SCC entry candidate
     comparisons.  */
  for (unsigned i = 0; i < pscc->entry_len; ++i)
    {
      /* Restart the map fill for each candidate entry pairing.  */
      tree *mapp = map;
      num_scc_compare_collisions++;
      if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
	{
	  /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
	     on the scc as all trees will be freed.  */
	  return true;
	}
      /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
	 the SCC prevails.  */
      for (unsigned j = 0; j < scc->len; ++j)
	TREE_ASM_WRITTEN (scc->entries[j]) = 0;
    }

  return false;
}
2223
2224 /* QSort sort function to sort a map of two pointers after the 2nd
2225 pointer. */
2226
2227 static int
2228 cmp_tree (const void *p1_, const void *p2_)
2229 {
2230 tree *p1 = (tree *)(const_cast<void *>(p1_));
2231 tree *p2 = (tree *)(const_cast<void *>(p2_));
2232 if (p1[1] == p2[1])
2233 return 0;
2234 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
2235 }
2236
/* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
   hash value SCC_HASH with an already recorded SCC.  Return true if
   that was successful, otherwise return false.  */

static bool
unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
	   unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
{
  bool unified_p = false;
  /* Build a temporary SCC descriptor on the stack.  ENTRIES is a
     trailing array with one element already in sizeof (tree_scc),
     hence the len - 1 adjustment.  */
  tree_scc *scc
    = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
  scc->next = NULL;
  scc->hash = scc_hash;
  scc->len = len;
  scc->entry_len = scc_entry_len;
  for (unsigned i = 0; i < len; ++i)
    {
      tree t = streamer_tree_cache_get_tree (cache, from + i);
      scc->entries[i] = t;
      /* Do not merge SCCs with local entities inside them.  Also do
	 not merge TRANSLATION_UNIT_DECLs.  */
      if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
	  || (VAR_OR_FUNCTION_DECL_P (t)
	      && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
	  || TREE_CODE (t) == LABEL_DECL)
	{
	  /* Avoid doing any work for these cases and do not worry to
	     record the SCCs for further merging.  */
	  return false;
	}
    }

  /* Look for the list of candidate SCCs to compare against.  */
  tree_scc **slot;
  slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
  if (*slot)
    {
      /* Try unifying against each candidate.  */
      num_scc_compares++;

      /* Set TREE_VISITED on the scc so we can easily identify tree nodes
	 outside of the scc when following tree edges.  Make sure
	 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
	 to track whether we visited the SCC member during the compare.
	 We cannot use TREE_VISITED on the pscc members as the extended
	 scc and pscc can overlap.  */
      for (unsigned i = 0; i < scc->len; ++i)
	{
	  TREE_VISITED (scc->entries[i]) = 1;
	  gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
	}

      /* Scratch map used by compare_tree_sccs to record the pairing
	 of read trees with prevailing trees.  */
      tree *map = XALLOCAVEC (tree, 2 * len);
      for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
	{
	  if (!compare_tree_sccs (pscc, scc, map))
	    continue;

	  /* Found an equal SCC.  */
	  unified_p = true;
	  num_scc_compare_collisions--;
	  num_sccs_merged++;
	  total_scc_size_merged += len;

#ifdef ENABLE_CHECKING
	  /* Sanity-check the map: singleton-like nodes must never end
	     up being merged through the SCC machinery.  */
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = map[2*i+1];
	      enum tree_code code = TREE_CODE (t);
	      /* IDENTIFIER_NODEs should be singletons and are merged by the
		 streamer.  The others should be singletons, too, and we
		 should not merge them in any way.  */
	      gcc_assert (code != TRANSLATION_UNIT_DECL
			  && code != IDENTIFIER_NODE
			  && !streamer_handle_as_builtin_p (t));
	    }
#endif

	  /* Fixup the streamer cache with the prevailing nodes according
	     to the tree node mapping computed by compare_tree_sccs.  */
	  if (len == 1)
	    streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
	  else
	    {
	      /* Sort both maps by the read tree so cache slots and
		 prevailing trees line up index-wise.  */
	      tree *map2 = XALLOCAVEC (tree, 2 * len);
	      for (unsigned i = 0; i < len; ++i)
		{
		  map2[i*2] = (tree)(uintptr_t)(from + i);
		  map2[i*2+1] = scc->entries[i];
		}
	      qsort (map2, len, 2 * sizeof (tree), cmp_tree);
	      qsort (map, len, 2 * sizeof (tree), cmp_tree);
	      for (unsigned i = 0; i < len; ++i)
		streamer_tree_cache_replace_tree (cache, map[2*i],
						  (uintptr_t)map2[2*i]);
	    }

	  /* Free the tree nodes from the read SCC.  */
	  for (unsigned i = 0; i < len; ++i)
	    {
	      if (TYPE_P (scc->entries[i]))
		num_merged_types++;
	      ggc_free (scc->entries[i]);
	    }

	  break;
	}

      /* Reset TREE_VISITED if we didn't unify the SCC with another.  */
      if (!unified_p)
	for (unsigned i = 0; i < scc->len; ++i)
	  TREE_VISITED (scc->entries[i]) = 0;
    }

  /* If we didn't unify it to any candidate duplicate the relevant
     pieces to permanent storage and link it into the chain.  */
  if (!unified_p)
    {
      tree_scc *pscc
	= XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
      memcpy (pscc, scc, sizeof (tree_scc));
      pscc->next = (*slot);
      *slot = pscc;
    }
  return unified_p;
}
2363
2364
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).  Ownership of
   RESOLUTIONS passes to the created data-in, released at the end via
   lto_data_in_delete.  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
		vec<ld_plugin_symbol_resolution_t> resolutions)
{
  /* The decls section layout is: header, decl states, main tree
     stream, string table -- compute the offsets of each part.  */
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
				header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      unsigned from = data_in->reader_cache->nodes.length ();
      /* Read and uniquify SCCs as in the input stream.  */
      enum LTO_tags tag = streamer_read_record_start (&ib_main);
      if (tag == LTO_tree_scc)
	{
	  unsigned len_;
	  unsigned scc_entry_len;
	  hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
					      &scc_entry_len);
	  /* The number of cache entries added must equal the streamed
	     SCC length.  */
	  unsigned len = data_in->reader_cache->nodes.length () - from;
	  gcc_assert (len == len_);

	  total_scc_size += len;
	  num_sccs_read++;

	  /* We have the special case of size-1 SCCs that are pre-merged
	     by means of identifier and string sharing for example.
	     ??? Maybe we should avoid streaming those as SCCs.  */
	  tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from);
	  if (len == 1
	      && (TREE_CODE (first) == IDENTIFIER_NODE
		  || TREE_CODE (first) == INTEGER_CST
		  || TREE_CODE (first) == TRANSLATION_UNIT_DECL
		  || streamer_handle_as_builtin_p (first)))
	    continue;

	  /* Try to unify the SCC with already existing ones.  Skipped
	     during LTRANS where merging already happened at WPA time.  */
	  if (!flag_ltrans
	      && unify_scc (data_in->reader_cache, from,
			    len, scc_entry_len, scc_hash))
	    continue;

	  /* Do remaining fixup tasks for prevailing nodes.  */
	  bool seen_type = false;
	  bool not_merged_type_same_scc = false;
	  bool not_merged_type_not_same_scc = false;
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from + i);
	      /* For statistics, see if the old code would have merged
		 the type.  */
	      if (TYPE_P (t)
		  && (flag_lto_report || (flag_wpa && flag_lto_report_wpa)))
		{
		  tree newt = gimple_register_type (t);
		  if (newt != t)
		    {
		      num_not_merged_types++;
		      unsigned j;
		      /* Check if we can never merge the types because
			 they are in the same SCC and thus the old
			 code was broken.  */
		      for (j = 0; j < len; ++j)
			if (i != j
			    && streamer_tree_cache_get_tree
				 (data_in->reader_cache, from + j) == newt)
			  {
			    num_not_merged_types_in_same_scc++;
			    not_merged_type_same_scc = true;
			    break;
			  }
		      if (j == len)
			not_merged_type_not_same_scc = true;
		    }
		}
	      /* Reconstruct the type variant and pointer-to/reference-to
		 chains.  */
	      if (TYPE_P (t))
		{
		  seen_type = true;
		  num_prevailing_types++;
		  lto_fixup_prevailing_type (t);
		}
	      /* Compute the canonical type of all types.
		 ??? Should be able to assert that !TYPE_CANONICAL.  */
	      if (TYPE_P (t) && !TYPE_CANONICAL (t))
		TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
	      /* Link shared INTEGER_CSTs into TYPE_CACHED_VALUEs of its
		 type which is also member of this SCC.  */
	      if (TREE_CODE (t) == INTEGER_CST
		  && !TREE_OVERFLOW (t))
		cache_integer_cst (t);
	      /* Register TYPE_DECLs with the debuginfo machinery.  */
	      if (!flag_wpa
		  && TREE_CODE (t) == TYPE_DECL)
		debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
	      if (!flag_ltrans)
		{
		  /* Register variables and functions with the
		     symbol table.  */
		  if (TREE_CODE (t) == VAR_DECL)
		    lto_register_var_decl_in_symtab (data_in, t, from + i);
		  else if (TREE_CODE (t) == FUNCTION_DECL
			   && !DECL_BUILT_IN (t))
		    lto_register_function_decl_in_symtab (data_in, t, from + i);
		  /* Scan the tree for references to global functions or
		     variables and record those for later fixup.  */
		  maybe_remember_with_vars (t);
		}
	    }
	  /* Accumulate per-SCC statistics counters.  */
	  if (not_merged_type_same_scc)
	    {
	      num_not_merged_types_in_same_scc_trees += len;
	      num_not_merged_types_trees += len;
	    }
	  else if (not_merged_type_not_same_scc)
	    num_not_merged_types_trees += len;
	  if (seen_type)
	    num_type_scc_trees += len;
	}
      else
	{
	  /* Pickle stray references.  */
	  t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
	  gcc_assert (t && data_in->reader_cache->nodes.length () == from);
	}
    }

  /* Read in lto_in_decl_state objects.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;

  /* The first decl state is the global one.  */
  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
				     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state. */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
2550
2551 /* Custom version of strtoll, which is not portable. */
2552
2553 static HOST_WIDEST_INT
2554 lto_parse_hex (const char *p)
2555 {
2556 HOST_WIDEST_INT ret = 0;
2557
2558 for (; *p != '\0'; ++p)
2559 {
2560 char c = *p;
2561 unsigned char part;
2562 ret <<= 4;
2563 if (c >= '0' && c <= '9')
2564 part = c - '0';
2565 else if (c >= 'a' && c <= 'f')
2566 part = c - 'a' + 10;
2567 else if (c >= 'A' && c <= 'F')
2568 part = c - 'A' + 10;
2569 else
2570 internal_error ("could not parse hex number");
2571 ret |= part;
2572 }
2573
2574 return ret;
2575 }
2576
2577 /* Read resolution for file named FILE_NAME. The resolution is read from
2578 RESOLUTION. */
2579
2580 static void
2581 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2582 {
2583 /* We require that objects in the resolution file are in the same
2584 order as the lto1 command line. */
2585 unsigned int name_len;
2586 char *obj_name;
2587 unsigned int num_symbols;
2588 unsigned int i;
2589 struct lto_file_decl_data *file_data;
2590 splay_tree_node nd = NULL;
2591
2592 if (!resolution)
2593 return;
2594
2595 name_len = strlen (file->filename);
2596 obj_name = XNEWVEC (char, name_len + 1);
2597 fscanf (resolution, " "); /* Read white space. */
2598
2599 fread (obj_name, sizeof (char), name_len, resolution);
2600 obj_name[name_len] = '\0';
2601 if (filename_cmp (obj_name, file->filename) != 0)
2602 internal_error ("unexpected file name %s in linker resolution file. "
2603 "Expected %s", obj_name, file->filename);
2604 if (file->offset != 0)
2605 {
2606 int t;
2607 char offset_p[17];
2608 HOST_WIDEST_INT offset;
2609 t = fscanf (resolution, "@0x%16s", offset_p);
2610 if (t != 1)
2611 internal_error ("could not parse file offset");
2612 offset = lto_parse_hex (offset_p);
2613 if (offset != file->offset)
2614 internal_error ("unexpected offset");
2615 }
2616
2617 free (obj_name);
2618
2619 fscanf (resolution, "%u", &num_symbols);
2620
2621 for (i = 0; i < num_symbols; i++)
2622 {
2623 int t;
2624 unsigned index;
2625 unsigned HOST_WIDE_INT id;
2626 char r_str[27];
2627 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2628 unsigned int j;
2629 unsigned int lto_resolution_str_len =
2630 sizeof (lto_resolution_str) / sizeof (char *);
2631 res_pair rp;
2632
2633 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2634 &index, &id, r_str);
2635 if (t != 3)
2636 internal_error ("invalid line in the resolution file");
2637
2638 for (j = 0; j < lto_resolution_str_len; j++)
2639 {
2640 if (strcmp (lto_resolution_str[j], r_str) == 0)
2641 {
2642 r = (enum ld_plugin_symbol_resolution) j;
2643 break;
2644 }
2645 }
2646 if (j == lto_resolution_str_len)
2647 internal_error ("invalid resolution in the resolution file");
2648
2649 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2650 {
2651 nd = lto_splay_tree_lookup (file_ids, id);
2652 if (nd == NULL)
2653 internal_error ("resolution sub id %wx not in object file", id);
2654 }
2655
2656 file_data = (struct lto_file_decl_data *)nd->value;
2657 /* The indexes are very sparse. To save memory save them in a compact
2658 format that is only unpacked later when the subfile is processed. */
2659 rp.res = r;
2660 rp.index = index;
2661 file_data->respairs.safe_push (rp);
2662 if (file_data->max_index < index)
2663 file_data->max_index = index;
2664 }
2665 }
2666
/* List of file_decl_datas.  Entries are chained through
   lto_file_decl_data::next and kept in linker command-line order
   (see create_subid_section_table).  */
struct file_data_list
  {
    struct lto_file_decl_data *first, *last;
  };
2672
2673 /* Is the name for a id'ed LTO section? */
2674
2675 static int
2676 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2677 {
2678 const char *s;
2679
2680 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2681 return 0;
2682 s = strrchr (name, '.');
2683 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2684 }
2685
2686 /* Create file_data of each sub file id */
2687
2688 static int
2689 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2690 struct file_data_list *list)
2691 {
2692 struct lto_section_slot s_slot, *new_slot;
2693 unsigned HOST_WIDE_INT id;
2694 splay_tree_node nd;
2695 void **hash_slot;
2696 char *new_name;
2697 struct lto_file_decl_data *file_data;
2698
2699 if (!lto_section_with_id (ls->name, &id))
2700 return 1;
2701
2702 /* Find hash table of sub module id */
2703 nd = lto_splay_tree_lookup (file_ids, id);
2704 if (nd != NULL)
2705 {
2706 file_data = (struct lto_file_decl_data *)nd->value;
2707 }
2708 else
2709 {
2710 file_data = ggc_alloc_lto_file_decl_data ();
2711 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2712 file_data->id = id;
2713 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
2714 lto_splay_tree_insert (file_ids, id, file_data);
2715
2716 /* Maintain list in linker order */
2717 if (!list->first)
2718 list->first = file_data;
2719 if (list->last)
2720 list->last->next = file_data;
2721 list->last = file_data;
2722 }
2723
2724 /* Copy section into sub module hash table */
2725 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2726 s_slot.name = new_name;
2727 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2728 gcc_assert (*hash_slot == NULL);
2729
2730 new_slot = XDUP (struct lto_section_slot, ls);
2731 new_slot->name = new_name;
2732 *hash_slot = new_slot;
2733 return 1;
2734 }
2735
/* Read declarations and other initializations for a FILE_DATA.
   Unpacks the compact resolution pairs recorded by
   lto_resolution_read into a dense vector and hands it to
   lto_read_decls, which takes ownership of it.  */

static void
lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
{
  const char *data;
  size_t len;
  vec<ld_plugin_symbol_resolution_t>
	resolutions = vNULL;
  int i;
  res_pair *rp;

  /* Create vector for fast access of resolution.  We do this lazily
     to save memory.  */
  resolutions.safe_grow_cleared (file_data->max_index + 1);
  for (i = 0; file_data->respairs.iterate (i, &rp); i++)
    resolutions[rp->index] = rp->res;
  file_data->respairs.release ();

  file_data->renaming_hash_table = lto_create_renaming_table ();
  file_data->file_name = file->filename;
  data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
  if (data == NULL)
    {
      /* internal_error does not return; the return below is only for
	 form.  */
      internal_error ("cannot read LTO decls from %s", file_data->file_name);
      return;
    }
  /* Frees resolutions.  */
  lto_read_decls (file_data, data, resolutions);
  lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
}
2767
2768 /* Finalize FILE_DATA in FILE and increase COUNT. */
2769
2770 static int
2771 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2772 int *count)
2773 {
2774 lto_file_finalize (file_data, file);
2775 if (cgraph_dump_file)
2776 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2777 file_data->file_name, file_data->id);
2778 (*count)++;
2779 return 0;
2780 }
2781
/* Generate a TREE representation for all types and external decls
   entities in FILE.

   Read all of the globals out of the file.  Then read the cgraph
   and process the .o index into the cgraph nodes so that it can open
   the .o file to load the functions and ipa information.
   Returns the head of the linker-ordered list of per-sub-module
   lto_file_decl_data, or NULL if the file contained none.  */

static struct lto_file_decl_data *
lto_file_read (lto_file *file, FILE *resolution_file, int *count)
{
  struct lto_file_decl_data *file_data = NULL;
  splay_tree file_ids;
  htab_t section_hash_table;
  struct lto_section_slot *section;
  struct file_data_list file_list;
  struct lto_section_list section_list;

  memset (&section_list, 0, sizeof (struct lto_section_list));
  section_hash_table = lto_obj_build_section_table (file, &section_list);

  /* Find all sub modules in the object and put their sections into new hash
     tables in a splay tree.  */
  file_ids = lto_splay_tree_new ();
  memset (&file_list, 0, sizeof (struct file_data_list));
  for (section = section_list.first; section != NULL; section = section->next)
    create_subid_section_table (section, file_ids, &file_list);

  /* Add resolutions to file ids.  */
  lto_resolution_read (file_ids, resolution_file, file);

  /* Finalize each lto file for each submodule in the merged object.  */
  for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
    lto_create_files_from_ids (file, file_data, count);

  /* The splay tree and the outer section table were only needed for
     the grouping above; the per-subfile tables survive.  */
  splay_tree_delete (file_ids);
  htab_delete (section_hash_table);

  return file_list.first;
}
2821
2822 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2823 #define LTO_MMAP_IO 1
2824 #endif
2825
2826 #if LTO_MMAP_IO
2827 /* Page size of machine is used for mmap and munmap calls. */
2828 static size_t page_mask;
2829 #endif
2830
/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well.
   With LTO_MMAP_IO the returned pointer lies inside an mmap'ed page
   range; free_section_data knows how to unmap it.  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
        {
	  /* fatal_error does not return; the return is for form only.  */
	  fatal_error ("Cannot open %s", file_data->file_name);
	  return NULL;
        }
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset; map from the enclosing page
     boundary and return a pointer adjusted by the remainder.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file.  So
     we close file here again.  This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink.  */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2912
2913
2914 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2915 NAME will be NULL unless the section type is for a function
2916 body. */
2917
2918 static const char *
2919 get_section_data (struct lto_file_decl_data *file_data,
2920 enum lto_section_type section_type,
2921 const char *name,
2922 size_t *len)
2923 {
2924 htab_t section_hash_table = file_data->section_hash_table;
2925 struct lto_section_slot *f_slot;
2926 struct lto_section_slot s_slot;
2927 const char *section_name = lto_get_section_name (section_type, name, file_data);
2928 char *data = NULL;
2929
2930 *len = 0;
2931 s_slot.name = section_name;
2932 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2933 if (f_slot)
2934 {
2935 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2936 *len = f_slot->len;
2937 }
2938
2939 free (CONST_CAST (char *, section_name));
2940 return data;
2941 }
2942
2943
/* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
   starts at OFFSET and has LEN bytes.  Mirrors lto_read_section_data:
   munmap in the LTO_MMAP_IO configuration, free otherwise.  */

static void
free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
		   enum lto_section_type section_type ATTRIBUTE_UNUSED,
		   const char *name ATTRIBUTE_UNUSED,
		   const char *offset, size_t len ATTRIBUTE_UNUSED)
{
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

#if LTO_MMAP_IO
  /* Recompute the page-aligned mapping that lto_read_section_data
     created so the whole mapped range is unmapped.  */
  computed_offset = ((intptr_t) offset) & page_mask;
  diff = (intptr_t) offset - computed_offset;
  computed_len = len + diff;

  munmap ((caddr_t) computed_offset, computed_len);
#else
  free (CONST_CAST(char *, offset));
#endif
}
2969
2970 static lto_file *current_lto_file;
2971
2972 /* Helper for qsort; compare partitions and return one with smaller size.
2973 We sort from greatest to smallest so parallel build doesn't stale on the
2974 longest compilation being executed too late. */
2975
2976 static int
2977 cmp_partitions_size (const void *a, const void *b)
2978 {
2979 const struct ltrans_partition_def *pa
2980 = *(struct ltrans_partition_def *const *)a;
2981 const struct ltrans_partition_def *pb
2982 = *(struct ltrans_partition_def *const *)b;
2983 return pb->insns - pa->insns;
2984 }
2985
2986 /* Helper for qsort; compare partitions and return one with smaller order. */
2987
2988 static int
2989 cmp_partitions_order (const void *a, const void *b)
2990 {
2991 const struct ltrans_partition_def *pa
2992 = *(struct ltrans_partition_def *const *)a;
2993 const struct ltrans_partition_def *pb
2994 = *(struct ltrans_partition_def *const *)b;
2995 int ordera = -1, orderb = -1;
2996
2997 if (lto_symtab_encoder_size (pa->encoder))
2998 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
2999 if (lto_symtab_encoder_size (pb->encoder))
3000 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
3001 return orderb - ordera;
3002 }
3003
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  The list file (ltrans_output_list) receives one output
   object filename per line; the driver uses it to launch the LTRANS
   compilations.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  /* Account every symbol in every partition in the output statistics.  */
  FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files.  The buffer leaves room
     for the longest possible "%u.o" suffix ("2147483648.o").  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  /* Strip a trailing ".out" from the list name so the unit files are
     named <base><N>.o rather than <base>.out<N>.o.  */
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = ltrans_partitions.length ();

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command.  */
  ltrans_partitions.qsort (flag_toplevel_reorder
			   ? cmp_partitions_size
			   : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = ltrans_partitions[i];

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  /* Dump the partition contents: first the symbols placed in the
	     partition proper, then boundary symbols (referenced but owned
	     by other partitions), noting when a body or initializer is
	     streamed along anyway.  */
	  lto_symtab_encoder_iterator lsei;

	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, "  Symbols in partition: ");
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  fprintf (cgraph_dump_file, "\n  Symbols in boundary: ");
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
		  if (cnode
		      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
		    fprintf (cgraph_dump_file, "(body included)");
		  else
		    {
		      varpool_node *vnode = dyn_cast <varpool_node> (node);
		      if (vnode
			  && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
			fprintf (cgraph_dump_file, "(initializer included)");
		    }
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      /* Only the first partition may legitimately be empty.  */
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      /* The encoder is consumed by the write above; drop the dangling
	 pointer.  */
      part->encoder = NULL;

      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  fclose flushes, so check its result.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
3134
3135
/* If TT is a variable or function decl replace it with its
   prevailing variant.  */
#define LTO_SET_PREVAIL(tt) \
   do {\
     if ((tt) && VAR_OR_FUNCTION_DECL_P (tt)) \
       tt = lto_symtab_prevailing_decl (tt); \
   } while (0)

/* Ensure that TT isn't a replacable var of function decl.  Asserts that
   TT is either NULL or not a VAR_DECL/FUNCTION_DECL at all.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))

/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  Fields that can never legitimately
   point at a var/function decl are asserted with LTO_NO_PREVAIL instead
   of being rewritten.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      /* Decls: fix context, sizes, initializer and abstract origin; the
	 name, attributes and vis-related fields never hold decls.  */
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_NO_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      /* Types: sizes, min/max values, binfo and context can refer to
	 decls; the variant chain and canonical type never do.  */
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_SET_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      /* Expressions: every operand may be a var/function decl.  */
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      /* Only TREE_LIST is expected among the remaining codes.  */
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
}
#undef LTO_SET_PREVAIL
#undef LTO_NO_PREVAIL
3230
3231 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
3232 replaces var and function decls with the corresponding prevailing def. */
3233
3234 static void
3235 lto_fixup_state (struct lto_in_decl_state *state)
3236 {
3237 unsigned i, si;
3238 struct lto_tree_ref_table *table;
3239
3240 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
3241 we still need to walk from all DECLs to find the reachable
3242 FUNCTION_DECLs and VAR_DECLs. */
3243 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
3244 {
3245 table = &state->streams[si];
3246 for (i = 0; i < table->size; i++)
3247 {
3248 tree *tp = table->trees + i;
3249 if (VAR_OR_FUNCTION_DECL_P (*tp))
3250 *tp = lto_symtab_prevailing_decl (*tp);
3251 }
3252 }
3253 }
3254
3255 /* A callback of htab_traverse. Just extracts a state from SLOT
3256 and calls lto_fixup_state. */
3257
3258 static int
3259 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
3260 {
3261 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
3262 lto_fixup_state (state);
3263 return 1;
3264 }
3265
3266 /* Fix the decls from all FILES. Replaces each decl with the corresponding
3267 prevailing one. */
3268
3269 static void
3270 lto_fixup_decls (struct lto_file_decl_data **files)
3271 {
3272 unsigned int i;
3273 htab_iterator hi;
3274 tree t;
3275
3276 FOR_EACH_HTAB_ELEMENT (tree_with_vars, t, tree, hi)
3277 lto_fixup_prevailing_decls (t);
3278
3279 for (i = 0; files[i]; i++)
3280 {
3281 struct lto_file_decl_data *file = files[i];
3282 struct lto_in_decl_state *state = file->global_decl_state;
3283 lto_fixup_state (state);
3284
3285 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
3286 }
3287 }
3288
/* All input file data after lto_flatten_files, as one NULL-terminated
   array; the GTY length expression matches the count recorded in
   lto_stats.num_input_files.  */
static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
3290
3291 /* Turn file datas for sub files into a single array, so that they look
3292 like separate files for further passes. */
3293
3294 static void
3295 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
3296 {
3297 struct lto_file_decl_data *n, *next;
3298 int i, k;
3299
3300 lto_stats.num_input_files = count;
3301 all_file_decl_data
3302 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
3303 /* Set the hooks so that all of the ipa passes can read in their data. */
3304 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3305 for (i = 0, k = 0; i < last_file_ix; i++)
3306 {
3307 for (n = orig[i]; n != NULL; n = next)
3308 {
3309 all_file_decl_data[k++] = n;
3310 next = n->next;
3311 n->next = NULL;
3312 }
3313 }
3314 all_file_decl_data[k] = NULL;
3315 gcc_assert (k == count);
3316 }
3317
/* Input file data before flattening (i.e. splitting them to subfiles to
   support incremental linking).  Freed and cleared once
   lto_flatten_files has copied the chains into all_file_decl_data.  */
static int real_file_count;
static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;

/* Forward declaration; defined below next to the other report code.  */
static void print_lto_report_1 (void);
3324
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.  */

static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  int count = 0;
  struct lto_file_decl_data **decl_data;
  void **res;
  symtab_node snode;

  init_cgraph ();

  timevar_push (TV_IPA_LTO_DECL_IN);

  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
	fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives.  */
      gcc_assert (num_objects == nfiles);
    }
  cgraph_state = CGRAPH_LTO_STREAMING;

  /* Set up the hash tables and caches used while streaming in and
     merging trees and types.  All of these are torn down again below
     once reading is complete.  */
  tree_with_vars = htab_create_ggc (101, htab_hash_pointer, htab_eq_pointer,
				    NULL);
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
  gcc_obstack_init (&tree_scc_hash_obstack);
  tree_scc_hash.create (4096);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line.
     Each file's decl data is closed again right after reading; only
     the extracted file_data survives in DECL_DATA.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, " %s", fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
	break;

      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
	{
	  lto_obj_file_close (current_lto_file);
	  free (current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      free (current_lto_file);
      current_lto_file = NULL;
    }

  lto_flatten_files (decl_data, count, last_file_ix);
  lto_stats.num_input_files = count;
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();

  /* Free gimple type merging datastructures.  */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  tree_scc_hash.dispose ();
  obstack_free (&tree_scc_hash_obstack, NULL);
  free_gimple_type_tables ();
  ggc_collect ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  /* Store resolutions into the symbol table.  The per-file resolution
     maps are consumed here and destroyed immediately afterwards.  */

  FOR_EACH_SYMBOL (snode)
    if (symtab_real_symbol_p (snode)
	&& snode->symbol.lto_file_data
	&& snode->symbol.lto_file_data->resolution_map
	&& (res = pointer_map_contains (snode->symbol.lto_file_data->resolution_map,
					snode->symbol.decl)))
      snode->symbol.resolution
	= (enum ld_plugin_symbol_resolution)(size_t)*res;
  for (i = 0; all_file_decl_data[i]; i++)
    if (all_file_decl_data[i]->resolution_map)
      {
	pointer_map_destroy (all_file_decl_data[i]->resolution_map);
	all_file_decl_data[i]->resolution_map = NULL;
      }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
	 good way to recover here.  */
      if (seen_error ())
	fatal_error ("errors during merging of translation units");

      /* Fixup all decls.  */
      lto_fixup_decls (all_file_decl_data);
    }
  htab_delete (tree_with_vars);
  tree_with_vars = NULL;
  ggc_collect ();

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  /* The decl states and symtab encoders are no longer needed once the
     summaries are in; release them per file.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
      lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
      all_file_decl_data[i]->global_decl_state = NULL;
      all_file_decl_data[i]->current_decl_state = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions.  */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_symtab (cgraph_dump_file);
    }
  lto_symtab_merge_symbols ();
  ggc_collect ();
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready.  */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
3532
3533
/* Materialize all the bodies for all the nodes in the callgraph.  */

static void
materialize_cgraph (void)
{
  tree decl;
  struct cgraph_node *node;
  unsigned i;
  timevar_id_t lto_timer;

  if (!quiet_flag)
    fprintf (stderr,
	     flag_wpa ? "Materializing decls:" : "Reading function bodies:");

  /* Now that we have input the cgraph, we need to clear all of the aux
     nodes and read the functions if we are not running in WPA mode.  */
  timevar_push (TV_IPA_LTO_GIMPLE_IN);

  /* Only nodes that came from an LTO input file have anything to
     materialize.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->symbol.lto_file_data)
	{
	  lto_materialize_function (node);
	  lto_stats.num_input_cgraph_nodes++;
	}
    }

  timevar_pop (TV_IPA_LTO_GIMPLE_IN);

  /* Start the appropriate timer depending on the mode that we are
     operating in.  */
  lto_timer = (flag_wpa) ? TV_WHOPR_WPA
	      : (flag_ltrans) ? TV_WHOPR_LTRANS
	      : TV_LTO;
  timevar_push (lto_timer);

  /* No function context while emitting the globals below.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Inform the middle end about the global variables we have seen.  */
  FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
    rest_of_decl_compilation (decl, 1, 0);

  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_pop (lto_timer);
}
3582
3583
3584 /* Show various memory usage statistics related to LTO. */
3585 static void
3586 print_lto_report_1 (void)
3587 {
3588 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3589 fprintf (stderr, "%s statistics\n", pfx);
3590
3591 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3592 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3593 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3594 if (flag_wpa && tree_scc_hash.is_created ())
3595 {
3596 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3597 "collision ratio: %f\n", pfx,
3598 (long) tree_scc_hash.size (),
3599 (long) tree_scc_hash.elements (),
3600 tree_scc_hash.collisions ());
3601 hash_table<tree_scc_hasher>::iterator hiter;
3602 tree_scc *scc, *max_scc = NULL;
3603 unsigned max_length = 0;
3604 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3605 {
3606 unsigned length = 0;
3607 tree_scc *s = scc;
3608 for (; s; s = s->next)
3609 length++;
3610 if (length > max_length)
3611 {
3612 max_length = length;
3613 max_scc = scc;
3614 }
3615 }
3616 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3617 pfx, max_length, max_scc->len);
3618 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3619 num_scc_compares, num_scc_compare_collisions,
3620 num_scc_compare_collisions / (double) num_scc_compares);
3621 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3622 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3623 total_scc_size_merged);
3624 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3625 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3626 pfx, num_prevailing_types, num_type_scc_trees);
3627 fprintf (stderr, "[%s] Old merging code merges an additional %lu types"
3628 " of which %lu are in the same SCC with their "
3629 "prevailing variant (%lu and %lu associated trees)\n",
3630 pfx, num_not_merged_types, num_not_merged_types_in_same_scc,
3631 num_not_merged_types_trees,
3632 num_not_merged_types_in_same_scc_trees);
3633 }
3634
3635 print_gimple_types_stats (pfx);
3636 print_lto_report (pfx);
3637 }
3638
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    dump_symtab (cgraph_dump_file);
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  /* Run the regular IPA passes and drop nodes they made unreachable.  */
  execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
  symtab_remove_unreachable_nodes (false, dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  /* Partition the callgraph according to the selected -flto-partition
     scheme.  */
  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
3729
3730
3731 static GTY(()) tree lto_eh_personality_decl;
3732
3733 /* Return the LTO personality function decl. */
3734
3735 tree
3736 lto_eh_personality (void)
3737 {
3738 if (!lto_eh_personality_decl)
3739 {
3740 /* Use the first personality DECL for our personality if we don't
3741 support multiple ones. This ensures that we don't artificially
3742 create the need for them in a single-language program. */
3743 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3744 lto_eh_personality_decl = first_personality_decl;
3745 else
3746 lto_eh_personality_decl = lhd_gcc_personality ();
3747 }
3748
3749 return lto_eh_personality_decl;
3750 }
3751
/* Set the process name based on the LTO mode.  Note the checks are not
   mutually exclusive; if several mode flags are somehow set, the last
   matching setproctitle call wins.  */

static void
lto_process_name (void)
{
  if (flag_lto)
    setproctitle ("lto1-lto");
  if (flag_wpa)
    setproctitle ("lto1-wpa");
  if (flag_ltrans)
    setproctitle ("lto1-ltrans");
}
3764
3765
/* Initialize the LTO front end: process title, streamer hooks, reader
   state, section hooks, statistics, bitmap obstack and CFG hooks.  The
   order of these calls follows their dependencies; do not reorder
   without checking.  */

static void
lto_init (void)
{
  lto_process_name ();
  lto_streamer_hooks_init ();
  lto_reader_init ();
  /* No file data yet; the real array is installed later in
     read_cgraph_and_symbols.  */
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
3779
3780
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  struct varpool_node *vnode;

	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();
	  if (!flag_ltrans)
	    lto_promote_statics_nonwpa ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
	    print_lto_report_1 ();

	  /* Record the global variables.  */
	  FOR_EACH_DEFINED_VARIABLE (vnode)
	    vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
	}
    }

  /* Here we make LTO pretend to be a parser.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
3867
3868 #include "gt-lto-lto.h"