lto-streamer.h (lto_global_var_decls): Remove.
[gcc.git] / gcc / lto / lto.c
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2013 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "ggc.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
35 #include "vec.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "ipa-prop.h"
39 #include "common.h"
40 #include "debug.h"
41 #include "gimple.h"
42 #include "lto.h"
43 #include "lto-tree.h"
44 #include "lto-streamer.h"
45 #include "tree-streamer.h"
46 #include "splay-tree.h"
47 #include "lto-partition.h"
48 #include "data-streamer.h"
49 #include "context.h"
50 #include "pass_manager.h"
51
/* Vector to keep track of external variables we've seen so far.  */
vec<tree, va_gc> *lto_global_var_decls;

/* First personality routine encountered while materializing function
   bodies (set by lto_materialize_function).  */
static GTY(()) tree first_personality_decl;
56
57 /* Returns a hash code for P. */
58
59 static hashval_t
60 hash_name (const void *p)
61 {
62 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
63 return (hashval_t) htab_hash_string (ds->name);
64 }
65
66
67 /* Returns nonzero if P1 and P2 are equal. */
68
69 static int
70 eq_name (const void *p1, const void *p2)
71 {
72 const struct lto_section_slot *s1 =
73 (const struct lto_section_slot *) p1;
74 const struct lto_section_slot *s2 =
75 (const struct lto_section_slot *) p2;
76
77 return strcmp (s1->name, s2->name) == 0;
78 }
79
80 /* Free lto_section_slot */
81
82 static void
83 free_with_string (void *arg)
84 {
85 struct lto_section_slot *s = (struct lto_section_slot *)arg;
86
87 free (CONST_CAST (char *, s->name));
88 free (arg);
89 }
90
/* Create the section hash table, keyed by section name.  Entries are
   lto_section_slot structures released via free_with_string.  */

htab_t
lto_obj_create_section_hash_table (void)
{
  /* 37 is only the initial size hint for the table.  */
  return htab_create (37, hash_name, eq_name, free_with_string);
}
98
/* Delete an allocated integer KEY in the splay tree.  KEY points to a
   heap-allocated HOST_WIDE_INT (see lto_splay_tree_insert).  */

static void
lto_splay_tree_delete_id (splay_tree_key key)
{
  free ((void *) key);
}
106
107 /* Compare splay tree node ids A and B. */
108
109 static int
110 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
111 {
112 unsigned HOST_WIDE_INT ai;
113 unsigned HOST_WIDE_INT bi;
114
115 ai = *(unsigned HOST_WIDE_INT *) a;
116 bi = *(unsigned HOST_WIDE_INT *) b;
117
118 if (ai < bi)
119 return -1;
120 else if (ai > bi)
121 return 1;
122 return 0;
123 }
124
/* Look up splay tree node by ID in splay tree T.  Taking the address
   of the local ID is safe here because the tree's comparison callback
   only dereferences keys for the duration of this call.  */

static splay_tree_node
lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
{
  return splay_tree_lookup (t, (splay_tree_key) &id);
}
132
/* Check if KEY has ID.  KEY points to the heap-allocated
   HOST_WIDE_INT stored by lto_splay_tree_insert.  */

static bool
lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
{
  return *(unsigned HOST_WIDE_INT *) key == id;
}
140
/* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
   The ID is allocated separately because we need HOST_WIDE_INTs which may
   be wider than a splay_tree_key.  The allocation is reclaimed by
   lto_splay_tree_delete_id when the node is removed.  */

static void
lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
		       struct lto_file_decl_data *file_data)
{
  unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
  *idp = id;
  splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
}
153
/* Create a splay tree mapping allocated HOST_WIDE_INT ids to
   lto_file_decl_data values.  Keys are freed on deletion; values are
   not owned by the tree (NULL value-delete function).  */

static splay_tree
lto_splay_tree_new (void)
{
  return splay_tree_new (lto_splay_tree_compare_ids,
			 lto_splay_tree_delete_id,
			 NULL);
}
163
/* Return true when NODE has a clone that is analyzed (i.e. we need
   to load its body even if the node itself is not needed).  */

static bool
has_analyzed_clone_p (struct cgraph_node *node)
{
  struct cgraph_node *orig = node;
  /* Iterative preorder walk over the clone tree rooted at ORIG:
     descend into clones first, then advance to the next sibling,
     and climb back up via clone_of when a subtree is exhausted.  */
  node = node->clones;
  if (node)
    while (node != orig)
      {
	if (node->symbol.analyzed)
	  return true;
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* Backtrack to the nearest ancestor that still has an
	       unvisited sibling; stop when we are back at the root.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
  return false;
}
191
/* Read the function body for the function associated with NODE.  */

static void
lto_materialize_function (struct cgraph_node *node)
{
  tree decl;

  decl = node->symbol.decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones.  */
  if ((cgraph_function_with_gimple_body_p (node) && node->symbol.analyzed)
      || node->used_as_abstract_origin
      || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read.  */
      if (node->clone_of)
	return;
      /* Remember the first personality routine we come across.  */
      if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
	first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
    }

  /* Let the middle end know about the function.  */
  rest_of_decl_compilation (decl, 1, 0);
}
216
217
/* Decode the content of memory pointed to by DATA in the in decl
   state object STATE.  DATA_IN points to a data_in structure for
   decoding.  Return the address after the decoded object in the
   input.

   The encoding is: one tree-cache index identifying the function decl
   (void_type_node marks the global decl state), then for each of the
   LTO_N_DECL_STREAMS streams a length word followed by that many
   tree-cache indices.  */

static const uint32_t *
lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
			struct lto_in_decl_state *state)
{
  uint32_t ix;
  tree decl;
  uint32_t i, j;

  ix = *data++;
  decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      /* The global decl state is tagged with void_type_node rather
	 than a function decl; represent it as a NULL fn_decl.  */
      gcc_assert (decl == void_type_node);
      decl = NULL_TREE;
    }
  state->fn_decl = decl;

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    {
      uint32_t size = *data++;
      /* GC-allocated so the trees stay reachable via the state.  */
      tree *decls = ggc_alloc_vec_tree (size);

      for (j = 0; j < size; j++)
	decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);

      state->streams[i].size = size;
      state->streams[i].trees = decls;
      data += size;
    }

  return data;
}
255
256
257
/* ??? Old hashing and merging code follows, we keep it for statistics
   purposes for now.  */

/* Global type table.  FIXME, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
/* Cache mapping each type to its computed hash value, so the SCC hash
   walk runs only once per type.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

	-2: The pair (T1, T2) has just been inserted in the table.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.  */

struct type_pair_d
{
  unsigned int uid1;	/* Smaller TYPE_UID of the pair.  */
  unsigned int uid2;	/* Larger TYPE_UID of the pair.  */
  signed char same_p;	/* Comparison state, see above.  */
};
typedef struct type_pair_d *type_pair_t;

/* Fixed-size, direct-mapped cache of pair comparison results; colliding
   pairs simply evict each other (see lookup_type_pair).  */
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
290
291
/* Lookup the pair of types T1 and T2 in the type pair cache.  Insert a
   new entry if none existed; the returned entry then has SAME_P == -2,
   possibly evicting a colliding pair from the direct-mapped slot.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  /* Order the UIDs so (T1, T2) and (T2, T1) map to the same slot.  */
  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t would imply function calls; UIDs
     are in a limited range so a cheap modular hash is sufficient.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  /* Direct-mapped cache: overwrite whatever pair occupied this slot.  */
  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p = -2;

  return &type_pair_cache[index];
}
327
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;	/* Preorder DFS number (Tarjan's algorithm).  */
  unsigned int low;	/* Lowest dfsnum reachable from this node.  */
  bool on_sccstack;	/* True while the node is on the SCC stack.  */
  union {
    hashval_t hash;	/* Used by the type hashing walk.  */
    signed char same_p;	/* Used by the type comparison walk.  */
  } u;
};

/* DFS counter for the hashing walk (iterative_hash_gimple_type).  */
static unsigned int next_dfs_num;
/* DFS counter for the comparison walk (gimple_types_compatible_p_1).  */
static unsigned int gtc_next_dfs_num;
346
347 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
348 true then if any type has no name return false, otherwise return
349 true if both types have no names. */
350
351 static bool
352 compare_type_names_p (tree t1, tree t2)
353 {
354 tree name1 = TYPE_NAME (t1);
355 tree name2 = TYPE_NAME (t2);
356
357 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
358 return false;
359
360 if (name1 == NULL_TREE)
361 return true;
362
363 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
364 if (TREE_CODE (name1) != TREE_CODE (name2))
365 return false;
366
367 if (TREE_CODE (name1) == TYPE_DECL)
368 name1 = DECL_NAME (name1);
369 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
370
371 if (TREE_CODE (name2) == TYPE_DECL)
372 name2 = DECL_NAME (name2);
373 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
374
375 /* Identifiers can be compared with pointer equality rather
376 than a string comparison. */
377 if (name1 == name2)
378 return true;
379
380 return false;
381 }
382
383 static bool
384 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
385 vec<type_pair_t> *,
386 struct pointer_map_t *, struct obstack *);
387
/* DFS visit the edge from the caller's type pair with state *STATE to
   the pair T1, T2.  Update the caller's SCC state and return the
   current equality lattice value for the pair (optimistically true
   while the pair's SCC is still being explored).
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   vec<type_pair_t> *sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
495
/* Worker for gimple_types_compatible_p.  Compare T1 and T2, whose type
   pair cache entry is P, as one node of a Tarjan SCC walk over the type
   graph; on completing an SCC the final verdict is written back into
   every member pair's cache entry.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     vec<type_pair_t> *sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  /* The pair must have been freshly inserted by lookup_type_pair.  */
  gcc_assert (p->same_p == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  sccstack->safe_push (p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* The main variant of both types should compare equal.  */
  if (TYPE_MAIN_VARIANT (t1) != t1
      || TYPE_MAIN_VARIANT (t2) != t2)
    {
      if (!gtc_visit (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      /* Element types must match.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.
		 PLACEHOLDER_EXPR bounds compare equal to each other.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  /* Walk both argument lists in lockstep.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  /* Argument lists of different length are not compatible.  */
	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	/* Both the pointed-to type and the base type must match.  */
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    /* Enumerator names must match as well.  */
	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  /* If this node is an SCC root, pop the whole SCC and publish the
     final verdict into each member's type pair cache entry.  */
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
	 comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = sccstack->pop ();
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
810
/* Return true iff T1 and T2 are structurally identical.  This is the
   entry point: it handles the cheap non-recursive checks inline (kept
   in sync with gtc_visit) and only sets up the SCC machinery when a
   structural walk is actually needed.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  vec<type_pair_t> sccstack = vNULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p == 0 || p->same_p == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  sccstack.release ();
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
909
910 static hashval_t
911 iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
912 struct pointer_map_t *, struct obstack *);
913
/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       vec<tree> *sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      /* NOTE(review): cstate is necessarily still NULL at this point,
	 so this inner test is redundant; the lookup always runs.  */
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
962
963 /* Hash NAME with the previous hash value V and return it. */
964
965 static hashval_t
966 iterative_hash_name (tree name, hashval_t v)
967 {
968 if (!name)
969 return v;
970 v = iterative_hash_hashval_t (TREE_CODE (name), v);
971 if (TREE_CODE (name) == TYPE_DECL)
972 name = DECL_NAME (name);
973 if (!name)
974 return v;
975 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
976 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
977 }
978
979 /* A type, hashvalue pair for sorting SCC members. */
980
981 struct type_hash_pair {
982 tree type;
983 hashval_t hash;
984 };
985
986 /* Compare two type, hashvalue pairs. */
987
988 static int
989 type_hash_pair_compare (const void *p1_, const void *p2_)
990 {
991 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
992 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
993 if (p1->hash < p2->hash)
994 return -1;
995 else if (p1->hash > p2->hash)
996 return 1;
997 return 0;
998 }
999
1000 /* Returning a hash value for gimple type TYPE combined with VAL.
1001 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
1002
1003 To hash a type we end up hashing in types that are reachable.
1004 Through pointers we can end up with cycles which messes up the
1005 required property that we need to compute the same hash value
1006 for structurally equivalent types. To avoid this we have to
1007 hash all types in a cycle (the SCC) in a commutative way. The
1008 easiest way is to not mix in the hashes of the SCC members at
1009 all. To make this work we have to delay setting the hash
1010 values of the SCC until it is complete. */
1011
static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    vec<tree> *sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  /* Push TYPE on the SCC stack and initialize its Tarjan-style DFS
     state (dfsnum/low/on_sccstack).  */
  sccstack->safe_push (type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  /* For a nested type named by a TYPE_DECL, mix in the enclosing type.  */
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Factor in the variant structure.  */
  if (TYPE_MAIN_VARIANT (type) != type)
    v = visit (TYPE_MAIN_VARIANT (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      /* Hash each argument type and the argument count.  */
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  /* For records and unions, mix in field names, field types and the
     number of fields.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = sccstack->pop ();
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  /* Cache the final hash for this single-node SCC.  */
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = sccstack->length () - 1;
	  while ((*sccstack)[first] != type)
	    --first;
	  size = sccstack->length () - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = sccstack->pop ();
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      /* Mix in every other member's hash, but none that equals
		 our own (commutative combination per the comment above).  */
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
1220
1221 /* Returns a hash value for P (assumed to be a type). The hash value
1222 is computed using some distinguishing features of the type. Note
1223 that we cannot use pointer hashing here as we may be dealing with
1224 two distinct instances of the same type.
1225
1226 This function should produce the same hash value for two compatible
1227 types according to gimple_types_compatible_p. */
1228
1229 static hashval_t
1230 gimple_type_hash (const void *p)
1231 {
1232 const_tree t = (const_tree) p;
1233 vec<tree> sccstack = vNULL;
1234 struct pointer_map_t *sccstate;
1235 struct obstack sccstate_obstack;
1236 hashval_t val;
1237 void **slot;
1238 struct tree_int_map m;
1239
1240 m.base.from = CONST_CAST_TREE (t);
1241 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
1242 && *slot)
1243 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
1244
1245 /* Perform a DFS walk and pre-hash all reachable types. */
1246 next_dfs_num = 1;
1247 sccstate = pointer_map_create ();
1248 gcc_obstack_init (&sccstate_obstack);
1249 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
1250 &sccstack, sccstate, &sccstate_obstack);
1251 sccstack.release ();
1252 pointer_map_destroy (sccstate);
1253 obstack_free (&sccstate_obstack, NULL);
1254
1255 return val;
1256 }
1257
1258 /* Returns nonzero if P1 and P2 are equal. */
1259
1260 static int
1261 gimple_type_eq (const void *p1, const void *p2)
1262 {
1263 const_tree t1 = (const_tree) p1;
1264 const_tree t2 = (const_tree) p2;
1265 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
1266 CONST_CAST_TREE (t2));
1267 }
1268
1269 /* Register type T in the global type table gimple_types. */
1270
1271 static tree
1272 gimple_register_type (tree t)
1273 {
1274 void **slot;
1275
1276 /* See if we already have an equivalent type registered. */
1277 slot = htab_find_slot (gimple_types, t, INSERT);
1278 if (*slot
1279 && *(tree *)slot != t)
1280 return (tree) *((tree *) slot);
1281
1282 /* If not, insert it to the cache and the hash. */
1283 *slot = (void *) t;
1284 return t;
1285 }
1286
/* End of old merging code.  */

/* Remember trees that contain references to declarations.  */
static GTY(()) vec <tree, va_gc> *tree_with_vars;

/* Return true from the *enclosing* mentions_vars_p_* function when TT
   is a VAR_DECL or FUNCTION_DECL that is public or external, i.e. a
   decl that may be replaced by a prevailing copy and thus requires
   later fixup.  Note the hidden `return true'.  */
#define CHECK_VAR(tt) \
  do \
    { \
      if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	  && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
	return true; \
    } while (0)

/* Assert (in checking builds only) that field TT never points to a
   variable or function decl, so it cannot need fixup.  */
#define CHECK_NO_VAR(tt) \
  gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
1302
/* Check presence of pointers to decls in fields of a tree_typed T.
   Returns true if a decl needing fixup was found (via CHECK_VAR's
   embedded return), false otherwise.  */

static inline bool
mentions_vars_p_typed (tree t)
{
  CHECK_NO_VAR (TREE_TYPE (t));
  return false;
}
1311
/* Check presence of pointers to decls in fields of a tree_common T.  */

static inline bool
mentions_vars_p_common (tree t)
{
  /* Include the fields of the embedded tree_typed part.  */
  if (mentions_vars_p_typed (t))
    return true;
  CHECK_NO_VAR (TREE_CHAIN (t));
  return false;
}
1322
/* Check presence of pointers to decls in fields of a decl_minimal T.  */

static inline bool
mentions_vars_p_decl_minimal (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (DECL_NAME (t));
  /* The context may itself be a public/external decl needing fixup.  */
  CHECK_VAR (DECL_CONTEXT (t));
  return false;
}
1334
/* Check presence of pointers to decls in fields of a decl_common T.  */

static inline bool
mentions_vars_p_decl_common (tree t)
{
  if (mentions_vars_p_decl_minimal (t))
    return true;
  CHECK_VAR (DECL_SIZE (t));
  CHECK_VAR (DECL_SIZE_UNIT (t));
  CHECK_VAR (DECL_INITIAL (t));
  CHECK_NO_VAR (DECL_ATTRIBUTES (t));
  CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
  return false;
}
1349
/* Check presence of pointers to decls in fields of a decl_with_vis T.  */

static inline bool
mentions_vars_p_decl_with_vis (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;

  /* Accessor macro has side-effects, use field-name here.  */
  CHECK_NO_VAR (t->decl_with_vis.assembler_name);
  CHECK_NO_VAR (DECL_SECTION_NAME (t));
  return false;
}
1363
/* Check presence of pointers to decls in fields of a decl_non_common T.  */

static inline bool
mentions_vars_p_decl_non_common (tree t)
{
  if (mentions_vars_p_decl_with_vis (t))
    return true;
  CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
  CHECK_NO_VAR (DECL_RESULT_FLD (t));
  CHECK_NO_VAR (DECL_VINDEX (t));
  return false;
}
1376
/* Check presence of pointers to decls in fields of a FUNCTION_DECL T.  */

static bool
mentions_vars_p_function (tree t)
{
  if (mentions_vars_p_decl_non_common (t))
    return true;
  /* The personality routine is a function decl that may need fixup.  */
  CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
  return false;
}
1387
/* Check presence of pointers to decls in fields of a FIELD_DECL T.  */

static bool
mentions_vars_p_field_decl (tree t)
{
  if (mentions_vars_p_decl_common (t))
    return true;
  CHECK_VAR (DECL_FIELD_OFFSET (t));
  CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
  CHECK_NO_VAR (DECL_QUALIFIER (t));
  CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
  CHECK_NO_VAR (DECL_FCONTEXT (t));
  return false;
}
1402
/* Check presence of pointers to decls in fields of a type T.  */

static bool
mentions_vars_p_type (tree t)
{
  if (mentions_vars_p_common (t))
    return true;
  CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
  CHECK_VAR (TYPE_SIZE (t));
  CHECK_VAR (TYPE_SIZE_UNIT (t));
  CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
  CHECK_NO_VAR (TYPE_NAME (t));

  CHECK_VAR (TYPE_MINVAL (t));
  CHECK_VAR (TYPE_MAXVAL (t));

  /* Accessor is for derived node types only.  */
  CHECK_NO_VAR (t->type_non_common.binfo);

  CHECK_VAR (TYPE_CONTEXT (t));
  CHECK_NO_VAR (TYPE_CANONICAL (t));
  CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
  CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
  return false;
}
1428
1429 /* Check presence of pointers to decls in fields of a BINFO T. */
1430
1431 static bool
1432 mentions_vars_p_binfo (tree t)
1433 {
1434 unsigned HOST_WIDE_INT i, n;
1435
1436 if (mentions_vars_p_common (t))
1437 return true;
1438 CHECK_VAR (BINFO_VTABLE (t));
1439 CHECK_NO_VAR (BINFO_OFFSET (t));
1440 CHECK_NO_VAR (BINFO_VIRTUALS (t));
1441 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
1442 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
1443 for (i = 0; i < n; i++)
1444 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
1445 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1446 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1447 n = BINFO_N_BASE_BINFOS (t);
1448 for (i = 0; i < n; i++)
1449 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
1450 return false;
1451 }
1452
1453 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
1454
1455 static bool
1456 mentions_vars_p_constructor (tree t)
1457 {
1458 unsigned HOST_WIDE_INT idx;
1459 constructor_elt *ce;
1460
1461 if (mentions_vars_p_typed (t))
1462 return true;
1463
1464 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
1465 {
1466 CHECK_NO_VAR (ce->index);
1467 CHECK_VAR (ce->value);
1468 }
1469 return false;
1470 }
1471
1472 /* Check presence of pointers to decls in fields of an expression tree T. */
1473
1474 static bool
1475 mentions_vars_p_expr (tree t)
1476 {
1477 int i;
1478 if (mentions_vars_p_typed (t))
1479 return true;
1480 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
1481 CHECK_VAR (TREE_OPERAND (t, i));
1482 return false;
1483 }
1484
1485 /* Check presence of pointers to decls that needs later fixup in T. */
1486
1487 static bool
1488 mentions_vars_p (tree t)
1489 {
1490 switch (TREE_CODE (t))
1491 {
1492 case IDENTIFIER_NODE:
1493 break;
1494
1495 case TREE_LIST:
1496 CHECK_VAR (TREE_VALUE (t));
1497 CHECK_VAR (TREE_PURPOSE (t));
1498 CHECK_NO_VAR (TREE_CHAIN (t));
1499 break;
1500
1501 case FIELD_DECL:
1502 return mentions_vars_p_field_decl (t);
1503 break;
1504
1505 case LABEL_DECL:
1506 case CONST_DECL:
1507 case PARM_DECL:
1508 case RESULT_DECL:
1509 case IMPORTED_DECL:
1510 case NAMESPACE_DECL:
1511 return mentions_vars_p_decl_common (t);
1512 break;
1513
1514 case VAR_DECL:
1515 return mentions_vars_p_decl_with_vis (t);
1516 break;
1517
1518 case TYPE_DECL:
1519 return mentions_vars_p_decl_non_common (t);
1520 break;
1521
1522 case FUNCTION_DECL:
1523 return mentions_vars_p_function (t);
1524 break;
1525
1526 case TREE_BINFO:
1527 return mentions_vars_p_binfo (t);
1528 break;
1529
1530 case PLACEHOLDER_EXPR:
1531 return mentions_vars_p_common (t);
1532 break;
1533
1534 case BLOCK:
1535 case TRANSLATION_UNIT_DECL:
1536 case OPTIMIZATION_NODE:
1537 case TARGET_OPTION_NODE:
1538 break;
1539
1540 case CONSTRUCTOR:
1541 return mentions_vars_p_constructor (t);
1542 break;
1543
1544 default:
1545 if (TYPE_P (t))
1546 {
1547 if (mentions_vars_p_type (t))
1548 return true;
1549 }
1550 else if (EXPR_P (t))
1551 {
1552 if (mentions_vars_p_expr (t))
1553 return true;
1554 }
1555 else if (CONSTANT_CLASS_P (t))
1556 CHECK_NO_VAR (TREE_TYPE (t));
1557 else
1558 gcc_unreachable ();
1559 }
1560 return false;
1561 }
1562
1563
1564 /* Return the resolution for the decl with index INDEX from DATA_IN. */
1565
1566 static enum ld_plugin_symbol_resolution
1567 get_resolution (struct data_in *data_in, unsigned index)
1568 {
1569 if (data_in->globals_resolution.exists ())
1570 {
1571 ld_plugin_symbol_resolution_t ret;
1572 /* We can have references to not emitted functions in
1573 DECL_FUNCTION_PERSONALITY at least. So we can and have
1574 to indeed return LDPR_UNKNOWN in some cases. */
1575 if (data_in->globals_resolution.length () <= index)
1576 return LDPR_UNKNOWN;
1577 ret = data_in->globals_resolution[index];
1578 return ret;
1579 }
1580 else
1581 /* Delay resolution finding until decl merging. */
1582 return LDPR_UNKNOWN;
1583 }
1584
1585 /* We need to record resolutions until symbol table is read. */
1586 static void
1587 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1588 enum ld_plugin_symbol_resolution resolution)
1589 {
1590 if (resolution == LDPR_UNKNOWN)
1591 return;
1592 if (!file_data->resolution_map)
1593 file_data->resolution_map = pointer_map_create ();
1594 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1595 }
1596
1597 /* Register DECL with the global symbol table and change its
1598 name if necessary to avoid name clashes for static globals across
1599 different files. */
1600
1601 static void
1602 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1603 unsigned ix)
1604 {
1605 tree context;
1606
1607 /* Variable has file scope, not local. */
1608 if (!TREE_PUBLIC (decl)
1609 && !((context = decl_function_context (decl))
1610 && auto_var_in_fn_p (decl, context)))
1611 rest_of_decl_compilation (decl, 1, 0);
1612
1613 /* If this variable has already been declared, queue the
1614 declaration for merging. */
1615 if (TREE_PUBLIC (decl))
1616 register_resolution (data_in->file_data,
1617 decl, get_resolution (data_in, ix));
1618 }
1619
1620
1621 /* Register DECL with the global symbol table and change its
1622 name if necessary to avoid name clashes for static globals across
1623 different files. DATA_IN contains descriptors and tables for the
1624 file being read. */
1625
1626 static void
1627 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1628 unsigned ix)
1629 {
1630 /* If this variable has already been declared, queue the
1631 declaration for merging. */
1632 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1633 register_resolution (data_in->file_data,
1634 decl, get_resolution (data_in, ix));
1635 }
1636
1637
1638 /* For the type T re-materialize it in the type variant list and
1639 the pointer/reference-to chains. */
1640
1641 static void
1642 lto_fixup_prevailing_type (tree t)
1643 {
1644 /* The following re-creates proper variant lists while fixing up
1645 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1646 variant list state before fixup is broken. */
1647
1648 /* If we are not our own variant leader link us into our new leaders
1649 variant list. */
1650 if (TYPE_MAIN_VARIANT (t) != t)
1651 {
1652 tree mv = TYPE_MAIN_VARIANT (t);
1653 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1654 TYPE_NEXT_VARIANT (mv) = t;
1655 }
1656
1657 /* The following reconstructs the pointer chains
1658 of the new pointed-to type if we are a main variant. We do
1659 not stream those so they are broken before fixup. */
1660 if (TREE_CODE (t) == POINTER_TYPE
1661 && TYPE_MAIN_VARIANT (t) == t)
1662 {
1663 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1664 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1665 }
1666 else if (TREE_CODE (t) == REFERENCE_TYPE
1667 && TYPE_MAIN_VARIANT (t) == t)
1668 {
1669 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1670 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1671 }
1672 }
1673
1674
/* We keep prevailing tree SCCs in a hashtable with manual collision
   handling (in case all hashes compare the same) and keep the colliding
   entries in the tree_scc->next chain.  */

struct tree_scc
{
  /* Next SCC in the collision chain for this hash bucket.  */
  tree_scc *next;
  /* Hash of the whole SCC.  */
  hashval_t hash;
  /* Number of trees in the SCC.  */
  unsigned len;
  /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
     which share the same individual tree hash).  */
  unsigned entry_len;
  /* The members of the SCC.
     We only need to remember the first entry node candidate for prevailing
     SCCs (but of course have access to all entries for SCCs we are
     processing).
     ??? For prevailing SCCs we really only need hash and the first
     entry candidate, but that's too awkward to implement.  */
  tree entries[1];
};
1697
/* Hasher for the prevailing-SCC table.  Uses only the cheap summary
   data (whole-SCC hash, length, entry count) as a pre-filter; the full
   structural comparison happens elsewhere.  Removal is a no-op since
   entries live on an obstack.  */

struct tree_scc_hasher : typed_noop_remove <tree_scc>
{
  typedef tree_scc value_type;
  typedef tree_scc compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
1705
/* Return the pre-computed whole-SCC hash stored in SCC.  */

hashval_t
tree_scc_hasher::hash (const value_type *scc)
{
  return scc->hash;
}
1711
1712 bool
1713 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1714 {
1715 if (scc1->hash != scc2->hash
1716 || scc1->len != scc2->len
1717 || scc1->entry_len != scc2->entry_len)
1718 return false;
1719 return true;
1720 }
1721
/* Hash table of prevailing tree SCCs; collisions chain through
   tree_scc::next.  */
static hash_table <tree_scc_hasher> tree_scc_hash;
/* Obstack presumably backing the tree_scc entries above — confirm at
   the allocation sites.  */
static struct obstack tree_scc_hash_obstack;

/* Statistics counters for reporting the tree-merging work done.  */
static unsigned long num_merged_types;
static unsigned long num_prevailing_types;
static unsigned long num_not_merged_types;
static unsigned long num_not_merged_types_in_same_scc;
static unsigned long num_not_merged_types_trees;
static unsigned long num_not_merged_types_in_same_scc_trees;
static unsigned long num_type_scc_trees;
static unsigned long total_scc_size;
static unsigned long num_sccs_read;
static unsigned long total_scc_size_merged;
static unsigned long num_sccs_merged;
static unsigned long num_scc_compares;
static unsigned long num_scc_compare_collisions;
1738
1739
1740 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1741 recursing through in-SCC tree edges. Returns true if the SCCs entered
1742 through T1 and T2 are equal and fills in *MAP with the pairs of
1743 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1744
1745 static bool
1746 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1747 {
1748 enum tree_code code;
1749
1750 /* Mark already visited nodes. */
1751 TREE_ASM_WRITTEN (t2) = 1;
1752
1753 /* Push the pair onto map. */
1754 (*map)[0] = t1;
1755 (*map)[1] = t2;
1756 *map = *map + 2;
1757
1758 /* Compare value-fields. */
1759 #define compare_values(X) \
1760 do { \
1761 if (X(t1) != X(t2)) \
1762 return false; \
1763 } while (0)
1764
1765 compare_values (TREE_CODE);
1766 code = TREE_CODE (t1);
1767
1768 if (!TYPE_P (t1))
1769 {
1770 compare_values (TREE_SIDE_EFFECTS);
1771 compare_values (TREE_CONSTANT);
1772 compare_values (TREE_READONLY);
1773 compare_values (TREE_PUBLIC);
1774 }
1775 compare_values (TREE_ADDRESSABLE);
1776 compare_values (TREE_THIS_VOLATILE);
1777 if (DECL_P (t1))
1778 compare_values (DECL_UNSIGNED);
1779 else if (TYPE_P (t1))
1780 compare_values (TYPE_UNSIGNED);
1781 if (TYPE_P (t1))
1782 compare_values (TYPE_ARTIFICIAL);
1783 else
1784 compare_values (TREE_NO_WARNING);
1785 compare_values (TREE_NOTHROW);
1786 compare_values (TREE_STATIC);
1787 if (code != TREE_BINFO)
1788 compare_values (TREE_PRIVATE);
1789 compare_values (TREE_PROTECTED);
1790 compare_values (TREE_DEPRECATED);
1791 if (TYPE_P (t1))
1792 {
1793 compare_values (TYPE_SATURATING);
1794 compare_values (TYPE_ADDR_SPACE);
1795 }
1796 else if (code == SSA_NAME)
1797 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1798
1799 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1800 {
1801 compare_values (TREE_INT_CST_LOW);
1802 compare_values (TREE_INT_CST_HIGH);
1803 }
1804
1805 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1806 {
1807 /* ??? No suitable compare routine available. */
1808 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1809 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1810 if (r1.cl != r2.cl
1811 || r1.decimal != r2.decimal
1812 || r1.sign != r2.sign
1813 || r1.signalling != r2.signalling
1814 || r1.canonical != r2.canonical
1815 || r1.uexp != r2.uexp)
1816 return false;
1817 for (unsigned i = 0; i < SIGSZ; ++i)
1818 if (r1.sig[i] != r2.sig[i])
1819 return false;
1820 }
1821
1822 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1823 if (!fixed_compare (EQ_EXPR,
1824 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1825 return false;
1826
1827
1828 /* We don't want to compare locations, so there is nothing do compare
1829 for TS_DECL_MINIMAL. */
1830
1831 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1832 {
1833 compare_values (DECL_MODE);
1834 compare_values (DECL_NONLOCAL);
1835 compare_values (DECL_VIRTUAL_P);
1836 compare_values (DECL_IGNORED_P);
1837 compare_values (DECL_ABSTRACT);
1838 compare_values (DECL_ARTIFICIAL);
1839 compare_values (DECL_USER_ALIGN);
1840 compare_values (DECL_PRESERVE_P);
1841 compare_values (DECL_EXTERNAL);
1842 compare_values (DECL_GIMPLE_REG_P);
1843 compare_values (DECL_ALIGN);
1844 if (code == LABEL_DECL)
1845 {
1846 compare_values (EH_LANDING_PAD_NR);
1847 compare_values (LABEL_DECL_UID);
1848 }
1849 else if (code == FIELD_DECL)
1850 {
1851 compare_values (DECL_PACKED);
1852 compare_values (DECL_NONADDRESSABLE_P);
1853 compare_values (DECL_OFFSET_ALIGN);
1854 }
1855 else if (code == VAR_DECL)
1856 {
1857 compare_values (DECL_HAS_DEBUG_EXPR_P);
1858 compare_values (DECL_NONLOCAL_FRAME);
1859 }
1860 if (code == RESULT_DECL
1861 || code == PARM_DECL
1862 || code == VAR_DECL)
1863 {
1864 compare_values (DECL_BY_REFERENCE);
1865 if (code == VAR_DECL
1866 || code == PARM_DECL)
1867 compare_values (DECL_HAS_VALUE_EXPR_P);
1868 }
1869 }
1870
1871 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1872 compare_values (DECL_REGISTER);
1873
1874 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1875 {
1876 compare_values (DECL_COMMON);
1877 compare_values (DECL_DLLIMPORT_P);
1878 compare_values (DECL_WEAK);
1879 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1880 compare_values (DECL_COMDAT);
1881 compare_values (DECL_VISIBILITY);
1882 compare_values (DECL_VISIBILITY_SPECIFIED);
1883 if (code == VAR_DECL)
1884 {
1885 compare_values (DECL_HARD_REGISTER);
1886 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1887 compare_values (DECL_IN_CONSTANT_POOL);
1888 compare_values (DECL_TLS_MODEL);
1889 }
1890 if (VAR_OR_FUNCTION_DECL_P (t1))
1891 compare_values (DECL_INIT_PRIORITY);
1892 }
1893
1894 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1895 {
1896 compare_values (DECL_BUILT_IN_CLASS);
1897 compare_values (DECL_STATIC_CONSTRUCTOR);
1898 compare_values (DECL_STATIC_DESTRUCTOR);
1899 compare_values (DECL_UNINLINABLE);
1900 compare_values (DECL_POSSIBLY_INLINED);
1901 compare_values (DECL_IS_NOVOPS);
1902 compare_values (DECL_IS_RETURNS_TWICE);
1903 compare_values (DECL_IS_MALLOC);
1904 compare_values (DECL_IS_OPERATOR_NEW);
1905 compare_values (DECL_DECLARED_INLINE_P);
1906 compare_values (DECL_STATIC_CHAIN);
1907 compare_values (DECL_NO_INLINE_WARNING_P);
1908 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1909 compare_values (DECL_NO_LIMIT_STACK);
1910 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1911 compare_values (DECL_PURE_P);
1912 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1913 compare_values (DECL_FINAL_P);
1914 compare_values (DECL_CXX_CONSTRUCTOR_P);
1915 compare_values (DECL_CXX_DESTRUCTOR_P);
1916 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1917 compare_values (DECL_FUNCTION_CODE);
1918 if (DECL_STATIC_DESTRUCTOR (t1))
1919 compare_values (DECL_FINI_PRIORITY);
1920 }
1921
1922 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1923 {
1924 compare_values (TYPE_MODE);
1925 compare_values (TYPE_STRING_FLAG);
1926 compare_values (TYPE_NO_FORCE_BLK);
1927 compare_values (TYPE_NEEDS_CONSTRUCTING);
1928 if (RECORD_OR_UNION_TYPE_P (t1))
1929 {
1930 compare_values (TYPE_TRANSPARENT_AGGR);
1931 compare_values (TYPE_FINAL_P);
1932 }
1933 else if (code == ARRAY_TYPE)
1934 compare_values (TYPE_NONALIASED_COMPONENT);
1935 compare_values (TYPE_PACKED);
1936 compare_values (TYPE_RESTRICT);
1937 compare_values (TYPE_USER_ALIGN);
1938 compare_values (TYPE_READONLY);
1939 compare_values (TYPE_PRECISION);
1940 compare_values (TYPE_ALIGN);
1941 compare_values (TYPE_ALIAS_SET);
1942 }
1943
1944 /* We don't want to compare locations, so there is nothing do compare
1945 for TS_EXP. */
1946
1947 /* BLOCKs are function local and we don't merge anything there, so
1948 simply refuse to merge. */
1949 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1950 return false;
1951
1952 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1953 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1954 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1955 return false;
1956
1957 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1958 if (memcmp (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2),
1959 sizeof (struct cl_target_option)) != 0)
1960 return false;
1961
1962 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1963 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1964 sizeof (struct cl_optimization)) != 0)
1965 return false;
1966
1967 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1968 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1969 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1970 return false;
1971
1972 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1973 compare_values (CONSTRUCTOR_NELTS);
1974
1975 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1976 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1977 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1978 IDENTIFIER_LENGTH (t1)) != 0)
1979 return false;
1980
1981 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1982 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1983 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1984 TREE_STRING_LENGTH (t1)) != 0)
1985 return false;
1986
1987 #undef compare_values
1988
1989
1990 /* Compare pointer fields. */
1991
1992 /* Recurse. Search & Replaced from DFS_write_tree_body.
1993 Folding the early checks into the compare_tree_edges recursion
1994 macro makes debugging way quicker as you are able to break on
1995 compare_tree_sccs_1 and simply finish until a call returns false
1996 to spot the SCC members with the difference. */
1997 #define compare_tree_edges(E1, E2) \
1998 do { \
1999 tree t1_ = (E1), t2_ = (E2); \
2000 if (t1_ != t2_ \
2001 && (!t1_ || !t2_ \
2002 || !TREE_VISITED (t2_) \
2003 || (!TREE_ASM_WRITTEN (t2_) \
2004 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
2005 return false; \
2006 /* Only non-NULL trees outside of the SCC may compare equal. */ \
2007 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
2008 } while (0)
2009
2010 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
2011 {
2012 if (code != IDENTIFIER_NODE)
2013 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
2014 }
2015
2016 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
2017 {
2018 unsigned i;
2019 /* Note that the number of elements for EXPR has already been emitted
2020 in EXPR's header (see streamer_write_tree_header). */
2021 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
2022 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
2023 }
2024
2025 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
2026 {
2027 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
2028 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
2029 }
2030
2031 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
2032 {
2033 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
2034 /* ??? Global decls from different TUs have non-matching
2035 TRANSLATION_UNIT_DECLs. Only consider a small set of
2036 decls equivalent, we should not end up merging others. */
2037 if ((code == TYPE_DECL
2038 || code == NAMESPACE_DECL
2039 || code == IMPORTED_DECL
2040 || code == CONST_DECL
2041 || (VAR_OR_FUNCTION_DECL_P (t1)
2042 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
2043 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
2044 ;
2045 else
2046 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
2047 }
2048
2049 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2050 {
2051 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
2052 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
2053 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
2054 if ((code == VAR_DECL
2055 || code == PARM_DECL)
2056 && DECL_HAS_VALUE_EXPR_P (t1))
2057 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
2058 if (code == VAR_DECL
2059 && DECL_HAS_DEBUG_EXPR_P (t1))
2060 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
2061 /* LTO specific edges. */
2062 if (code != FUNCTION_DECL
2063 && code != TRANSLATION_UNIT_DECL)
2064 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
2065 }
2066
2067 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2068 {
2069 if (code == FUNCTION_DECL)
2070 {
2071 tree a1, a2;
2072 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
2073 a1 || a2;
2074 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
2075 compare_tree_edges (a1, a2);
2076 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
2077 }
2078 else if (code == TYPE_DECL)
2079 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
2080 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
2081 }
2082
2083 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2084 {
2085 /* Make sure we don't inadvertently set the assembler name. */
2086 if (DECL_ASSEMBLER_NAME_SET_P (t1))
2087 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
2088 DECL_ASSEMBLER_NAME (t2));
2089 compare_tree_edges (DECL_SECTION_NAME (t1), DECL_SECTION_NAME (t2));
2090 compare_tree_edges (DECL_COMDAT_GROUP (t1), DECL_COMDAT_GROUP (t2));
2091 }
2092
2093 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2094 {
2095 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
2096 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
2097 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
2098 DECL_BIT_FIELD_REPRESENTATIVE (t2));
2099 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
2100 DECL_FIELD_BIT_OFFSET (t2));
2101 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
2102 }
2103
2104 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2105 {
2106 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
2107 DECL_FUNCTION_PERSONALITY (t2));
2108 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
2109 DECL_FUNCTION_SPECIFIC_TARGET (t2));
2110 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
2111 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
2112 }
2113
2114 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
2115 {
2116 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
2117 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
2118 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
2119 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
2120 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
2121 reconstructed during fixup. */
2122 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
2123 during fixup. */
2124 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
2125 /* ??? Global types from different TUs have non-matching
2126 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
2127 equal. */
2128 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
2129 ;
2130 else
2131 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
2132 /* TYPE_CANONICAL is re-computed during type merging, so do not
2133 compare it here. */
2134 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
2135 }
2136
2137 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
2138 {
2139 if (code == ENUMERAL_TYPE)
2140 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
2141 else if (code == ARRAY_TYPE)
2142 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
2143 else if (RECORD_OR_UNION_TYPE_P (t1))
2144 {
2145 tree f1, f2;
2146 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
2147 f1 || f2;
2148 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
2149 compare_tree_edges (f1, f2);
2150 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
2151 }
2152 else if (code == FUNCTION_TYPE
2153 || code == METHOD_TYPE)
2154 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
2155 if (!POINTER_TYPE_P (t1))
2156 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
2157 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
2158 }
2159
2160 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
2161 {
2162 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2163 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
2164 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
2165 }
2166
2167 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
2168 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
2169 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
2170
2171 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
2172 {
2173 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
2174 compare_tree_edges (TREE_OPERAND (t1, i),
2175 TREE_OPERAND (t2, i));
2176
2177 /* BLOCKs are function local and we don't merge anything there. */
2178 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
2179 return false;
2180 }
2181
2182 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
2183 {
2184 unsigned i;
2185 tree t;
2186 /* Lengths have already been compared above. */
2187 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
2188 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
2189 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
2190 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
2191 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
2192 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
2193 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
2194 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
2195 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
2196 }
2197
2198 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
2199 {
2200 unsigned i;
2201 tree index, value;
2202 /* Lengths have already been compared above. */
2203 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
2204 {
2205 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
2206 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
2207 }
2208 }
2209
2210 #undef compare_tree_edges
2211
2212 return true;
2213 }
2214
2215 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
2216 out MAP if they are equal. */
2217
2218 static bool
2219 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
2220 tree *map)
2221 {
2222 /* Assume SCC entry hashes are sorted after their cardinality. Which
2223 means we can simply take the first n-tuple of equal hashes
2224 (which is recorded as entry_len) and do n SCC entry candidate
2225 comparisons. */
2226 for (unsigned i = 0; i < pscc->entry_len; ++i)
2227 {
2228 tree *mapp = map;
2229 num_scc_compare_collisions++;
2230 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
2231 {
2232 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
2233 on the scc as all trees will be freed. */
2234 return true;
2235 }
2236 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
2237 the SCC prevails. */
2238 for (unsigned j = 0; j < scc->len; ++j)
2239 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
2240 }
2241
2242 return false;
2243 }
2244
2245 /* QSort sort function to sort a map of two pointers after the 2nd
2246 pointer. */
2247
2248 static int
2249 cmp_tree (const void *p1_, const void *p2_)
2250 {
2251 tree *p1 = (tree *)(const_cast<void *>(p1_));
2252 tree *p2 = (tree *)(const_cast<void *>(p2_));
2253 if (p1[1] == p2[1])
2254 return 0;
2255 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
2256 }
2257
/* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
   hash value SCC_HASH with an already recorded SCC.  Return true if
   that was successful, otherwise return false (in which case the SCC
   is recorded as a new prevailing candidate).  SCC_ENTRY_LEN is the
   number of SCC members sharing the smallest entry hash.  */

static bool
unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
	   unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
{
  bool unified_p = false;
  /* Build a temporary SCC descriptor on the stack; tree_scc has a
     one-element trailing array, hence the (len - 1) sizing.  */
  tree_scc *scc
    = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
  scc->next = NULL;
  scc->hash = scc_hash;
  scc->len = len;
  scc->entry_len = scc_entry_len;
  for (unsigned i = 0; i < len; ++i)
    {
      tree t = streamer_tree_cache_get_tree (cache, from + i);
      scc->entries[i] = t;
      /* Do not merge SCCs with local entities inside them.  Also do
	 not merge TRANSLATION_UNIT_DECLs.  */
      if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
	  || (VAR_OR_FUNCTION_DECL_P (t)
	      && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
	  || TREE_CODE (t) == LABEL_DECL)
	{
	  /* Avoid doing any work for these cases and do not worry to
	     record the SCCs for further merging.  */
	  return false;
	}
    }

  /* Look for the list of candidate SCCs to compare against.  */
  tree_scc **slot;
  slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
  if (*slot)
    {
      /* Try unifying against each candidate.  */
      num_scc_compares++;

      /* Set TREE_VISITED on the scc so we can easily identify tree nodes
	 outside of the scc when following tree edges.  Make sure
	 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
	 to track whether we visited the SCC member during the compare.
	 We cannot use TREE_VISITED on the pscc members as the extended
	 scc and pscc can overlap.  */
      for (unsigned i = 0; i < scc->len; ++i)
	{
	  TREE_VISITED (scc->entries[i]) = 1;
	  gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
	}

      /* MAP receives (prevailing, read) tree pairs from the compare.  */
      tree *map = XALLOCAVEC (tree, 2 * len);
      for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
	{
	  if (!compare_tree_sccs (pscc, scc, map))
	    continue;

	  /* Found an equal SCC.  */
	  unified_p = true;
	  num_scc_compare_collisions--;
	  num_sccs_merged++;
	  total_scc_size_merged += len;

#ifdef ENABLE_CHECKING
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = map[2*i+1];
	      enum tree_code code = TREE_CODE (t);
	      /* IDENTIFIER_NODEs should be singletons and are merged by the
		 streamer.  The others should be singletons, too, and we
		 should not merge them in any way.  */
	      gcc_assert (code != TRANSLATION_UNIT_DECL
			  && code != IDENTIFIER_NODE
			  && !streamer_handle_as_builtin_p (t));
	    }
#endif

	  /* Fixup the streamer cache with the prevailing nodes according
	     to the tree node mapping computed by compare_tree_sccs.  */
	  if (len == 1)
	    streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
	  else
	    {
	      /* Pair each cache index with its read tree, then sort both
		 MAP2 and MAP by the read tree's address so corresponding
		 entries line up index-to-prevailing-node.  */
	      tree *map2 = XALLOCAVEC (tree, 2 * len);
	      for (unsigned i = 0; i < len; ++i)
		{
		  map2[i*2] = (tree)(uintptr_t)(from + i);
		  map2[i*2+1] = scc->entries[i];
		}
	      qsort (map2, len, 2 * sizeof (tree), cmp_tree);
	      qsort (map, len, 2 * sizeof (tree), cmp_tree);
	      for (unsigned i = 0; i < len; ++i)
		streamer_tree_cache_replace_tree (cache, map[2*i],
						  (uintptr_t)map2[2*i]);
	    }

	  /* Free the tree nodes from the read SCC.  */
	  for (unsigned i = 0; i < len; ++i)
	    {
	      if (TYPE_P (scc->entries[i]))
		num_merged_types++;
	      ggc_free (scc->entries[i]);
	    }

	  break;
	}

      /* Reset TREE_VISITED if we didn't unify the SCC with another.  */
      if (!unified_p)
	for (unsigned i = 0; i < scc->len; ++i)
	  TREE_VISITED (scc->entries[i]) = 0;
    }

  /* If we didn't unify it to any candidate duplicate the relevant
     pieces to permanent storage and link it into the chain.  */
  if (!unified_p)
    {
      tree_scc *pscc
	= XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
      memcpy (pscc, scc, sizeof (tree_scc));
      pscc->next = (*slot);
      *slot = pscc;
    }
  return unified_p;
}
2384
2385
/* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
   RESOLUTIONS is the set of symbols picked by the linker (read from the
   resolution file when the linker plugin is being used).  Ownership of
   RESOLUTIONS passes to the created data_in and is released with it.  */

static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
		vec<ld_plugin_symbol_resolution_t> resolutions)
{
  /* The section layout is: header, decl-state blob, main tree stream,
     string table (see the offsets computed below).  */
  const struct lto_decl_header *header = (const struct lto_decl_header *) data;
  const int decl_offset = sizeof (struct lto_decl_header);
  const int main_offset = decl_offset + header->decl_state_size;
  const int string_offset = main_offset + header->main_size;
  struct lto_input_block ib_main;
  struct data_in *data_in;
  unsigned int i;
  const uint32_t *data_ptr, *data_end;
  uint32_t num_decl_states;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
				header->string_size, resolutions);

  /* We do not uniquify the pre-loaded cache entries, those are middle-end
     internal types that should not be merged.  */

  /* Read the global declarations and types.  */
  while (ib_main.p < ib_main.len)
    {
      tree t;
      unsigned from = data_in->reader_cache->nodes.length ();
      /* Read and uniquify SCCs as in the input stream.  */
      enum LTO_tags tag = streamer_read_record_start (&ib_main);
      if (tag == LTO_tree_scc)
	{
	  unsigned len_;
	  unsigned scc_entry_len;
	  hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
					      &scc_entry_len);
	  /* lto_input_scc appended the SCC members to the reader cache;
	     their count must match the streamed length.  */
	  unsigned len = data_in->reader_cache->nodes.length () - from;
	  gcc_assert (len == len_);

	  total_scc_size += len;
	  num_sccs_read++;

	  /* We have the special case of size-1 SCCs that are pre-merged
	     by means of identifier and string sharing for example.
	     ??? Maybe we should avoid streaming those as SCCs.  */
	  tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from);
	  if (len == 1
	      && (TREE_CODE (first) == IDENTIFIER_NODE
		  || TREE_CODE (first) == INTEGER_CST
		  || TREE_CODE (first) == TRANSLATION_UNIT_DECL
		  || streamer_handle_as_builtin_p (first)))
	    continue;

	  /* Try to unify the SCC with already existing ones.  Merging
	     was already done at WPA time, so skip it in LTRANS.  */
	  if (!flag_ltrans
	      && unify_scc (data_in->reader_cache, from,
			    len, scc_entry_len, scc_hash))
	    continue;

	  /* Do remaining fixup tasks for prevailing nodes.  */
	  bool seen_type = false;
	  bool not_merged_type_same_scc = false;
	  bool not_merged_type_not_same_scc = false;
	  for (unsigned i = 0; i < len; ++i)
	    {
	      tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
						     from + i);
	      /* For statistics, see if the old code would have merged
		 the type.  */
	      if (TYPE_P (t)
		  && (flag_lto_report || (flag_wpa && flag_lto_report_wpa)))
		{
		  tree newt = gimple_register_type (t);
		  if (newt != t)
		    {
		      num_not_merged_types++;
		      unsigned j;
		      /* Check if we can never merge the types because
			 they are in the same SCC and thus the old
			 code was broken.  */
		      for (j = 0; j < len; ++j)
			if (i != j
			    && streamer_tree_cache_get_tree
				 (data_in->reader_cache, from + j) == newt)
			  {
			    num_not_merged_types_in_same_scc++;
			    not_merged_type_same_scc = true;
			    break;
			  }
		      if (j == len)
			not_merged_type_not_same_scc = true;
		    }
		}
	      /* Reconstruct the type variant and pointer-to/reference-to
		 chains.  */
	      if (TYPE_P (t))
		{
		  seen_type = true;
		  num_prevailing_types++;
		  lto_fixup_prevailing_type (t);
		}
	      /* Compute the canonical type of all types.
		 ??? Should be able to assert that !TYPE_CANONICAL.  */
	      if (TYPE_P (t) && !TYPE_CANONICAL (t))
		TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
	      /* Link shared INTEGER_CSTs into TYPE_CACHED_VALUEs of its
		 type which is also member of this SCC.  */
	      if (TREE_CODE (t) == INTEGER_CST
		  && !TREE_OVERFLOW (t))
		cache_integer_cst (t);
	      /* Register TYPE_DECLs with the debuginfo machinery.  */
	      if (!flag_wpa
		  && TREE_CODE (t) == TYPE_DECL)
		debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
	      if (!flag_ltrans)
		{
		  /* Register variables and functions with the
		     symbol table.  */
		  if (TREE_CODE (t) == VAR_DECL)
		    lto_register_var_decl_in_symtab (data_in, t, from + i);
		  else if (TREE_CODE (t) == FUNCTION_DECL
			   && !DECL_BUILT_IN (t))
		    lto_register_function_decl_in_symtab (data_in, t, from + i);
		  /* Scan the tree for references to global functions or
		     variables and record those for later fixup.  */
		  if (mentions_vars_p (t))
		    vec_safe_push (tree_with_vars, t);
		}
	    }
	  if (not_merged_type_same_scc)
	    {
	      num_not_merged_types_in_same_scc_trees += len;
	      num_not_merged_types_trees += len;
	    }
	  else if (not_merged_type_not_same_scc)
	    num_not_merged_types_trees += len;
	  if (seen_type)
	    num_type_scc_trees += len;
	}
      else
	{
	  /* Pickle stray references.  */
	  t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
	  gcc_assert (t && data_in->reader_cache->nodes.length () == from);
	}
    }

  /* Read in lto_in_decl_state objects.  */
  data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
  data_end =
     (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
  num_decl_states = *data_ptr++;

  /* The first decl state is the global one.  */
  gcc_assert (num_decl_states > 0);
  decl_data->global_decl_state = lto_new_in_decl_state ();
  data_ptr = lto_read_in_decl_state (data_in, data_ptr,
				     decl_data->global_decl_state);

  /* Read in per-function decl states and enter them in hash table.  */
  decl_data->function_decl_states =
    htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);

  for (i = 1; i < num_decl_states; i++)
    {
      struct lto_in_decl_state *state = lto_new_in_decl_state ();
      void **slot;

      data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
      slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
      gcc_assert (*slot == NULL);
      *slot = state;
    }

  if (data_ptr != data_end)
    internal_error ("bytecode stream: garbage at the end of symbols section");

  /* Set the current decl state to be the global state. */
  decl_data->current_decl_state = decl_data->global_decl_state;

  lto_data_in_delete (data_in);
}
2572
2573 /* Custom version of strtoll, which is not portable. */
2574
2575 static HOST_WIDEST_INT
2576 lto_parse_hex (const char *p)
2577 {
2578 HOST_WIDEST_INT ret = 0;
2579
2580 for (; *p != '\0'; ++p)
2581 {
2582 char c = *p;
2583 unsigned char part;
2584 ret <<= 4;
2585 if (c >= '0' && c <= '9')
2586 part = c - '0';
2587 else if (c >= 'a' && c <= 'f')
2588 part = c - 'a' + 10;
2589 else if (c >= 'A' && c <= 'F')
2590 part = c - 'A' + 10;
2591 else
2592 internal_error ("could not parse hex number");
2593 ret |= part;
2594 }
2595
2596 return ret;
2597 }
2598
2599 /* Read resolution for file named FILE_NAME. The resolution is read from
2600 RESOLUTION. */
2601
2602 static void
2603 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2604 {
2605 /* We require that objects in the resolution file are in the same
2606 order as the lto1 command line. */
2607 unsigned int name_len;
2608 char *obj_name;
2609 unsigned int num_symbols;
2610 unsigned int i;
2611 struct lto_file_decl_data *file_data;
2612 splay_tree_node nd = NULL;
2613
2614 if (!resolution)
2615 return;
2616
2617 name_len = strlen (file->filename);
2618 obj_name = XNEWVEC (char, name_len + 1);
2619 fscanf (resolution, " "); /* Read white space. */
2620
2621 fread (obj_name, sizeof (char), name_len, resolution);
2622 obj_name[name_len] = '\0';
2623 if (filename_cmp (obj_name, file->filename) != 0)
2624 internal_error ("unexpected file name %s in linker resolution file. "
2625 "Expected %s", obj_name, file->filename);
2626 if (file->offset != 0)
2627 {
2628 int t;
2629 char offset_p[17];
2630 HOST_WIDEST_INT offset;
2631 t = fscanf (resolution, "@0x%16s", offset_p);
2632 if (t != 1)
2633 internal_error ("could not parse file offset");
2634 offset = lto_parse_hex (offset_p);
2635 if (offset != file->offset)
2636 internal_error ("unexpected offset");
2637 }
2638
2639 free (obj_name);
2640
2641 fscanf (resolution, "%u", &num_symbols);
2642
2643 for (i = 0; i < num_symbols; i++)
2644 {
2645 int t;
2646 unsigned index;
2647 unsigned HOST_WIDE_INT id;
2648 char r_str[27];
2649 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2650 unsigned int j;
2651 unsigned int lto_resolution_str_len =
2652 sizeof (lto_resolution_str) / sizeof (char *);
2653 res_pair rp;
2654
2655 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2656 &index, &id, r_str);
2657 if (t != 3)
2658 internal_error ("invalid line in the resolution file");
2659
2660 for (j = 0; j < lto_resolution_str_len; j++)
2661 {
2662 if (strcmp (lto_resolution_str[j], r_str) == 0)
2663 {
2664 r = (enum ld_plugin_symbol_resolution) j;
2665 break;
2666 }
2667 }
2668 if (j == lto_resolution_str_len)
2669 internal_error ("invalid resolution in the resolution file");
2670
2671 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2672 {
2673 nd = lto_splay_tree_lookup (file_ids, id);
2674 if (nd == NULL)
2675 internal_error ("resolution sub id %wx not in object file", id);
2676 }
2677
2678 file_data = (struct lto_file_decl_data *)nd->value;
2679 /* The indexes are very sparse. To save memory save them in a compact
2680 format that is only unpacked later when the subfile is processed. */
2681 rp.res = r;
2682 rp.index = index;
2683 file_data->respairs.safe_push (rp);
2684 if (file_data->max_index < index)
2685 file_data->max_index = index;
2686 }
2687 }
2688
/* List of file_decl_datas, kept in linker order by
   create_subid_section_table.  */
struct file_data_list
{
  /* Head and tail of the list; entries chain through
     lto_file_decl_data::next.  */
  struct lto_file_decl_data *first, *last;
};
2694
2695 /* Is the name for a id'ed LTO section? */
2696
2697 static int
2698 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2699 {
2700 const char *s;
2701
2702 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2703 return 0;
2704 s = strrchr (name, '.');
2705 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2706 }
2707
2708 /* Create file_data of each sub file id */
2709
2710 static int
2711 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2712 struct file_data_list *list)
2713 {
2714 struct lto_section_slot s_slot, *new_slot;
2715 unsigned HOST_WIDE_INT id;
2716 splay_tree_node nd;
2717 void **hash_slot;
2718 char *new_name;
2719 struct lto_file_decl_data *file_data;
2720
2721 if (!lto_section_with_id (ls->name, &id))
2722 return 1;
2723
2724 /* Find hash table of sub module id */
2725 nd = lto_splay_tree_lookup (file_ids, id);
2726 if (nd != NULL)
2727 {
2728 file_data = (struct lto_file_decl_data *)nd->value;
2729 }
2730 else
2731 {
2732 file_data = ggc_alloc_lto_file_decl_data ();
2733 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2734 file_data->id = id;
2735 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
2736 lto_splay_tree_insert (file_ids, id, file_data);
2737
2738 /* Maintain list in linker order */
2739 if (!list->first)
2740 list->first = file_data;
2741 if (list->last)
2742 list->last->next = file_data;
2743 list->last = file_data;
2744 }
2745
2746 /* Copy section into sub module hash table */
2747 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2748 s_slot.name = new_name;
2749 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2750 gcc_assert (*hash_slot == NULL);
2751
2752 new_slot = XDUP (struct lto_section_slot, ls);
2753 new_slot->name = new_name;
2754 *hash_slot = new_slot;
2755 return 1;
2756 }
2757
2758 /* Read declarations and other initializations for a FILE_DATA. */
2759
2760 static void
2761 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2762 {
2763 const char *data;
2764 size_t len;
2765 vec<ld_plugin_symbol_resolution_t>
2766 resolutions = vNULL;
2767 int i;
2768 res_pair *rp;
2769
2770 /* Create vector for fast access of resolution. We do this lazily
2771 to save memory. */
2772 resolutions.safe_grow_cleared (file_data->max_index + 1);
2773 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2774 resolutions[rp->index] = rp->res;
2775 file_data->respairs.release ();
2776
2777 file_data->renaming_hash_table = lto_create_renaming_table ();
2778 file_data->file_name = file->filename;
2779 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2780 if (data == NULL)
2781 {
2782 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2783 return;
2784 }
2785 /* Frees resolutions */
2786 lto_read_decls (file_data, data, resolutions);
2787 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2788 }
2789
2790 /* Finalize FILE_DATA in FILE and increase COUNT. */
2791
2792 static int
2793 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2794 int *count)
2795 {
2796 lto_file_finalize (file_data, file);
2797 if (cgraph_dump_file)
2798 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2799 file_data->file_name, file_data->id);
2800 (*count)++;
2801 return 0;
2802 }
2803
2804 /* Generate a TREE representation for all types and external decls
2805 entities in FILE.
2806
2807 Read all of the globals out of the file. Then read the cgraph
2808 and process the .o index into the cgraph nodes so that it can open
2809 the .o file to load the functions and ipa information. */
2810
2811 static struct lto_file_decl_data *
2812 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2813 {
2814 struct lto_file_decl_data *file_data = NULL;
2815 splay_tree file_ids;
2816 htab_t section_hash_table;
2817 struct lto_section_slot *section;
2818 struct file_data_list file_list;
2819 struct lto_section_list section_list;
2820
2821 memset (&section_list, 0, sizeof (struct lto_section_list));
2822 section_hash_table = lto_obj_build_section_table (file, &section_list);
2823
2824 /* Find all sub modules in the object and put their sections into new hash
2825 tables in a splay tree. */
2826 file_ids = lto_splay_tree_new ();
2827 memset (&file_list, 0, sizeof (struct file_data_list));
2828 for (section = section_list.first; section != NULL; section = section->next)
2829 create_subid_section_table (section, file_ids, &file_list);
2830
2831 /* Add resolutions to file ids */
2832 lto_resolution_read (file_ids, resolution_file, file);
2833
2834 /* Finalize each lto file for each submodule in the merged object */
2835 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2836 lto_create_files_from_ids (file, file_data, count);
2837
2838 splay_tree_delete (file_ids);
2839 htab_delete (section_hash_table);
2840
2841 return file_list.first;
2842 }
2843
2844 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2845 #define LTO_MMAP_IO 1
2846 #endif
2847
2848 #if LTO_MMAP_IO
2849 /* Page size of machine is used for mmap and munmap calls. */
2850 static size_t page_mask;
2851 #endif
2852
/* Get the section data of length LEN from FILENAME starting at
   OFFSET.  The data segment must be freed by the caller when the
   caller is finished.  Returns NULL if all was not well (in practice
   fatal_error aborts first).  */

static char *
lto_read_section_data (struct lto_file_decl_data *file_data,
		       intptr_t offset, size_t len)
{
  char *result;
  static int fd = -1;
  static char *fd_name;
#if LTO_MMAP_IO
  intptr_t computed_len;
  intptr_t computed_offset;
  intptr_t diff;
#endif

  /* Keep a single-entry file-descriptor cache.  The last file we
     touched will get closed at exit.
     ??? Eventually we want to add a more sophisticated larger cache
     or rather fix function body streaming to not stream them in
     practically random order.  */
  if (fd != -1
      && filename_cmp (fd_name, file_data->file_name) != 0)
    {
      free (fd_name);
      close (fd);
      fd = -1;
    }
  if (fd == -1)
    {
      fd = open (file_data->file_name, O_RDONLY|O_BINARY);
      if (fd == -1)
	{
	  fatal_error ("Cannot open %s", file_data->file_name);
	  return NULL;
	}
      fd_name = xstrdup (file_data->file_name);
    }

#if LTO_MMAP_IO
  if (!page_mask)
    {
      size_t page_size = sysconf (_SC_PAGE_SIZE);
      page_mask = ~(page_size - 1);
    }

  /* mmap requires a page-aligned offset; map from the enclosing page
     boundary and return a pointer DIFF bytes into the mapping.
     free_section_data recomputes the same alignment to munmap.  */
  computed_offset = offset & page_mask;
  diff = offset - computed_offset;
  computed_len = len + diff;

  result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
			  fd, computed_offset);
  if (result == MAP_FAILED)
    {
      fatal_error ("Cannot map %s", file_data->file_name);
      return NULL;
    }

  return result + diff;
#else
  result = (char *) xmalloc (len);
  if (lseek (fd, offset, SEEK_SET) != offset
      || read (fd, result, len) != (ssize_t) len)
    {
      free (result);
      fatal_error ("Cannot read %s", file_data->file_name);
      result = NULL;
    }
#ifdef __MINGW32__
  /* Native windows doesn't supports delayed unlink on opened file.  So
     we close file here again.  This produces higher I/O load, but at least
     it prevents to have dangling file handles preventing unlink.  */
  free (fd_name);
  fd_name = NULL;
  close (fd);
  fd = -1;
#endif
  return result;
#endif
}
2934
2935
2936 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2937 NAME will be NULL unless the section type is for a function
2938 body. */
2939
2940 static const char *
2941 get_section_data (struct lto_file_decl_data *file_data,
2942 enum lto_section_type section_type,
2943 const char *name,
2944 size_t *len)
2945 {
2946 htab_t section_hash_table = file_data->section_hash_table;
2947 struct lto_section_slot *f_slot;
2948 struct lto_section_slot s_slot;
2949 const char *section_name = lto_get_section_name (section_type, name, file_data);
2950 char *data = NULL;
2951
2952 *len = 0;
2953 s_slot.name = section_name;
2954 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2955 if (f_slot)
2956 {
2957 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2958 *len = f_slot->len;
2959 }
2960
2961 free (CONST_CAST (char *, section_name));
2962 return data;
2963 }
2964
2965
2966 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2967 starts at OFFSET and has LEN bytes. */
2968
2969 static void
2970 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2971 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2972 const char *name ATTRIBUTE_UNUSED,
2973 const char *offset, size_t len ATTRIBUTE_UNUSED)
2974 {
2975 #if LTO_MMAP_IO
2976 intptr_t computed_len;
2977 intptr_t computed_offset;
2978 intptr_t diff;
2979 #endif
2980
2981 #if LTO_MMAP_IO
2982 computed_offset = ((intptr_t) offset) & page_mask;
2983 diff = (intptr_t) offset - computed_offset;
2984 computed_len = len + diff;
2985
2986 munmap ((caddr_t) computed_offset, computed_len);
2987 #else
2988 free (CONST_CAST(char *, offset));
2989 #endif
2990 }
2991
2992 static lto_file *current_lto_file;
2993
2994 /* Helper for qsort; compare partitions and return one with smaller size.
2995 We sort from greatest to smallest so parallel build doesn't stale on the
2996 longest compilation being executed too late. */
2997
2998 static int
2999 cmp_partitions_size (const void *a, const void *b)
3000 {
3001 const struct ltrans_partition_def *pa
3002 = *(struct ltrans_partition_def *const *)a;
3003 const struct ltrans_partition_def *pb
3004 = *(struct ltrans_partition_def *const *)b;
3005 return pb->insns - pa->insns;
3006 }
3007
3008 /* Helper for qsort; compare partitions and return one with smaller order. */
3009
3010 static int
3011 cmp_partitions_order (const void *a, const void *b)
3012 {
3013 const struct ltrans_partition_def *pa
3014 = *(struct ltrans_partition_def *const *)a;
3015 const struct ltrans_partition_def *pb
3016 = *(struct ltrans_partition_def *const *)b;
3017 int ordera = -1, orderb = -1;
3018
3019 if (lto_symtab_encoder_size (pa->encoder))
3020 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->symbol.order;
3021 if (lto_symtab_encoder_size (pb->encoder))
3022 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->symbol.order;
3023 return orderb - ordera;
3024 }
3025
/* Write all output files in WPA mode and the file with the list of
   LTRANS units.  Each partition is streamed to its own object file
   whose name is derived from the LTRANS list file name, and the list
   file receives one object file name per line.  */

static void
lto_wpa_write_files (void)
{
  unsigned i, n_sets;
  lto_file *file;
  ltrans_partition part;
  FILE *ltrans_output_list_stream;
  char *temp_filename;
  size_t blen;

  /* Open the LTRANS output list.  */
  if (!ltrans_output_list)
    fatal_error ("no LTRANS output list filename provided");
  ltrans_output_list_stream = fopen (ltrans_output_list, "w");
  if (ltrans_output_list_stream == NULL)
    fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);

  timevar_push (TV_WHOPR_WPA);

  /* Account every encoded symbol for the statistics report.  */
  FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
    lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);

  /* Find out statics that need to be promoted
     to globals with hidden visibility because they are accessed from multiple
     partitions.  */
  lto_promote_cross_file_statics ();

  timevar_pop (TV_WHOPR_WPA);

  timevar_push (TV_WHOPR_WPA_IO);

  /* Generate a prefix for the LTRANS unit files.  The buffer is sized
     for the prefix plus the widest "%u.o" suffix ("2147483648.o" has as
     many digits as any 32-bit unsigned value can produce).  */
  blen = strlen (ltrans_output_list);
  temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
  strcpy (temp_filename, ltrans_output_list);
  /* Strip a trailing ".out" extension from the prefix, if present.  */
  if (blen > sizeof (".out")
      && strcmp (temp_filename + blen - sizeof (".out") + 1,
		 ".out") == 0)
    temp_filename[blen - sizeof (".out") + 1] = '\0';
  blen = strlen (temp_filename);

  n_sets = ltrans_partitions.length ();

  /* Sort partitions by size so small ones are compiled last.
     FIXME: Even when not reordering we may want to output one list for parallel make
     and other for final link command.  */
  ltrans_partitions.qsort (flag_toplevel_reorder
			   ? cmp_partitions_size
			   : cmp_partitions_order);
  for (i = 0; i < n_sets; i++)
    {
      size_t len;
      ltrans_partition part = ltrans_partitions[i];

      /* Write all the nodes in SET.  */
      sprintf (temp_filename + blen, "%u.o", i);
      file = lto_obj_file_open (temp_filename, true);
      if (!file)
	fatal_error ("lto_obj_file_open() failed");

      if (!quiet_flag)
	fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
      if (cgraph_dump_file)
	{
	  lto_symtab_encoder_iterator lsei;

	  fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
		   part->name, temp_filename, part->insns);
	  fprintf (cgraph_dump_file, " Symbols in partition: ");
	  for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
	       lsei_next_in_partition (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
	    }
	  /* Boundary symbols are those the encoder knows about but that
	     live in some other partition.  */
	  fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
	  for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
	       lsei_next (&lsei))
	    {
	      symtab_node node = lsei_node (lsei);
	      if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
		{
		  fprintf (cgraph_dump_file, "%s ", symtab_node_asm_name (node));
		  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
		  if (cnode
		      && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
		    fprintf (cgraph_dump_file, "(body included)");
		  else
		    {
		      varpool_node *vnode = dyn_cast <varpool_node> (node);
		      if (vnode
			  && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
			fprintf (cgraph_dump_file, "(initializer included)");
		    }
		}
	    }
	  fprintf (cgraph_dump_file, "\n");
	}
      /* Only partition 0 may legitimately be empty.  */
      gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);

      lto_set_current_out_file (file);

      ipa_write_optimization_summaries (part->encoder);

      lto_set_current_out_file (NULL);
      lto_obj_file_close (file);
      free (file);
      /* The encoder is consumed by the write above; drop our reference.  */
      part->encoder = NULL;

      /* Append this partition's object file name to the LTRANS list.  */
      len = strlen (temp_filename);
      if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
	  || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
	fatal_error ("writing to LTRANS output list %s: %m",
		     ltrans_output_list);
    }

  lto_stats.num_output_files += n_sets;

  /* Close the LTRANS output list.  fclose also flushes, so a failure
     here can indicate a lost write.  */
  if (fclose (ltrans_output_list_stream))
    fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);

  free_ltrans_partitions();
  free (temp_filename);

  timevar_pop (TV_WHOPR_WPA_IO);
}
3156
3157
/* If TT is a variable or function decl replace it with its
   prevailing variant.  Sets the enclosing scope's FIXED flag whenever
   a candidate field was seen, even if the prevailing decl turned out
   to be the same tree.  */
#define LTO_SET_PREVAIL(tt) \
  do {\
    if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
	&& (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
      { \
        tt = lto_symtab_prevailing_decl (tt); \
	fixed = true; \
      } \
  } while (0)

/* Ensure that TT isn't a replaceable var or function decl.  */
#define LTO_NO_PREVAIL(tt) \
  gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))

/* Given a tree T replace all fields referring to variables or functions
   with their prevailing variant.  T was collected into tree_with_vars
   because mentions_vars_p found such a field, so at least one fixup
   must happen here; this is verified at the end.  */
static void
lto_fixup_prevailing_decls (tree t)
{
  enum tree_code code = TREE_CODE (t);
  bool fixed = false;

  /* CONSTRUCTOR and TREE_BINFO nodes are never passed to this walk.  */
  gcc_checking_assert (code != CONSTRUCTOR && code != TREE_BINFO);
  LTO_NO_PREVAIL (TREE_TYPE (t));
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    LTO_NO_PREVAIL (TREE_CHAIN (t));
  if (DECL_P (t))
    {
      LTO_NO_PREVAIL (DECL_NAME (t));
      LTO_SET_PREVAIL (DECL_CONTEXT (t));
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  LTO_SET_PREVAIL (DECL_SIZE (t));
	  LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
	  LTO_SET_PREVAIL (DECL_INITIAL (t));
	  LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
	  LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
	{
	  LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
	  LTO_NO_PREVAIL (DECL_SECTION_NAME (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
	{
	  LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
	  LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
	  LTO_NO_PREVAIL (DECL_VINDEX (t));
	}
      if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
	LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
      if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
	{
	  LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
	  LTO_NO_PREVAIL (DECL_QUALIFIER (t));
	  LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
	  LTO_NO_PREVAIL (DECL_FCONTEXT (t));
	}
    }
  else if (TYPE_P (t))
    {
      LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
      /* Type sizes may reference global VAR_DECLs (e.g. in variably
	 modified types), so those fields do get replaced.  */
      LTO_SET_PREVAIL (TYPE_SIZE (t));
      LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
      LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
      LTO_NO_PREVAIL (TYPE_NAME (t));

      LTO_SET_PREVAIL (TYPE_MINVAL (t));
      LTO_SET_PREVAIL (TYPE_MAXVAL (t));
      LTO_NO_PREVAIL (t->type_non_common.binfo);

      LTO_SET_PREVAIL (TYPE_CONTEXT (t));

      LTO_NO_PREVAIL (TYPE_CANONICAL (t));
      LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
      LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
    }
  else if (EXPR_P (t))
    {
      int i;
      for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	LTO_SET_PREVAIL (TREE_OPERAND (t, i));
    }
  else
    {
      switch (code)
	{
	case TREE_LIST:
	  LTO_SET_PREVAIL (TREE_VALUE (t));
	  LTO_SET_PREVAIL (TREE_PURPOSE (t));
	  /* NOTE(review): the assert below contradicts the
	     LTO_SET_PREVAIL just above it -- if TREE_PURPOSE is any
	     VAR_DECL or FUNCTION_DECL (replaced or not) the assert
	     fires.  Presumably TREE_PURPOSE is never a var/function
	     decl in practice; confirm and drop one of the two lines.  */
	  LTO_NO_PREVAIL (TREE_PURPOSE (t));
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  /* If we fixed nothing, then we missed something seen by
     mentions_vars_p.  */
  gcc_checking_assert (fixed);
}
#undef LTO_SET_PREVAIL
#undef LTO_NO_PREVAIL
3263
3264 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
3265 replaces var and function decls with the corresponding prevailing def. */
3266
3267 static void
3268 lto_fixup_state (struct lto_in_decl_state *state)
3269 {
3270 unsigned i, si;
3271 struct lto_tree_ref_table *table;
3272
3273 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
3274 we still need to walk from all DECLs to find the reachable
3275 FUNCTION_DECLs and VAR_DECLs. */
3276 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
3277 {
3278 table = &state->streams[si];
3279 for (i = 0; i < table->size; i++)
3280 {
3281 tree *tp = table->trees + i;
3282 if (VAR_OR_FUNCTION_DECL_P (*tp)
3283 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
3284 *tp = lto_symtab_prevailing_decl (*tp);
3285 }
3286 }
3287 }
3288
3289 /* A callback of htab_traverse. Just extracts a state from SLOT
3290 and calls lto_fixup_state. */
3291
3292 static int
3293 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
3294 {
3295 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
3296 lto_fixup_state (state);
3297 return 1;
3298 }
3299
3300 /* Fix the decls from all FILES. Replaces each decl with the corresponding
3301 prevailing one. */
3302
3303 static void
3304 lto_fixup_decls (struct lto_file_decl_data **files)
3305 {
3306 unsigned int i;
3307 tree t;
3308
3309 if (tree_with_vars)
3310 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
3311 lto_fixup_prevailing_decls (t);
3312
3313 for (i = 0; files[i]; i++)
3314 {
3315 struct lto_file_decl_data *file = files[i];
3316 struct lto_in_decl_state *state = file->global_decl_state;
3317 lto_fixup_state (state);
3318
3319 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
3320 }
3321 }
3322
/* All input file decl data, flattened into one NULL-terminated array
   by lto_flatten_files; the GTY length mirrors lto_stats.num_input_files.  */
static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
3324
3325 /* Turn file datas for sub files into a single array, so that they look
3326 like separate files for further passes. */
3327
3328 static void
3329 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
3330 {
3331 struct lto_file_decl_data *n, *next;
3332 int i, k;
3333
3334 lto_stats.num_input_files = count;
3335 all_file_decl_data
3336 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
3337 /* Set the hooks so that all of the ipa passes can read in their data. */
3338 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3339 for (i = 0, k = 0; i < last_file_ix; i++)
3340 {
3341 for (n = orig[i]; n != NULL; n = next)
3342 {
3343 all_file_decl_data[k++] = n;
3344 next = n->next;
3345 n->next = NULL;
3346 }
3347 }
3348 all_file_decl_data[k] = NULL;
3349 gcc_assert (k == count);
3350 }
3351
/* Input file data before flattening (i.e. before splitting them into
   subfiles to support incremental linking).  */
static int real_file_count;
static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
3356
3357 static void print_lto_report_1 (void);
3358
/* Read all the symbols from the input files FNAMES.  NFILES is the
   number of files requested in the command line.  Instantiate a
   global call graph by aggregating all the sub-graphs found in each
   file.  After this returns, all decls have been merged to their
   prevailing variants and IPA summaries have been read.  */

static void
read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
{
  unsigned int i, last_file_ix;
  FILE *resolution;
  int count = 0;
  struct lto_file_decl_data **decl_data;
  void **res;
  symtab_node snode;

  init_cgraph ();

  timevar_push (TV_IPA_LTO_DECL_IN);

  /* Pre-flattening storage for the per-file decl data; GC-visible via
     real_file_decl_data/real_file_count.  */
  real_file_decl_data
    = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
  real_file_count = nfiles;

  /* Read the resolution file.  */
  resolution = NULL;
  if (resolution_file_name)
    {
      int t;
      unsigned num_objects;

      resolution = fopen (resolution_file_name, "r");
      if (resolution == NULL)
	fatal_error ("could not open symbol resolution file: %m");

      t = fscanf (resolution, "%u", &num_objects);
      gcc_assert (t == 1);

      /* True, since the plugin splits the archives.  */
      gcc_assert (num_objects == nfiles);
    }
  cgraph_state = CGRAPH_LTO_STREAMING;

  /* Set up the type-merging data structures used while streaming in.  */
  type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				     tree_int_map_eq, NULL);
  type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
  gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
  gcc_obstack_init (&tree_scc_hash_obstack);
  tree_scc_hash.create (4096);

  if (!quiet_flag)
    fprintf (stderr, "Reading object files:");

  /* Read all of the object files specified on the command line.  */
  for (i = 0, last_file_ix = 0; i < nfiles; ++i)
    {
      struct lto_file_decl_data *file_data = NULL;
      if (!quiet_flag)
	{
	  fprintf (stderr, " %s", fnames[i]);
	  fflush (stderr);
	}

      current_lto_file = lto_obj_file_open (fnames[i], false);
      if (!current_lto_file)
	break;

      /* lto_file_read also advances COUNT by the number of sub-files
	 found in this object.  */
      file_data = lto_file_read (current_lto_file, resolution, &count);
      if (!file_data)
	{
	  lto_obj_file_close (current_lto_file);
	  free (current_lto_file);
	  current_lto_file = NULL;
	  break;
	}

      decl_data[last_file_ix++] = file_data;

      lto_obj_file_close (current_lto_file);
      free (current_lto_file);
      current_lto_file = NULL;
    }

  lto_flatten_files (decl_data, count, last_file_ix);
  /* Redundant with the assignment inside lto_flatten_files, but kept
     explicit here.  */
  lto_stats.num_input_files = count;
  ggc_free(decl_data);
  real_file_decl_data = NULL;

  if (resolution_file_name)
    fclose (resolution);

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();

  /* Free gimple type merging datastructures.  */
  htab_delete (gimple_types);
  gimple_types = NULL;
  htab_delete (type_hash_cache);
  type_hash_cache = NULL;
  free (type_pair_cache);
  type_pair_cache = NULL;
  tree_scc_hash.dispose ();
  obstack_free (&tree_scc_hash_obstack, NULL);
  free_gimple_type_tables ();
  ggc_collect ();

  /* Set the hooks so that all of the ipa passes can read in their data.  */
  lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);

  timevar_pop (TV_IPA_LTO_DECL_IN);

  if (!quiet_flag)
    fprintf (stderr, "\nReading the callgraph\n");

  timevar_push (TV_IPA_LTO_CGRAPH_IO);
  /* Read the symtab.  */
  input_symtab ();

  /* Store resolutions into the symbol table.  */

  FOR_EACH_SYMBOL (snode)
    if (symtab_real_symbol_p (snode)
	&& snode->symbol.lto_file_data
	&& snode->symbol.lto_file_data->resolution_map
	&& (res = pointer_map_contains (snode->symbol.lto_file_data->resolution_map,
					snode->symbol.decl)))
      snode->symbol.resolution
	= (enum ld_plugin_symbol_resolution)(size_t)*res;
  /* The per-file resolution maps are no longer needed once copied
     into the symtab nodes.  */
  for (i = 0; all_file_decl_data[i]; i++)
    if (all_file_decl_data[i]->resolution_map)
      {
	pointer_map_destroy (all_file_decl_data[i]->resolution_map);
	all_file_decl_data[i]->resolution_map = NULL;
      }

  timevar_pop (TV_IPA_LTO_CGRAPH_IO);

  if (!quiet_flag)
    fprintf (stderr, "Merging declarations\n");

  timevar_push (TV_IPA_LTO_DECL_MERGE);
  /* Merge global decls.  In ltrans mode we read merged cgraph, we do not
     need to care about resolving symbols again, we only need to replace
     duplicated declarations read from the callgraph and from function
     sections.  */
  if (!flag_ltrans)
    {
      lto_symtab_merge_decls ();

      /* If there were errors during symbol merging bail out, we have no
	 good way to recover here.  */
      if (seen_error ())
	fatal_error ("errors during merging of translation units");

      /* Fixup all decls.  */
      lto_fixup_decls (all_file_decl_data);
    }
  if (tree_with_vars)
    ggc_free (tree_with_vars);
  tree_with_vars = NULL;
  ggc_collect ();

  timevar_pop (TV_IPA_LTO_DECL_MERGE);
  /* Each pass will set the appropriate timer.  */

  if (!quiet_flag)
    fprintf (stderr, "Reading summaries\n");

  /* Read the IPA summary data.  */
  if (flag_ltrans)
    ipa_read_optimization_summaries ();
  else
    ipa_read_summaries ();

  /* The encoders and global decl states served their purpose during
     summary streaming; release them.  */
  for (i = 0; all_file_decl_data[i]; i++)
    {
      gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
      lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
      all_file_decl_data[i]->symtab_node_encoder = NULL;
      lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
      all_file_decl_data[i]->global_decl_state = NULL;
      all_file_decl_data[i]->current_decl_state = NULL;
    }

  /* Finally merge the cgraph according to the decl merging decisions.  */
  timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Before merging:\n");
      dump_symtab (cgraph_dump_file);
    }
  lto_symtab_merge_symbols ();
  ggc_collect ();
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);

  timevar_push (TV_IPA_LTO_DECL_INIT_IO);

  /* Indicate that the cgraph is built and ready.  */
  cgraph_function_flags_ready = true;

  timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
  ggc_free (all_file_decl_data);
  all_file_decl_data = NULL;
}
3565
3566
/* Materialize all the bodies for all the nodes in the callgraph.
   In WPA mode only decls are materialized, not full bodies.  */

static void
materialize_cgraph (void)
{
  tree decl;
  struct cgraph_node *node;
  unsigned i;
  timevar_id_t lto_timer;

  if (!quiet_flag)
    fprintf (stderr,
	     flag_wpa ? "Materializing decls:" : "Reading function bodies:");

  /* Now that we have input the cgraph, we need to clear all of the aux
     nodes and read the functions if we are not running in WPA mode.  */
  timevar_push (TV_IPA_LTO_GIMPLE_IN);

  FOR_EACH_FUNCTION (node)
    {
      /* Nodes without lto_file_data (e.g. created during merging) have
	 no streamed body to read.  */
      if (node->symbol.lto_file_data)
	{
	  lto_materialize_function (node);
	  lto_stats.num_input_cgraph_nodes++;
	}
    }

  timevar_pop (TV_IPA_LTO_GIMPLE_IN);

  /* Start the appropriate timer depending on the mode that we are
     operating in.  */
  lto_timer = (flag_wpa) ? TV_WHOPR_WPA
	      : (flag_ltrans) ? TV_WHOPR_LTRANS
	      : TV_LTO;
  timevar_push (lto_timer);

  current_function_decl = NULL;
  set_cfun (NULL);

  /* Inform the middle end about the global variables we have seen.  */
  FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
    rest_of_decl_compilation (decl, 1, 0);

  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_pop (lto_timer);
}
3615
3616
3617 /* Show various memory usage statistics related to LTO. */
3618 static void
3619 print_lto_report_1 (void)
3620 {
3621 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3622 fprintf (stderr, "%s statistics\n", pfx);
3623
3624 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3625 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3626 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3627 if (flag_wpa && tree_scc_hash.is_created ())
3628 {
3629 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3630 "collision ratio: %f\n", pfx,
3631 (long) tree_scc_hash.size (),
3632 (long) tree_scc_hash.elements (),
3633 tree_scc_hash.collisions ());
3634 hash_table<tree_scc_hasher>::iterator hiter;
3635 tree_scc *scc, *max_scc = NULL;
3636 unsigned max_length = 0;
3637 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3638 {
3639 unsigned length = 0;
3640 tree_scc *s = scc;
3641 for (; s; s = s->next)
3642 length++;
3643 if (length > max_length)
3644 {
3645 max_length = length;
3646 max_scc = scc;
3647 }
3648 }
3649 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3650 pfx, max_length, max_scc->len);
3651 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3652 num_scc_compares, num_scc_compare_collisions,
3653 num_scc_compare_collisions / (double) num_scc_compares);
3654 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3655 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3656 total_scc_size_merged);
3657 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3658 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3659 pfx, num_prevailing_types, num_type_scc_trees);
3660 fprintf (stderr, "[%s] Old merging code merges an additional %lu types"
3661 " of which %lu are in the same SCC with their "
3662 "prevailing variant (%lu and %lu associated trees)\n",
3663 pfx, num_not_merged_types, num_not_merged_types_in_same_scc,
3664 num_not_merged_types_trees,
3665 num_not_merged_types_in_same_scc_trees);
3666 }
3667
3668 print_gimple_types_stats (pfx);
3669 print_lto_report (pfx);
3670 }
3671
/* Perform whole program analysis (WPA) on the callgraph and write out the
   optimization plan.  Runs the regular IPA passes, partitions the
   symtab, and streams each partition via lto_wpa_write_files.  */

static void
do_whole_program_analysis (void)
{
  symtab_node node;

  timevar_start (TV_PHASE_OPT_GEN);

  /* Note that since we are in WPA mode, materialize_cgraph will not
     actually read in all the function bodies.  It only materializes
     the decls and cgraph nodes so that analysis can be performed.  */
  materialize_cgraph ();

  /* Reading in the cgraph uses different timers, start timing WPA now.  */
  timevar_push (TV_WHOPR_WPA);

  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }

  cgraph_function_flags_ready = true;

  if (cgraph_dump_file)
    dump_symtab (cgraph_dump_file);
  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_IPA_SSA;

  /* Run the IPA passes, then discard anything they proved dead.  */
  execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
  symtab_remove_unreachable_nodes (false, dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);

  /* We are about to launch the final LTRANS phase, stop the WPA timer.  */
  timevar_pop (TV_WHOPR_WPA);

  /* Split the symtab into LTRANS partitions according to the chosen
     partitioning strategy.  */
  timevar_push (TV_WHOPR_PARTITIONING);
  if (flag_lto_partition_1to1)
    lto_1_to_1_map ();
  else if (flag_lto_partition_max)
    lto_max_map ();
  else
    lto_balanced_map ();

  /* AUX pointers are used by partitioning code to bookkeep number of
     partitions symbol is in.  This is no longer needed.  */
  FOR_EACH_SYMBOL (node)
    node->symbol.aux = NULL;

  lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
  timevar_pop (TV_WHOPR_PARTITIONING);

  timevar_stop (TV_PHASE_OPT_GEN);
  timevar_start (TV_PHASE_STREAM_OUT);

  if (!quiet_flag)
    {
      fprintf (stderr, "\nStreaming out");
      fflush (stderr);
    }
  lto_wpa_write_files ();
  if (!quiet_flag)
    fprintf (stderr, "\n");

  timevar_stop (TV_PHASE_STREAM_OUT);

  ggc_collect ();
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }

  /* Show the LTO report before launching LTRANS.  */
  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
    print_lto_report_1 ();
  if (mem_report_wpa)
    dump_memory_report (true);
}
3762
3763
/* Cached EH personality decl; created lazily by lto_eh_personality.  */
static GTY(()) tree lto_eh_personality_decl;
3765
3766 /* Return the LTO personality function decl. */
3767
3768 tree
3769 lto_eh_personality (void)
3770 {
3771 if (!lto_eh_personality_decl)
3772 {
3773 /* Use the first personality DECL for our personality if we don't
3774 support multiple ones. This ensures that we don't artificially
3775 create the need for them in a single-language program. */
3776 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3777 lto_eh_personality_decl = first_personality_decl;
3778 else
3779 lto_eh_personality_decl = lhd_gcc_personality ();
3780 }
3781
3782 return lto_eh_personality_decl;
3783 }
3784
/* Set the process name based on the LTO mode.  The flags are tested
   independently; if more than one were ever set, the last matching
   setproctitle call would win.  */

static void
lto_process_name (void)
{
  if (flag_lto)
    setproctitle ("lto1-lto");
  if (flag_wpa)
    setproctitle ("lto1-wpa");
  if (flag_ltrans)
    setproctitle ("lto1-ltrans");
}
3797
3798
/* Initialize the LTO front end.  The calls are order-dependent:
   streamer hooks must be in place before the reader is initialized.  */

static void
lto_init (void)
{
  lto_process_name ();
  lto_streamer_hooks_init ();
  lto_reader_init ();
  /* Section hooks are installed with a NULL file table for now; the
     real table is supplied later by read_cgraph_and_symbols.  */
  lto_set_in_hooks (NULL, get_section_data, free_section_data);
  memset (&lto_stats, 0, sizeof (lto_stats));
  bitmap_obstack_initialize (NULL);
  gimple_register_cfg_hooks ();
}
3812
3813
/* Main entry point for the GIMPLE front end.  This front end has
   three main personalities:

   - LTO (-flto).  All the object files on the command line are
     loaded in memory and processed as a single translation unit.
     This is the traditional link-time optimization behavior.

   - WPA (-fwpa).  Only the callgraph and summary information for
     files in the command file are loaded.  A single callgraph
     (without function bodies) is instantiated for the whole set of
     files.  IPA passes are only allowed to analyze the call graph
     and make transformation decisions.  The callgraph is
     partitioned, each partition is written to a new object file
     together with the transformation decisions.

   - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
     summary files from running again.  Since WPA computed summary
     information and decided what transformations to apply, LTRANS
     simply applies them.  */

void
lto_main (void)
{
  /* LTO is called as a front end, even though it is not a front end.
     Because it is called as a front end, TV_PHASE_PARSING and
     TV_PARSE_GLOBAL are active, and we need to turn them off while
     doing LTO.  Later we turn them back on so they are active up in
     toplev.c.  */
  timevar_pop (TV_PARSE_GLOBAL);
  timevar_stop (TV_PHASE_PARSING);

  timevar_start (TV_PHASE_SETUP);

  /* Initialize the LTO front end.  */
  lto_init ();

  timevar_stop (TV_PHASE_SETUP);
  timevar_start (TV_PHASE_STREAM_IN);

  /* Read all the symbols and call graph from all the files in the
     command line.  */
  read_cgraph_and_symbols (num_in_fnames, in_fnames);

  timevar_stop (TV_PHASE_STREAM_IN);

  if (!seen_error ())
    {
      /* If WPA is enabled analyze the whole call graph and create an
	 optimization plan.  Otherwise, read in all the function
	 bodies and continue with optimization.  */
      if (flag_wpa)
	do_whole_program_analysis ();
      else
	{
	  struct varpool_node *vnode;

	  timevar_start (TV_PHASE_OPT_GEN);

	  materialize_cgraph ();
	  /* In plain LTO mode statics still need promotion; in LTRANS
	     mode WPA already did it.  */
	  if (!flag_ltrans)
	    lto_promote_statics_nonwpa ();

	  /* Let the middle end know that we have read and merged all of
	     the input files.  */
	  compile ();

	  timevar_stop (TV_PHASE_OPT_GEN);

	  /* FIXME lto, if the processes spawned by WPA fail, we miss
	     the chance to print WPA's report, so WPA will call
	     print_lto_report before launching LTRANS.  If LTRANS was
	     launched directly by the driver we would not need to do
	     this.  */
	  if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
	    print_lto_report_1 ();

	  /* Record the global variables.  */
	  FOR_EACH_DEFINED_VARIABLE (vnode)
	    vec_safe_push (lto_global_var_decls, vnode->symbol.decl);
	}
    }

  /* Here we make LTO pretend to be a parser.  */
  timevar_start (TV_PHASE_PARSING);
  timevar_push (TV_PARSE_GLOBAL);
}
3900
3901 #include "gt-lto-lto.h"