1 /* Top-level LTO routines.
2 Copyright (C) 2009-2014 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "opts.h"
25 #include "toplev.h"
26 #include "tree.h"
27 #include "stor-layout.h"
28 #include "diagnostic-core.h"
29 #include "tm.h"
30 #include "cgraph.h"
31 #include "tree-ssa-operands.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
34 #include "bitmap.h"
35 #include "ipa-prop.h"
36 #include "common.h"
37 #include "debug.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "lto.h"
44 #include "lto-tree.h"
45 #include "lto-streamer.h"
46 #include "lto-section-names.h"
47 #include "tree-streamer.h"
48 #include "splay-tree.h"
49 #include "lto-partition.h"
50 #include "data-streamer.h"
51 #include "context.h"
52 #include "pass_manager.h"
53 #include "ipa-inline.h"
54 #include "params.h"
55
56
57 /* Number of parallel tasks to run, -1 if we want to use GNU Make jobserver. */
58 static int lto_parallelism;
59
60 static GTY(()) tree first_personality_decl;
61
62 /* Returns a hash code for P. */
63
64 static hashval_t
65 hash_name (const void *p)
66 {
67 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
68 return (hashval_t) htab_hash_string (ds->name);
69 }
70
71
72 /* Returns nonzero if P1 and P2 are equal. */
73
74 static int
75 eq_name (const void *p1, const void *p2)
76 {
77 const struct lto_section_slot *s1 =
78 (const struct lto_section_slot *) p1;
79 const struct lto_section_slot *s2 =
80 (const struct lto_section_slot *) p2;
81
82 return strcmp (s1->name, s2->name) == 0;
83 }
84
85 /* Free an lto_section_slot. */
86
87 static void
88 free_with_string (void *arg)
89 {
90 struct lto_section_slot *s = (struct lto_section_slot *)arg;
91
92 free (CONST_CAST (char *, s->name));
93 free (arg);
94 }
95
96 /* Create the section hash table. */
97
98 htab_t
99 lto_obj_create_section_hash_table (void)
100 {
101 return htab_create (37, hash_name, eq_name, free_with_string);
102 }
103
104 /* Delete an allocated integer KEY in the splay tree. */
105
106 static void
107 lto_splay_tree_delete_id (splay_tree_key key)
108 {
109 free ((void *) key);
110 }
111
112 /* Compare splay tree node ids A and B. */
113
114 static int
115 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
116 {
117 unsigned HOST_WIDE_INT ai;
118 unsigned HOST_WIDE_INT bi;
119
120 ai = *(unsigned HOST_WIDE_INT *) a;
121 bi = *(unsigned HOST_WIDE_INT *) b;
122
123 if (ai < bi)
124 return -1;
125 else if (ai > bi)
126 return 1;
127 return 0;
128 }
129
130 /* Look up splay tree node by ID in splay tree T. */
131
132 static splay_tree_node
133 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
134 {
135 return splay_tree_lookup (t, (splay_tree_key) &id);
136 }
137
138 /* Check if KEY has ID. */
139
140 static bool
141 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
142 {
143 return *(unsigned HOST_WIDE_INT *) key == id;
144 }
145
146 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
147 The ID is allocated separately because we need HOST_WIDE_INTs which may
148 be wider than a splay_tree_key. */
149
150 static void
151 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
152 struct lto_file_decl_data *file_data)
153 {
154 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
155 *idp = id;
156 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
157 }
158
159 /* Create a splay tree. */
160
161 static splay_tree
162 lto_splay_tree_new (void)
163 {
164 return splay_tree_new (lto_splay_tree_compare_ids,
165 lto_splay_tree_delete_id,
166 NULL);
167 }
168
169 /* Return true when NODE has a clone that is analyzed (i.e. we need
170 to load its body even if the node itself is not needed). */
171
172 static bool
173 has_analyzed_clone_p (struct cgraph_node *node)
174 {
175 struct cgraph_node *orig = node;
176 node = node->clones;
177 if (node)
178 while (node != orig)
179 {
180 if (node->analyzed)
181 return true;
182 if (node->clones)
183 node = node->clones;
184 else if (node->next_sibling_clone)
185 node = node->next_sibling_clone;
186 else
187 {
188 while (node != orig && !node->next_sibling_clone)
189 node = node->clone_of;
190 if (node != orig)
191 node = node->next_sibling_clone;
192 }
193 }
194 return false;
195 }
196
197 /* Read the function body for the function associated with NODE. */
198
199 static void
200 lto_materialize_function (struct cgraph_node *node)
201 {
202 tree decl;
203
204 decl = node->decl;
205 /* Read in functions with a body (analyzed nodes)
206 and also functions that are needed to produce virtual clones. */
207 if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
208 || node->used_as_abstract_origin
209 || has_analyzed_clone_p (node))
210 {
211 /* Clones don't need to be read. */
212 if (node->clone_of)
213 return;
214 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
215 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
216 }
217
218 /* Let the middle end know about the function. */
219 rest_of_decl_compilation (decl, 1, 0);
220 }
221
222
223 /* Decode the content of memory pointed to by DATA in the in decl
224 state object STATE. DATA_IN points to a data_in structure for
225 decoding. Return the address after the decoded object in the
226 input. */
227
228 static const uint32_t *
229 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
230 struct lto_in_decl_state *state)
231 {
232 uint32_t ix;
233 tree decl;
234 uint32_t i, j;
235
236 ix = *data++;
237 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
238 if (TREE_CODE (decl) != FUNCTION_DECL)
239 {
240 gcc_assert (decl == void_type_node);
241 decl = NULL_TREE;
242 }
243 state->fn_decl = decl;
244
245 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
246 {
247 uint32_t size = *data++;
248 tree *decls = ggc_vec_alloc<tree> (size);
249
250 for (j = 0; j < size; j++)
251 decls[j] = streamer_tree_cache_get_tree (data_in->reader_cache, data[j]);
252
253 state->streams[i].size = size;
254 state->streams[i].trees = decls;
255 data += size;
256 }
257
258 return data;
259 }
260
261
262 /* Global canonical type table. */
263 static htab_t gimple_canonical_types;
264 static pointer_map <hashval_t> *canonical_type_hash_cache;
265 static unsigned long num_canonical_type_hash_entries;
266 static unsigned long num_canonical_type_hash_queries;
267
268 static hashval_t iterative_hash_canonical_type (tree type, hashval_t val);
269 static hashval_t gimple_canonical_type_hash (const void *p);
270 static void gimple_register_canonical_type_1 (tree t, hashval_t hash);
271
272 /* Return a hash value for gimple type TYPE.
273
274 The hash value returned is equal for types considered compatible
275 by gimple_canonical_types_compatible_p. */
276
277 static hashval_t
278 hash_canonical_type (tree type)
279 {
280 hashval_t v;
281
282 /* Combine a few common features of types so that types are grouped into
283 smaller sets; when searching for existing matching types to merge,
284 only existing types having the same features as the new type will be
285 checked. */
286 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
287 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
288
289 /* Incorporate common features of numerical types. */
290 if (INTEGRAL_TYPE_P (type)
291 || SCALAR_FLOAT_TYPE_P (type)
292 || FIXED_POINT_TYPE_P (type)
293 || TREE_CODE (type) == OFFSET_TYPE
294 || POINTER_TYPE_P (type))
295 {
296 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
297 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
298 }
299
300 if (VECTOR_TYPE_P (type))
301 {
302 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
303 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
304 }
305
306 if (TREE_CODE (type) == COMPLEX_TYPE)
307 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
308
309 /* For pointer and reference types, fold in information about the type
310 pointed to but do not recurse to the pointed-to type. */
311 if (POINTER_TYPE_P (type))
312 {
313 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
314 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
315 }
316
317 /* For integer types hash only the string flag. */
318 if (TREE_CODE (type) == INTEGER_TYPE)
319 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
320
321 /* For array types hash the domain bounds and the string flag. */
322 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
323 {
324 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
325 /* OMP lowering can introduce error_mark_node in place of
326 random local decls in types. */
327 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
328 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
329 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
330 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
331 }
332
333 /* Recurse for aggregates with a single element type. */
334 if (TREE_CODE (type) == ARRAY_TYPE
335 || TREE_CODE (type) == COMPLEX_TYPE
336 || TREE_CODE (type) == VECTOR_TYPE)
337 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
338
339 /* Incorporate function return and argument types. */
340 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
341 {
342 unsigned na;
343 tree p;
344
345 /* For method types also incorporate their parent class. */
346 if (TREE_CODE (type) == METHOD_TYPE)
347 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
348
349 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
350
351 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
352 {
353 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
354 na++;
355 }
356
357 v = iterative_hash_hashval_t (na, v);
358 }
359
360 if (RECORD_OR_UNION_TYPE_P (type))
361 {
362 unsigned nf;
363 tree f;
364
365 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
366 if (TREE_CODE (f) == FIELD_DECL)
367 {
368 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
369 nf++;
370 }
371
372 v = iterative_hash_hashval_t (nf, v);
373 }
374
375 return v;
376 }
377
378 /* Return a hash value for gimple type TYPE combined with VAL. */
379
380 static hashval_t
381 iterative_hash_canonical_type (tree type, hashval_t val)
382 {
383 hashval_t v;
384 /* An already processed type. */
385 if (TYPE_CANONICAL (type))
386 {
387 type = TYPE_CANONICAL (type);
388 v = gimple_canonical_type_hash (type);
389 }
390 else
391 {
392 /* Canonical types should not be able to form SCCs by design; this
393 recursion is just because we do not register canonical types in
394 optimal order. To avoid quadratic behavior also register the
395 type here. */
396 v = hash_canonical_type (type);
397 gimple_register_canonical_type_1 (type, v);
398 }
399 return iterative_hash_hashval_t (v, val);
400 }
401
402 /* Returns the hash for a canonical type P. */
403
404 static hashval_t
405 gimple_canonical_type_hash (const void *p)
406 {
407 num_canonical_type_hash_queries++;
408 hashval_t *slot
409 = canonical_type_hash_cache->contains (CONST_CAST_TREE ((const_tree) p));
410 gcc_assert (slot != NULL);
411 return *slot;
412 }
413
414
415 /* The TYPE_CANONICAL merging machinery. It should closely resemble
416 the middle-end types_compatible_p function. It needs to avoid
417 claiming types are different for types that should be treated
418 the same with respect to TBAA. Canonical types are also used
419 for IL consistency checks via the useless_type_conversion_p
420 predicate which does not handle all type kinds itself but falls
421 back to pointer-comparison of TYPE_CANONICAL for aggregates
422 for example. */
423
424 /* Return true iff T1 and T2 are structurally identical as far as
425 TBAA is concerned. */
426
427 static bool
428 gimple_canonical_types_compatible_p (tree t1, tree t2)
429 {
430 /* Before starting to set up the SCC machinery handle simple cases. */
431
432 /* Check first for the obvious case of pointer identity. */
433 if (t1 == t2)
434 return true;
435
436 /* Check that we have two types to compare. */
437 if (t1 == NULL_TREE || t2 == NULL_TREE)
438 return false;
439
440 /* If the types have been previously registered and found equal
441 they still are. */
442 if (TYPE_CANONICAL (t1)
443 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
444 return true;
445
446 /* Can't be the same type if the types don't have the same code. */
447 if (TREE_CODE (t1) != TREE_CODE (t2))
448 return false;
449
450 /* Qualifiers do not matter for canonical type comparison purposes. */
451
452 /* Void types and nullptr types are always the same. */
453 if (TREE_CODE (t1) == VOID_TYPE
454 || TREE_CODE (t1) == NULLPTR_TYPE)
455 return true;
456
457 /* Can't be the same type if they have different modes. */
458 if (TYPE_MODE (t1) != TYPE_MODE (t2))
459 return false;
460
461 /* Non-aggregate types can be handled cheaply. */
462 if (INTEGRAL_TYPE_P (t1)
463 || SCALAR_FLOAT_TYPE_P (t1)
464 || FIXED_POINT_TYPE_P (t1)
465 || TREE_CODE (t1) == VECTOR_TYPE
466 || TREE_CODE (t1) == COMPLEX_TYPE
467 || TREE_CODE (t1) == OFFSET_TYPE
468 || POINTER_TYPE_P (t1))
469 {
470 /* Can't be the same type if they have different sign or precision. */
471 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
472 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
473 return false;
474
475 if (TREE_CODE (t1) == INTEGER_TYPE
476 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
477 return false;
478
479 /* For canonical type comparisons we do not want to build SCCs
480 so we cannot compare pointed-to types. But we can, for now,
481 require the same pointed-to type kind and match what
482 useless_type_conversion_p would do. */
483 if (POINTER_TYPE_P (t1))
484 {
485 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
486 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
487 return false;
488
489 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
490 return false;
491 }
492
493 /* Tail-recurse to components. */
494 if (TREE_CODE (t1) == VECTOR_TYPE
495 || TREE_CODE (t1) == COMPLEX_TYPE)
496 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
497 TREE_TYPE (t2));
498
499 return true;
500 }
501
502 /* Do type-specific comparisons. */
503 switch (TREE_CODE (t1))
504 {
505 case ARRAY_TYPE:
506 /* Array types are the same if the element types are the same and
507 the number of elements is the same. */
508 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
509 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
510 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
511 return false;
512 else
513 {
514 tree i1 = TYPE_DOMAIN (t1);
515 tree i2 = TYPE_DOMAIN (t2);
516
517 /* For an incomplete external array, the type domain can be
518 NULL_TREE. Check this condition also. */
519 if (i1 == NULL_TREE && i2 == NULL_TREE)
520 return true;
521 else if (i1 == NULL_TREE || i2 == NULL_TREE)
522 return false;
523 else
524 {
525 tree min1 = TYPE_MIN_VALUE (i1);
526 tree min2 = TYPE_MIN_VALUE (i2);
527 tree max1 = TYPE_MAX_VALUE (i1);
528 tree max2 = TYPE_MAX_VALUE (i2);
529
530 /* The minimum/maximum values have to be the same. */
531 if ((min1 == min2
532 || (min1 && min2
533 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
534 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
535 || operand_equal_p (min1, min2, 0))))
536 && (max1 == max2
537 || (max1 && max2
538 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
539 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
540 || operand_equal_p (max1, max2, 0)))))
541 return true;
542 else
543 return false;
544 }
545 }
546
547 case METHOD_TYPE:
548 case FUNCTION_TYPE:
549 /* Function types are the same if the return type and argument types
550 are the same. */
551 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
552 return false;
553
554 if (!comp_type_attributes (t1, t2))
555 return false;
556
557 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
558 return true;
559 else
560 {
561 tree parms1, parms2;
562
563 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
564 parms1 && parms2;
565 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
566 {
567 if (!gimple_canonical_types_compatible_p
568 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
569 return false;
570 }
571
572 if (parms1 || parms2)
573 return false;
574
575 return true;
576 }
577
578 case RECORD_TYPE:
579 case UNION_TYPE:
580 case QUAL_UNION_TYPE:
581 {
582 tree f1, f2;
583
584 /* For aggregate types, all the fields must be the same. */
585 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
586 f1 || f2;
587 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
588 {
589 /* Skip non-fields. */
590 while (f1 && TREE_CODE (f1) != FIELD_DECL)
591 f1 = TREE_CHAIN (f1);
592 while (f2 && TREE_CODE (f2) != FIELD_DECL)
593 f2 = TREE_CHAIN (f2);
594 if (!f1 || !f2)
595 break;
596 /* The fields must have the same name, offset and type. */
597 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
598 || !gimple_compare_field_offset (f1, f2)
599 || !gimple_canonical_types_compatible_p
600 (TREE_TYPE (f1), TREE_TYPE (f2)))
601 return false;
602 }
603
604 /* If one aggregate has more fields than the other, they
605 are not the same. */
606 if (f1 || f2)
607 return false;
608
609 return true;
610 }
611
612 default:
613 gcc_unreachable ();
614 }
615 }
616
617
618 /* Returns nonzero if P1 and P2 are equal. */
619
620 static int
621 gimple_canonical_type_eq (const void *p1, const void *p2)
622 {
623 const_tree t1 = (const_tree) p1;
624 const_tree t2 = (const_tree) p2;
625 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
626 CONST_CAST_TREE (t2));
627 }
628
629 /* Main worker for gimple_register_canonical_type. */
630
631 static void
632 gimple_register_canonical_type_1 (tree t, hashval_t hash)
633 {
634 void **slot;
635
636 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t));
637
638 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
639 if (*slot)
640 {
641 tree new_type = (tree)(*slot);
642 gcc_checking_assert (new_type != t);
643 TYPE_CANONICAL (t) = new_type;
644 }
645 else
646 {
647 TYPE_CANONICAL (t) = t;
648 *slot = (void *) t;
649 /* Cache the just computed hash value. */
650 num_canonical_type_hash_entries++;
651 bool existed_p;
652 hashval_t *hslot = canonical_type_hash_cache->insert (t, &existed_p);
653 gcc_assert (!existed_p);
654 *hslot = hash;
655 }
656 }
657
658 /* Register type T in the global type table gimple_types and set
659 TYPE_CANONICAL of T accordingly.
660 This is used by LTO to merge structurally equivalent types for
661 type-based aliasing purposes across different TUs and languages.
662
663 ??? This merging does not exactly match how the tree.c middle-end
664 functions will assign TYPE_CANONICAL when new types are created
665 during optimization (which at least happens for pointer and array
666 types). */
667
668 static void
669 gimple_register_canonical_type (tree t)
670 {
671 if (TYPE_CANONICAL (t))
672 return;
673
674 gimple_register_canonical_type_1 (t, hash_canonical_type (t));
675 }
676
677 /* Re-compute TYPE_CANONICAL for NODE and related types. */
678
679 static void
680 lto_register_canonical_types (tree node, bool first_p)
681 {
682 if (!node
683 || !TYPE_P (node))
684 return;
685
686 if (first_p)
687 TYPE_CANONICAL (node) = NULL_TREE;
688
689 if (POINTER_TYPE_P (node)
690 || TREE_CODE (node) == COMPLEX_TYPE
691 || TREE_CODE (node) == ARRAY_TYPE)
692 lto_register_canonical_types (TREE_TYPE (node), first_p);
693
694 if (!first_p)
695 gimple_register_canonical_type (node);
696 }
697
698
700 /* Remember trees that contain references to declarations. */
700 static GTY(()) vec <tree, va_gc> *tree_with_vars;
701
702 #define CHECK_VAR(tt) \
703 do \
704 { \
705 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
706 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
707 return true; \
708 } while (0)
709
710 #define CHECK_NO_VAR(tt) \
711 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
712
713 /* Check presence of pointers to decls in fields of a tree_typed T. */
714
715 static inline bool
716 mentions_vars_p_typed (tree t)
717 {
718 CHECK_NO_VAR (TREE_TYPE (t));
719 return false;
720 }
721
722 /* Check presence of pointers to decls in fields of a tree_common T. */
723
724 static inline bool
725 mentions_vars_p_common (tree t)
726 {
727 if (mentions_vars_p_typed (t))
728 return true;
729 CHECK_NO_VAR (TREE_CHAIN (t));
730 return false;
731 }
732
733 /* Check presence of pointers to decls in fields of a decl_minimal T. */
734
735 static inline bool
736 mentions_vars_p_decl_minimal (tree t)
737 {
738 if (mentions_vars_p_common (t))
739 return true;
740 CHECK_NO_VAR (DECL_NAME (t));
741 CHECK_VAR (DECL_CONTEXT (t));
742 return false;
743 }
744
745 /* Check presence of pointers to decls in fields of a decl_common T. */
746
747 static inline bool
748 mentions_vars_p_decl_common (tree t)
749 {
750 if (mentions_vars_p_decl_minimal (t))
751 return true;
752 CHECK_VAR (DECL_SIZE (t));
753 CHECK_VAR (DECL_SIZE_UNIT (t));
754 CHECK_VAR (DECL_INITIAL (t));
755 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
756 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
757 return false;
758 }
759
760 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
761
762 static inline bool
763 mentions_vars_p_decl_with_vis (tree t)
764 {
765 if (mentions_vars_p_decl_common (t))
766 return true;
767
768 /* Accessor macro has side-effects, use field-name here. */
769 CHECK_NO_VAR (t->decl_with_vis.assembler_name);
770 return false;
771 }
772
773 /* Check presence of pointers to decls in fields of a decl_non_common T. */
774
775 static inline bool
776 mentions_vars_p_decl_non_common (tree t)
777 {
778 if (mentions_vars_p_decl_with_vis (t))
779 return true;
780 CHECK_NO_VAR (DECL_ARGUMENT_FLD (t));
781 CHECK_NO_VAR (DECL_RESULT_FLD (t));
782 CHECK_NO_VAR (DECL_VINDEX (t));
783 return false;
784 }
785
786 /* Check presence of pointers to decls in fields of a FUNCTION_DECL T. */
787
788 static bool
789 mentions_vars_p_function (tree t)
790 {
791 if (mentions_vars_p_decl_non_common (t))
792 return true;
793 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
794 return false;
795 }
796
797 /* Check presence of pointers to decls in fields of a field_decl T. */
798
799 static bool
800 mentions_vars_p_field_decl (tree t)
801 {
802 if (mentions_vars_p_decl_common (t))
803 return true;
804 CHECK_VAR (DECL_FIELD_OFFSET (t));
805 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
806 CHECK_NO_VAR (DECL_QUALIFIER (t));
807 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
808 CHECK_NO_VAR (DECL_FCONTEXT (t));
809 return false;
810 }
811
812 /* Check presence of pointers to decls in fields of a type T. */
813
814 static bool
815 mentions_vars_p_type (tree t)
816 {
817 if (mentions_vars_p_common (t))
818 return true;
819 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
820 CHECK_VAR (TYPE_SIZE (t));
821 CHECK_VAR (TYPE_SIZE_UNIT (t));
822 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
823 CHECK_NO_VAR (TYPE_NAME (t));
824
825 CHECK_VAR (TYPE_MINVAL (t));
826 CHECK_VAR (TYPE_MAXVAL (t));
827
828 /* Accessor is for derived node types only. */
829 CHECK_NO_VAR (t->type_non_common.binfo);
830
831 CHECK_VAR (TYPE_CONTEXT (t));
832 CHECK_NO_VAR (TYPE_CANONICAL (t));
833 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
834 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
835 return false;
836 }
837
838 /* Check presence of pointers to decls in fields of a BINFO T. */
839
840 static bool
841 mentions_vars_p_binfo (tree t)
842 {
843 unsigned HOST_WIDE_INT i, n;
844
845 if (mentions_vars_p_common (t))
846 return true;
847 CHECK_VAR (BINFO_VTABLE (t));
848 CHECK_NO_VAR (BINFO_OFFSET (t));
849 CHECK_NO_VAR (BINFO_VIRTUALS (t));
850 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
851 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
852 for (i = 0; i < n; i++)
853 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
854 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
855 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
856 n = BINFO_N_BASE_BINFOS (t);
857 for (i = 0; i < n; i++)
858 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
859 return false;
860 }
861
862 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
863
864 static bool
865 mentions_vars_p_constructor (tree t)
866 {
867 unsigned HOST_WIDE_INT idx;
868 constructor_elt *ce;
869
870 if (mentions_vars_p_typed (t))
871 return true;
872
873 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
874 {
875 CHECK_NO_VAR (ce->index);
876 CHECK_VAR (ce->value);
877 }
878 return false;
879 }
880
881 /* Check presence of pointers to decls in fields of an expression tree T. */
882
883 static bool
884 mentions_vars_p_expr (tree t)
885 {
886 int i;
887 if (mentions_vars_p_typed (t))
888 return true;
889 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
890 CHECK_VAR (TREE_OPERAND (t, i));
891 return false;
892 }
893
894 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
895
896 static bool
897 mentions_vars_p_omp_clause (tree t)
898 {
899 int i;
900 if (mentions_vars_p_common (t))
901 return true;
902 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
903 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
904 return false;
905 }
906
907 /* Check presence of pointers to decls that need later fixup in T. */
908
909 static bool
910 mentions_vars_p (tree t)
911 {
912 switch (TREE_CODE (t))
913 {
914 case IDENTIFIER_NODE:
915 break;
916
917 case TREE_LIST:
918 CHECK_VAR (TREE_VALUE (t));
919 CHECK_VAR (TREE_PURPOSE (t));
920 CHECK_NO_VAR (TREE_CHAIN (t));
921 break;
922
923 case FIELD_DECL:
924 return mentions_vars_p_field_decl (t);
925
926 case LABEL_DECL:
927 case CONST_DECL:
928 case PARM_DECL:
929 case RESULT_DECL:
930 case IMPORTED_DECL:
931 case NAMESPACE_DECL:
932 case NAMELIST_DECL:
933 return mentions_vars_p_decl_common (t);
934
935 case VAR_DECL:
936 return mentions_vars_p_decl_with_vis (t);
937
938 case TYPE_DECL:
939 return mentions_vars_p_decl_non_common (t);
940
941 case FUNCTION_DECL:
942 return mentions_vars_p_function (t);
943
944 case TREE_BINFO:
945 return mentions_vars_p_binfo (t);
946
947 case PLACEHOLDER_EXPR:
948 return mentions_vars_p_common (t);
949
950 case BLOCK:
951 case TRANSLATION_UNIT_DECL:
952 case OPTIMIZATION_NODE:
953 case TARGET_OPTION_NODE:
954 break;
955
956 case CONSTRUCTOR:
957 return mentions_vars_p_constructor (t);
958
959 case OMP_CLAUSE:
960 return mentions_vars_p_omp_clause (t);
961
962 default:
963 if (TYPE_P (t))
964 {
965 if (mentions_vars_p_type (t))
966 return true;
967 }
968 else if (EXPR_P (t))
969 {
970 if (mentions_vars_p_expr (t))
971 return true;
972 }
973 else if (CONSTANT_CLASS_P (t))
974 CHECK_NO_VAR (TREE_TYPE (t));
975 else
976 gcc_unreachable ();
977 }
978 return false;
979 }
980
981
982 /* Return the resolution for the decl with index INDEX from DATA_IN. */
983
984 static enum ld_plugin_symbol_resolution
985 get_resolution (struct data_in *data_in, unsigned index)
986 {
987 if (data_in->globals_resolution.exists ())
988 {
989 ld_plugin_symbol_resolution_t ret;
990 /* We can have references to functions that are not emitted, in
991 DECL_FUNCTION_PERSONALITY at least. So we can, and indeed have
992 to, return LDPR_UNKNOWN in some cases. */
993 if (data_in->globals_resolution.length () <= index)
994 return LDPR_UNKNOWN;
995 ret = data_in->globals_resolution[index];
996 return ret;
997 }
998 else
999 /* Delay resolution finding until decl merging. */
1000 return LDPR_UNKNOWN;
1001 }
1002
1003 /* We need to record resolutions until the symbol table is read. */
1004 static void
1005 register_resolution (struct lto_file_decl_data *file_data, tree decl,
1006 enum ld_plugin_symbol_resolution resolution)
1007 {
1008 if (resolution == LDPR_UNKNOWN)
1009 return;
1010 if (!file_data->resolution_map)
1011 file_data->resolution_map = pointer_map_create ();
1012 *pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
1013 }
1014
1015 /* Register DECL with the global symbol table and change its
1016 name if necessary to avoid name clashes for static globals across
1017 different files. */
1018
1019 static void
1020 lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl,
1021 unsigned ix)
1022 {
1023 tree context;
1024
1025 /* Variable has file scope, not local. */
1026 if (!TREE_PUBLIC (decl)
1027 && !((context = decl_function_context (decl))
1028 && auto_var_in_fn_p (decl, context)))
1029 rest_of_decl_compilation (decl, 1, 0);
1030
1031 /* If this variable has already been declared, queue the
1032 declaration for merging. */
1033 if (TREE_PUBLIC (decl))
1034 register_resolution (data_in->file_data,
1035 decl, get_resolution (data_in, ix));
1036 }
1037
1038
1039 /* Register DECL with the global symbol table and change its
1040 name if necessary to avoid name clashes for static globals across
1041 different files. DATA_IN contains descriptors and tables for the
1042 file being read. */
1043
1044 static void
1045 lto_register_function_decl_in_symtab (struct data_in *data_in, tree decl,
1046 unsigned ix)
1047 {
1048 /* If this function has already been declared, queue the
1049 declaration for merging. */
1050 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT (decl))
1051 register_resolution (data_in->file_data,
1052 decl, get_resolution (data_in, ix));
1053 }
1054
1055
1056 /* Re-materialize the type T in the type variant list and in
1057 the pointer/reference-to chains. */
1058
1059 static void
1060 lto_fixup_prevailing_type (tree t)
1061 {
1062 /* The following re-creates proper variant lists while fixing up
1063 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1064 variant list state before fixup is broken. */
1065
1066 /* If we are not our own variant leader, link us into our new leader's
1067 variant list. */
1068 if (TYPE_MAIN_VARIANT (t) != t)
1069 {
1070 tree mv = TYPE_MAIN_VARIANT (t);
1071 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1072 TYPE_NEXT_VARIANT (mv) = t;
1073 }
1074
1075 /* The following reconstructs the pointer chains
1076 of the new pointed-to type if we are a main variant. We do
1077 not stream those so they are broken before fixup. */
1078 if (TREE_CODE (t) == POINTER_TYPE
1079 && TYPE_MAIN_VARIANT (t) == t)
1080 {
1081 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1082 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1083 }
1084 else if (TREE_CODE (t) == REFERENCE_TYPE
1085 && TYPE_MAIN_VARIANT (t) == t)
1086 {
1087 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1088 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1089 }
1090 }
1091
1092
1093 /* We keep prevailing tree SCCs in a hashtable with manual collision
1094 handling (in case all hashes compare the same) and keep the colliding
1095 entries in the tree_scc->next chain. */
1096
1097 struct tree_scc
1098 {
1099 tree_scc *next;
1100 /* Hash of the whole SCC. */
1101 hashval_t hash;
1102 /* Number of trees in the SCC. */
1103 unsigned len;
1104 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1105 which share the same individual tree hash). */
1106 unsigned entry_len;
1107 /* The members of the SCC.
1108 We only need to remember the first entry node candidate for prevailing
1109 SCCs (but of course have access to all entries for SCCs we are
1110 processing).
1111 ??? For prevailing SCCs we really only need hash and the first
1112 entry candidate, but that's too awkward to implement. */
1113 tree entries[1];
1114 };
1115
1116 struct tree_scc_hasher : typed_noop_remove <tree_scc>
1117 {
1118 typedef tree_scc value_type;
1119 typedef tree_scc compare_type;
1120 static inline hashval_t hash (const value_type *);
1121 static inline bool equal (const value_type *, const compare_type *);
1122 };
1123
1124 hashval_t
1125 tree_scc_hasher::hash (const value_type *scc)
1126 {
1127 return scc->hash;
1128 }
1129
1130 bool
1131 tree_scc_hasher::equal (const value_type *scc1, const compare_type *scc2)
1132 {
1133 if (scc1->hash != scc2->hash
1134 || scc1->len != scc2->len
1135 || scc1->entry_len != scc2->entry_len)
1136 return false;
1137 return true;
1138 }
1139
1140 static hash_table <tree_scc_hasher> tree_scc_hash;
1141 static struct obstack tree_scc_hash_obstack;
1142
1143 static unsigned long num_merged_types;
1144 static unsigned long num_prevailing_types;
1145 static unsigned long num_type_scc_trees;
1146 static unsigned long total_scc_size;
1147 static unsigned long num_sccs_read;
1148 static unsigned long total_scc_size_merged;
1149 static unsigned long num_sccs_merged;
1150 static unsigned long num_scc_compares;
1151 static unsigned long num_scc_compare_collisions;
1152
1153
1154 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1155 recursing through in-SCC tree edges. Returns true if the SCCs entered
1156 through T1 and T2 are equal and fills in *MAP with the pairs of
1157 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
1158
1159 static bool
1160 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1161 {
1162 enum tree_code code;
1163
1164 /* Mark already visited nodes. */
1165 TREE_ASM_WRITTEN (t2) = 1;
1166
1167 /* Push the pair onto map. */
1168 (*map)[0] = t1;
1169 (*map)[1] = t2;
1170 *map = *map + 2;
1171
1172 /* Compare value-fields. */
1173 #define compare_values(X) \
1174 do { \
1175 if (X(t1) != X(t2)) \
1176 return false; \
1177 } while (0)
1178
1179 compare_values (TREE_CODE);
1180 code = TREE_CODE (t1);
1181
1182 if (!TYPE_P (t1))
1183 {
1184 compare_values (TREE_SIDE_EFFECTS);
1185 compare_values (TREE_CONSTANT);
1186 compare_values (TREE_READONLY);
1187 compare_values (TREE_PUBLIC);
1188 }
1189 compare_values (TREE_ADDRESSABLE);
1190 compare_values (TREE_THIS_VOLATILE);
1191 if (DECL_P (t1))
1192 compare_values (DECL_UNSIGNED);
1193 else if (TYPE_P (t1))
1194 compare_values (TYPE_UNSIGNED);
1195 if (TYPE_P (t1))
1196 compare_values (TYPE_ARTIFICIAL);
1197 else
1198 compare_values (TREE_NO_WARNING);
1199 compare_values (TREE_NOTHROW);
1200 compare_values (TREE_STATIC);
1201 if (code != TREE_BINFO)
1202 compare_values (TREE_PRIVATE);
1203 compare_values (TREE_PROTECTED);
1204 compare_values (TREE_DEPRECATED);
1205 if (TYPE_P (t1))
1206 {
1207 compare_values (TYPE_SATURATING);
1208 compare_values (TYPE_ADDR_SPACE);
1209 }
1210 else if (code == SSA_NAME)
1211 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1212
1213 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1214 {
1215 if (!wi::eq_p (t1, t2))
1216 return false;
1217 }
1218
1219 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1220 {
1221 /* ??? No suitable compare routine available. */
1222 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1223 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1224 if (r1.cl != r2.cl
1225 || r1.decimal != r2.decimal
1226 || r1.sign != r2.sign
1227 || r1.signalling != r2.signalling
1228 || r1.canonical != r2.canonical
1229 || r1.uexp != r2.uexp)
1230 return false;
1231 for (unsigned i = 0; i < SIGSZ; ++i)
1232 if (r1.sig[i] != r2.sig[i])
1233 return false;
1234 }
1235
1236 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1237 if (!fixed_compare (EQ_EXPR,
1238 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1239 return false;
1240
1241
1242 /* We don't want to compare locations, so there is nothing to compare
1243 for TS_DECL_MINIMAL. */
1244
1245 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1246 {
1247 compare_values (DECL_MODE);
1248 compare_values (DECL_NONLOCAL);
1249 compare_values (DECL_VIRTUAL_P);
1250 compare_values (DECL_IGNORED_P);
1251 compare_values (DECL_ABSTRACT);
1252 compare_values (DECL_ARTIFICIAL);
1253 compare_values (DECL_USER_ALIGN);
1254 compare_values (DECL_PRESERVE_P);
1255 compare_values (DECL_EXTERNAL);
1256 compare_values (DECL_GIMPLE_REG_P);
1257 compare_values (DECL_ALIGN);
1258 if (code == LABEL_DECL)
1259 {
1260 compare_values (EH_LANDING_PAD_NR);
1261 compare_values (LABEL_DECL_UID);
1262 }
1263 else if (code == FIELD_DECL)
1264 {
1265 compare_values (DECL_PACKED);
1266 compare_values (DECL_NONADDRESSABLE_P);
1267 compare_values (DECL_OFFSET_ALIGN);
1268 }
1269 else if (code == VAR_DECL)
1270 {
1271 compare_values (DECL_HAS_DEBUG_EXPR_P);
1272 compare_values (DECL_NONLOCAL_FRAME);
1273 }
1274 if (code == RESULT_DECL
1275 || code == PARM_DECL
1276 || code == VAR_DECL)
1277 {
1278 compare_values (DECL_BY_REFERENCE);
1279 if (code == VAR_DECL
1280 || code == PARM_DECL)
1281 compare_values (DECL_HAS_VALUE_EXPR_P);
1282 }
1283 }
1284
1285 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1286 compare_values (DECL_REGISTER);
1287
1288 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1289 {
1290 compare_values (DECL_COMMON);
1291 compare_values (DECL_DLLIMPORT_P);
1292 compare_values (DECL_WEAK);
1293 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1294 compare_values (DECL_COMDAT);
1295 compare_values (DECL_VISIBILITY);
1296 compare_values (DECL_VISIBILITY_SPECIFIED);
1297 if (code == VAR_DECL)
1298 {
1299 compare_values (DECL_HARD_REGISTER);
1300 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1301 compare_values (DECL_IN_CONSTANT_POOL);
1302 }
1303 }
1304
1305 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1306 {
1307 compare_values (DECL_BUILT_IN_CLASS);
1308 compare_values (DECL_STATIC_CONSTRUCTOR);
1309 compare_values (DECL_STATIC_DESTRUCTOR);
1310 compare_values (DECL_UNINLINABLE);
1311 compare_values (DECL_POSSIBLY_INLINED);
1312 compare_values (DECL_IS_NOVOPS);
1313 compare_values (DECL_IS_RETURNS_TWICE);
1314 compare_values (DECL_IS_MALLOC);
1315 compare_values (DECL_IS_OPERATOR_NEW);
1316 compare_values (DECL_DECLARED_INLINE_P);
1317 compare_values (DECL_STATIC_CHAIN);
1318 compare_values (DECL_NO_INLINE_WARNING_P);
1319 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1320 compare_values (DECL_NO_LIMIT_STACK);
1321 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1322 compare_values (DECL_PURE_P);
1323 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1324 compare_values (DECL_FINAL_P);
1325 compare_values (DECL_CXX_CONSTRUCTOR_P);
1326 compare_values (DECL_CXX_DESTRUCTOR_P);
1327 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1328 compare_values (DECL_FUNCTION_CODE);
1329 }
1330
1331 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1332 {
1333 compare_values (TYPE_MODE);
1334 compare_values (TYPE_STRING_FLAG);
1335 compare_values (TYPE_NO_FORCE_BLK);
1336 compare_values (TYPE_NEEDS_CONSTRUCTING);
1337 if (RECORD_OR_UNION_TYPE_P (t1))
1338 {
1339 compare_values (TYPE_TRANSPARENT_AGGR);
1340 compare_values (TYPE_FINAL_P);
1341 }
1342 else if (code == ARRAY_TYPE)
1343 compare_values (TYPE_NONALIASED_COMPONENT);
1344 compare_values (TYPE_PACKED);
1345 compare_values (TYPE_RESTRICT);
1346 compare_values (TYPE_USER_ALIGN);
1347 compare_values (TYPE_READONLY);
1348 compare_values (TYPE_PRECISION);
1349 compare_values (TYPE_ALIGN);
1350 compare_values (TYPE_ALIAS_SET);
1351 }
1352
1353 /* We don't want to compare locations, so there is nothing to compare
1354 for TS_EXP. */
1355
1356 /* BLOCKs are function local and we don't merge anything there, so
1357 simply refuse to merge. */
1358 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1359 return false;
1360
1361 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1362 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1363 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1364 return false;
1365
1366 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1367 gcc_unreachable ();
1368
1369 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1370 if (memcmp (TREE_OPTIMIZATION (t1), TREE_OPTIMIZATION (t2),
1371 sizeof (struct cl_optimization)) != 0)
1372 return false;
1373
1374 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1375 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1376 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1377 return false;
1378
1379 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1380 compare_values (CONSTRUCTOR_NELTS);
1381
1382 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1383 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1384 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1385 IDENTIFIER_LENGTH (t1)) != 0)
1386 return false;
1387
1388 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1389 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1390 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1391 TREE_STRING_LENGTH (t1)) != 0)
1392 return false;
1393
1394 if (code == OMP_CLAUSE)
1395 {
1396 compare_values (OMP_CLAUSE_CODE);
1397 switch (OMP_CLAUSE_CODE (t1))
1398 {
1399 case OMP_CLAUSE_DEFAULT:
1400 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1401 break;
1402 case OMP_CLAUSE_SCHEDULE:
1403 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1404 break;
1405 case OMP_CLAUSE_DEPEND:
1406 compare_values (OMP_CLAUSE_DEPEND_KIND);
1407 break;
1408 case OMP_CLAUSE_MAP:
1409 compare_values (OMP_CLAUSE_MAP_KIND);
1410 break;
1411 case OMP_CLAUSE_PROC_BIND:
1412 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1413 break;
1414 case OMP_CLAUSE_REDUCTION:
1415 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1416 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1417 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1418 break;
1419 default:
1420 break;
1421 }
1422 }
1423
1424 #undef compare_values
1425
1426
1427 /* Compare pointer fields. */
1428
1429 /* Recurse. Search & Replaced from DFS_write_tree_body.
1430 Folding the early checks into the compare_tree_edges recursion
1431 macro makes debugging way quicker as you are able to break on
1432 compare_tree_sccs_1 and simply finish until a call returns false
1433 to spot the SCC members with the difference. */
1434 #define compare_tree_edges(E1, E2) \
1435 do { \
1436 tree t1_ = (E1), t2_ = (E2); \
1437 if (t1_ != t2_ \
1438 && (!t1_ || !t2_ \
1439 || !TREE_VISITED (t2_) \
1440 || (!TREE_ASM_WRITTEN (t2_) \
1441 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1442 return false; \
1443 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1444 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1445 } while (0)
1446
1447 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1448 {
1449 if (code != IDENTIFIER_NODE)
1450 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1451 }
1452
1453 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1454 {
1455 unsigned i;
1456 /* Note that the number of elements for EXPR has already been emitted
1457 in EXPR's header (see streamer_write_tree_header). */
1458 for (i = 0; i < VECTOR_CST_NELTS (t1); ++i)
1459 compare_tree_edges (VECTOR_CST_ELT (t1, i), VECTOR_CST_ELT (t2, i));
1460 }
1461
1462 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1463 {
1464 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1465 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1466 }
1467
1468 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1469 {
1470 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1471 /* ??? Global decls from different TUs have non-matching
1472 TRANSLATION_UNIT_DECLs. Only consider a small set of
1473 decls equivalent; we should not end up merging others. */
1474 if ((code == TYPE_DECL
1475 || code == NAMESPACE_DECL
1476 || code == IMPORTED_DECL
1477 || code == CONST_DECL
1478 || (VAR_OR_FUNCTION_DECL_P (t1)
1479 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1480 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1481 ;
1482 else
1483 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1484 }
1485
1486 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1487 {
1488 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1489 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1490 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1491 if ((code == VAR_DECL
1492 || code == PARM_DECL)
1493 && DECL_HAS_VALUE_EXPR_P (t1))
1494 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1495 if (code == VAR_DECL
1496 && DECL_HAS_DEBUG_EXPR_P (t1))
1497 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1498 /* LTO specific edges. */
1499 if (code != FUNCTION_DECL
1500 && code != TRANSLATION_UNIT_DECL)
1501 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1502 }
1503
1504 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1505 {
1506 if (code == FUNCTION_DECL)
1507 {
1508 tree a1, a2;
1509 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1510 a1 || a2;
1511 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1512 compare_tree_edges (a1, a2);
1513 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1514 }
1515 else if (code == TYPE_DECL)
1516 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1517 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1518 }
1519
1520 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1521 {
1522 /* Make sure we don't inadvertently set the assembler name. */
1523 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1524 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1525 DECL_ASSEMBLER_NAME (t2));
1526 }
1527
1528 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1529 {
1530 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1531 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1532 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1533 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1534 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1535 DECL_FIELD_BIT_OFFSET (t2));
1536 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1537 }
1538
1539 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1540 {
1541 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1542 DECL_FUNCTION_PERSONALITY (t2));
1543 /* DECL_FUNCTION_SPECIFIC_TARGET is not yet created. We compare
1544 the attribute list instead. */
1545 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1546 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1547 }
1548
1549 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1550 {
1551 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1552 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1553 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1554 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1555 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1556 reconstructed during fixup. */
1557 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1558 during fixup. */
1559 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1560 /* ??? Global types from different TUs have non-matching
1561 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1562 equal. */
1563 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1564 ;
1565 else
1566 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1567 /* TYPE_CANONICAL is re-computed during type merging, so do not
1568 compare it here. */
1569 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1570 }
1571
1572 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1573 {
1574 if (code == ENUMERAL_TYPE)
1575 compare_tree_edges (TYPE_VALUES (t1), TYPE_VALUES (t2));
1576 else if (code == ARRAY_TYPE)
1577 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1578 else if (RECORD_OR_UNION_TYPE_P (t1))
1579 {
1580 tree f1, f2;
1581 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1582 f1 || f2;
1583 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1584 compare_tree_edges (f1, f2);
1585 compare_tree_edges (TYPE_BINFO (t1), TYPE_BINFO (t2));
1586 }
1587 else if (code == FUNCTION_TYPE
1588 || code == METHOD_TYPE)
1589 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1590 if (!POINTER_TYPE_P (t1))
1591 compare_tree_edges (TYPE_MINVAL (t1), TYPE_MINVAL (t2));
1592 compare_tree_edges (TYPE_MAXVAL (t1), TYPE_MAXVAL (t2));
1593 }
1594
1595 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1596 {
1597 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1598 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1599 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1600 }
1601
1602 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1603 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1604 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1605
1606 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1607 {
1608 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1609 compare_tree_edges (TREE_OPERAND (t1, i),
1610 TREE_OPERAND (t2, i));
1611
1612 /* BLOCKs are function local and we don't merge anything there. */
1613 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1614 return false;
1615 }
1616
1617 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1618 {
1619 unsigned i;
1620 tree t;
1621 /* Lengths have already been compared above. */
1622 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1623 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1624 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1625 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1626 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1627 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1628 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1629 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1630 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1631 }
1632
1633 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1634 {
1635 unsigned i;
1636 tree index, value;
1637 /* Lengths have already been compared above. */
1638 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1639 {
1640 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1641 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1642 }
1643 }
1644
1645 if (code == OMP_CLAUSE)
1646 {
1647 int i;
1648
1649 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1650 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1651 OMP_CLAUSE_OPERAND (t2, i));
1652 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1653 }
1654
1655 #undef compare_tree_edges
1656
1657 return true;
1658 }
1659
1660 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1661 out MAP if they are equal. */
1662
1663 static bool
1664 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1665 tree *map)
1666 {
1667 /* Assume SCC entry hashes are sorted by their cardinality. This
1668 means we can simply take the first n-tuple of equal hashes
1669 (which is recorded as entry_len) and do n SCC entry candidate
1670 comparisons. */
1671 for (unsigned i = 0; i < pscc->entry_len; ++i)
1672 {
1673 tree *mapp = map;
1674 num_scc_compare_collisions++;
1675 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1676 {
1677 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1678 on the scc as all trees will be freed. */
1679 return true;
1680 }
1681 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1682 the SCC prevails. */
1683 for (unsigned j = 0; j < scc->len; ++j)
1684 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1685 }
1686
1687 return false;
1688 }
1689
1690 /* qsort comparison function to sort a map of two pointers by the
1691 second pointer. */
1692
1693 static int
1694 cmp_tree (const void *p1_, const void *p2_)
1695 {
1696 tree *p1 = (tree *)(const_cast<void *>(p1_));
1697 tree *p2 = (tree *)(const_cast<void *>(p2_));
1698 if (p1[1] == p2[1])
1699 return 0;
1700 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1701 }
1702
1703 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1704 hash value SCC_HASH with an already recorded SCC. Return true if
1705 that was successful, otherwise return false. */
1706
1707 static bool
1708 unify_scc (struct streamer_tree_cache_d *cache, unsigned from,
1709 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1710 {
1711 bool unified_p = false;
1712 tree_scc *scc
1713 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1714 scc->next = NULL;
1715 scc->hash = scc_hash;
1716 scc->len = len;
1717 scc->entry_len = scc_entry_len;
1718 for (unsigned i = 0; i < len; ++i)
1719 {
1720 tree t = streamer_tree_cache_get_tree (cache, from + i);
1721 scc->entries[i] = t;
1722 /* Do not merge SCCs with local entities inside them. Also do
1723 not merge TRANSLATION_UNIT_DECLs. */
1724 if (TREE_CODE (t) == TRANSLATION_UNIT_DECL
1725 || (VAR_OR_FUNCTION_DECL_P (t)
1726 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1727 || TREE_CODE (t) == LABEL_DECL)
1728 {
1729 /* Avoid doing any work for these cases and do not bother to
1730 record the SCCs for further merging. */
1731 return false;
1732 }
1733 }
1734
1735 /* Look for the list of candidate SCCs to compare against. */
1736 tree_scc **slot;
1737 slot = tree_scc_hash.find_slot_with_hash (scc, scc_hash, INSERT);
1738 if (*slot)
1739 {
1740 /* Try unifying against each candidate. */
1741 num_scc_compares++;
1742
1743 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1744 outside of the scc when following tree edges. Make sure
1745 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1746 to track whether we visited the SCC member during the compare.
1747 We cannot use TREE_VISITED on the pscc members as the extended
1748 scc and pscc can overlap. */
1749 for (unsigned i = 0; i < scc->len; ++i)
1750 {
1751 TREE_VISITED (scc->entries[i]) = 1;
1752 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1753 }
1754
1755 tree *map = XALLOCAVEC (tree, 2 * len);
1756 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1757 {
1758 if (!compare_tree_sccs (pscc, scc, map))
1759 continue;
1760
1761 /* Found an equal SCC. */
1762 unified_p = true;
1763 num_scc_compare_collisions--;
1764 num_sccs_merged++;
1765 total_scc_size_merged += len;
1766
1767 #ifdef ENABLE_CHECKING
1768 for (unsigned i = 0; i < len; ++i)
1769 {
1770 tree t = map[2*i+1];
1771 enum tree_code code = TREE_CODE (t);
1772 /* IDENTIFIER_NODEs should be singletons and are merged by the
1773 streamer. The others should be singletons, too, and we
1774 should not merge them in any way. */
1775 gcc_assert (code != TRANSLATION_UNIT_DECL
1776 && code != IDENTIFIER_NODE
1777 && !streamer_handle_as_builtin_p (t));
1778 }
1779 #endif
1780
1781 /* Fixup the streamer cache with the prevailing nodes according
1782 to the tree node mapping computed by compare_tree_sccs. */
1783 if (len == 1)
1784 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1785 else
1786 {
1787 tree *map2 = XALLOCAVEC (tree, 2 * len);
1788 for (unsigned i = 0; i < len; ++i)
1789 {
1790 map2[i*2] = (tree)(uintptr_t)(from + i);
1791 map2[i*2+1] = scc->entries[i];
1792 }
1793 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1794 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1795 for (unsigned i = 0; i < len; ++i)
1796 streamer_tree_cache_replace_tree (cache, map[2*i],
1797 (uintptr_t)map2[2*i]);
1798 }
1799
1800 /* Free the tree nodes from the read SCC. */
1801 for (unsigned i = 0; i < len; ++i)
1802 {
1803 enum tree_code code;
1804 if (TYPE_P (scc->entries[i]))
1805 num_merged_types++;
1806 code = TREE_CODE (scc->entries[i]);
1807 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1808 vec_free (CONSTRUCTOR_ELTS (scc->entries[i]));
1809 ggc_free (scc->entries[i]);
1810 }
1811
1812 break;
1813 }
1814
1815 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1816 if (!unified_p)
1817 for (unsigned i = 0; i < scc->len; ++i)
1818 TREE_VISITED (scc->entries[i]) = 0;
1819 }
1820
1821 /* If we didn't unify it to any candidate, duplicate the relevant
1822 pieces to permanent storage and link it into the chain. */
1823 if (!unified_p)
1824 {
1825 tree_scc *pscc
1826 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1827 memcpy (pscc, scc, sizeof (tree_scc));
1828 pscc->next = (*slot);
1829 *slot = pscc;
1830 }
1831 return unified_p;
1832 }
1833
1834
1835 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1836 RESOLUTIONS is the set of symbols picked by the linker (read from the
1837 resolution file when the linker plugin is being used). */
1838
1839 static void
1840 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1841 vec<ld_plugin_symbol_resolution_t> resolutions)
1842 {
1843 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1844 const int decl_offset = sizeof (struct lto_decl_header);
1845 const int main_offset = decl_offset + header->decl_state_size;
1846 const int string_offset = main_offset + header->main_size;
1847 struct lto_input_block ib_main;
1848 struct data_in *data_in;
1849 unsigned int i;
1850 const uint32_t *data_ptr, *data_end;
1851 uint32_t num_decl_states;
1852
1853 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1854 header->main_size);
1855
1856 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1857 header->string_size, resolutions);
1858
1859 /* We do not uniquify the pre-loaded cache entries, those are middle-end
1860 internal types that should not be merged. */
1861
1862 /* Read the global declarations and types. */
1863 while (ib_main.p < ib_main.len)
1864 {
1865 tree t;
1866 unsigned from = data_in->reader_cache->nodes.length ();
1867 /* Read and uniquify SCCs as in the input stream. */
1868 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1869 if (tag == LTO_tree_scc)
1870 {
1871 unsigned len_;
1872 unsigned scc_entry_len;
1873 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1874 &scc_entry_len);
1875 unsigned len = data_in->reader_cache->nodes.length () - from;
1876 gcc_assert (len == len_);
1877
1878 total_scc_size += len;
1879 num_sccs_read++;
1880
1881 	  /* We have the special case of size-1 SCCs that are pre-merged,
1882 	     for example by means of identifier and string sharing.
1883 ??? Maybe we should avoid streaming those as SCCs. */
1884 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1885 from);
1886 if (len == 1
1887 && (TREE_CODE (first) == IDENTIFIER_NODE
1888 || TREE_CODE (first) == INTEGER_CST
1889 || TREE_CODE (first) == TRANSLATION_UNIT_DECL
1890 || streamer_handle_as_builtin_p (first)))
1891 continue;
1892
1893 /* Try to unify the SCC with already existing ones. */
1894 if (!flag_ltrans
1895 && unify_scc (data_in->reader_cache, from,
1896 len, scc_entry_len, scc_hash))
1897 continue;
1898
1899 /* Do remaining fixup tasks for prevailing nodes. */
1900 bool seen_type = false;
1901 for (unsigned i = 0; i < len; ++i)
1902 {
1903 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1904 from + i);
1905 /* Reconstruct the type variant and pointer-to/reference-to
1906 chains. */
1907 if (TYPE_P (t))
1908 {
1909 seen_type = true;
1910 num_prevailing_types++;
1911 lto_fixup_prevailing_type (t);
1912 }
1913 /* Compute the canonical type of all types.
1914 ??? Should be able to assert that !TYPE_CANONICAL. */
1915 if (TYPE_P (t) && !TYPE_CANONICAL (t))
1916 gimple_register_canonical_type (t);
1917 	      /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUES of their
1918 	         type, which is also a member of this SCC.  */
1919 if (TREE_CODE (t) == INTEGER_CST
1920 && !TREE_OVERFLOW (t))
1921 cache_integer_cst (t);
1922 	      /* Re-build DECL_FUNCTION_SPECIFIC_TARGET; we need that
1923 	         for both the WPA and LTRANS stages.  */
1924 if (TREE_CODE (t) == FUNCTION_DECL)
1925 {
1926 tree attr = lookup_attribute ("target", DECL_ATTRIBUTES (t));
1927 if (attr)
1928 targetm.target_option.valid_attribute_p
1929 (t, NULL_TREE, TREE_VALUE (attr), 0);
1930 }
1931 /* Register TYPE_DECLs with the debuginfo machinery. */
1932 if (!flag_wpa
1933 && TREE_CODE (t) == TYPE_DECL)
1934 debug_hooks->type_decl (t, !DECL_FILE_SCOPE_P (t));
1935 if (!flag_ltrans)
1936 {
1937 /* Register variables and functions with the
1938 symbol table. */
1939 if (TREE_CODE (t) == VAR_DECL)
1940 lto_register_var_decl_in_symtab (data_in, t, from + i);
1941 else if (TREE_CODE (t) == FUNCTION_DECL
1942 && !DECL_BUILT_IN (t))
1943 lto_register_function_decl_in_symtab (data_in, t, from + i);
1944 /* Scan the tree for references to global functions or
1945 variables and record those for later fixup. */
1946 if (mentions_vars_p (t))
1947 vec_safe_push (tree_with_vars, t);
1948 }
1949 }
1950 if (seen_type)
1951 num_type_scc_trees += len;
1952 }
1953 else
1954 {
1955 /* Pickle stray references. */
1956 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1957 gcc_assert (t && data_in->reader_cache->nodes.length () == from);
1958 }
1959 }
1960
1961 /* Read in lto_in_decl_state objects. */
1962 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1963 data_end =
1964 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1965 num_decl_states = *data_ptr++;
1966
1967 gcc_assert (num_decl_states > 0);
1968 decl_data->global_decl_state = lto_new_in_decl_state ();
1969 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
1970 decl_data->global_decl_state);
1971
1972 /* Read in per-function decl states and enter them in hash table. */
1973 decl_data->function_decl_states =
1974 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
1975
1976 for (i = 1; i < num_decl_states; i++)
1977 {
1978 struct lto_in_decl_state *state = lto_new_in_decl_state ();
1979 void **slot;
1980
1981 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
1982 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
1983 gcc_assert (*slot == NULL);
1984 *slot = state;
1985 }
1986
1987 if (data_ptr != data_end)
1988 internal_error ("bytecode stream: garbage at the end of symbols section");
1989
1990 /* Set the current decl state to be the global state. */
1991 decl_data->current_decl_state = decl_data->global_decl_state;
1992
1993 lto_data_in_delete (data_in);
1994 }
1995
1996 /* Parse hex number P; a custom replacement for the non-portable strtoll.  */
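/* For example, lto_parse_hex ("1a2b") yields 0x1a2b; any character outside
   [0-9a-fA-F] makes it report an internal error.  */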
1997
1998 static int64_t
1999 lto_parse_hex (const char *p)
2000 {
2001 int64_t ret = 0;
2002
2003 for (; *p != '\0'; ++p)
2004 {
2005 char c = *p;
2006 unsigned char part;
2007 ret <<= 4;
2008 if (c >= '0' && c <= '9')
2009 part = c - '0';
2010 else if (c >= 'a' && c <= 'f')
2011 part = c - 'a' + 10;
2012 else if (c >= 'A' && c <= 'F')
2013 part = c - 'A' + 10;
2014 else
2015 internal_error ("could not parse hex number");
2016 ret |= part;
2017 }
2018
2019 return ret;
2020 }
2021
2022 /* Read the resolution for FILE from RESOLUTION and record it in the
2023    per-subfile decl data found via FILE_IDS.  */
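/* The resolution data for one object, as consumed by the fscanf calls below,
   looks roughly like

     <object file name>[@0x<offset>]
     <number of symbols>
     <symbol index> <sub id in hex> <resolution name> ...

   with one symbol entry per line.  */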
2024
2025 static void
2026 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2027 {
2028 /* We require that objects in the resolution file are in the same
2029 order as the lto1 command line. */
2030 unsigned int name_len;
2031 char *obj_name;
2032 unsigned int num_symbols;
2033 unsigned int i;
2034 struct lto_file_decl_data *file_data;
2035 splay_tree_node nd = NULL;
2036
2037 if (!resolution)
2038 return;
2039
2040 name_len = strlen (file->filename);
2041 obj_name = XNEWVEC (char, name_len + 1);
2042 fscanf (resolution, " "); /* Read white space. */
2043
2044 fread (obj_name, sizeof (char), name_len, resolution);
2045 obj_name[name_len] = '\0';
2046 if (filename_cmp (obj_name, file->filename) != 0)
2047 internal_error ("unexpected file name %s in linker resolution file. "
2048 "Expected %s", obj_name, file->filename);
2049 if (file->offset != 0)
2050 {
2051 int t;
2052 char offset_p[17];
2053 int64_t offset;
2054 t = fscanf (resolution, "@0x%16s", offset_p);
2055 if (t != 1)
2056 internal_error ("could not parse file offset");
2057 offset = lto_parse_hex (offset_p);
2058 if (offset != file->offset)
2059 internal_error ("unexpected offset");
2060 }
2061
2062 free (obj_name);
2063
2064 fscanf (resolution, "%u", &num_symbols);
2065
2066 for (i = 0; i < num_symbols; i++)
2067 {
2068 int t;
2069 unsigned index;
2070 unsigned HOST_WIDE_INT id;
2071 char r_str[27];
2072 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2073 unsigned int j;
2074 unsigned int lto_resolution_str_len =
2075 sizeof (lto_resolution_str) / sizeof (char *);
2076 res_pair rp;
2077
2078 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE " %26s %*[^\n]\n",
2079 &index, &id, r_str);
2080 if (t != 3)
2081 internal_error ("invalid line in the resolution file");
2082
2083 for (j = 0; j < lto_resolution_str_len; j++)
2084 {
2085 if (strcmp (lto_resolution_str[j], r_str) == 0)
2086 {
2087 r = (enum ld_plugin_symbol_resolution) j;
2088 break;
2089 }
2090 }
2091 if (j == lto_resolution_str_len)
2092 internal_error ("invalid resolution in the resolution file");
2093
2094 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2095 {
2096 nd = lto_splay_tree_lookup (file_ids, id);
2097 if (nd == NULL)
2098 internal_error ("resolution sub id %wx not in object file", id);
2099 }
2100
2101 file_data = (struct lto_file_decl_data *)nd->value;
2102 /* The indexes are very sparse. To save memory save them in a compact
2103 format that is only unpacked later when the subfile is processed. */
2104 rp.res = r;
2105 rp.index = index;
2106 file_data->respairs.safe_push (rp);
2107 if (file_data->max_index < index)
2108 file_data->max_index = index;
2109 }
2110 }
2111
2112 /* List of file_decl_datas */
2113 struct file_data_list
2114 {
2115 struct lto_file_decl_data *first, *last;
2116 };
2117
2118 /* Is NAME the name of an id'ed LTO section?  If so, store the id in *ID.  */
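/* For instance, assuming an LTO_SECTION_NAME_PREFIX of ".gnu.lto_", a section
   named ".gnu.lto_foo.1234" yields *ID == 0x1234.  */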
2119
2120 static int
2121 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2122 {
2123 const char *s;
2124
2125 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
2126 return 0;
2127 s = strrchr (name, '.');
2128 return s && sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2129 }
2130
2131 /* Create file_data of each sub file id */
2132
2133 static int
2134 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2135 struct file_data_list *list)
2136 {
2137 struct lto_section_slot s_slot, *new_slot;
2138 unsigned HOST_WIDE_INT id;
2139 splay_tree_node nd;
2140 void **hash_slot;
2141 char *new_name;
2142 struct lto_file_decl_data *file_data;
2143
2144 if (!lto_section_with_id (ls->name, &id))
2145 return 1;
2146
2147 /* Find hash table of sub module id */
2148 nd = lto_splay_tree_lookup (file_ids, id);
2149 if (nd != NULL)
2150 {
2151 file_data = (struct lto_file_decl_data *)nd->value;
2152 }
2153 else
2154 {
2155 file_data = ggc_alloc<lto_file_decl_data> ();
2156 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2157 file_data->id = id;
2158       file_data->section_hash_table = lto_obj_create_section_hash_table ();
2159 lto_splay_tree_insert (file_ids, id, file_data);
2160
2161 /* Maintain list in linker order */
2162 if (!list->first)
2163 list->first = file_data;
2164 if (list->last)
2165 list->last->next = file_data;
2166 list->last = file_data;
2167 }
2168
2169 /* Copy section into sub module hash table */
2170 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2171 s_slot.name = new_name;
2172 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2173 gcc_assert (*hash_slot == NULL);
2174
2175 new_slot = XDUP (struct lto_section_slot, ls);
2176 new_slot->name = new_name;
2177 *hash_slot = new_slot;
2178 return 1;
2179 }
2180
2181 /* Read declarations and other initializations for a FILE_DATA. */
2182
2183 static void
2184 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
2185 {
2186 const char *data;
2187 size_t len;
2188 vec<ld_plugin_symbol_resolution_t>
2189 resolutions = vNULL;
2190 int i;
2191 res_pair *rp;
2192
2193 /* Create vector for fast access of resolution. We do this lazily
2194 to save memory. */
2195 resolutions.safe_grow_cleared (file_data->max_index + 1);
2196 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2197 resolutions[rp->index] = rp->res;
2198 file_data->respairs.release ();
2199
2200 file_data->renaming_hash_table = lto_create_renaming_table ();
2201 file_data->file_name = file->filename;
2202 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
2203 if (data == NULL)
2204 {
2205 internal_error ("cannot read LTO decls from %s", file_data->file_name);
2206 return;
2207 }
2208 /* Frees resolutions */
2209 lto_read_decls (file_data, data, resolutions);
2210 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2211 }
2212
2213 /* Finalize FILE_DATA in FILE and increase COUNT. */
2214
2215 static int
2216 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2217 int *count)
2218 {
2219 lto_file_finalize (file_data, file);
2220 if (cgraph_dump_file)
2221 fprintf (cgraph_dump_file, "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2222 file_data->file_name, file_data->id);
2223 (*count)++;
2224 return 0;
2225 }
2226
2227 /* Generate a TREE representation for all types and external decl
2228    entities in FILE.
2229
2230 Read all of the globals out of the file. Then read the cgraph
2231 and process the .o index into the cgraph nodes so that it can open
2232 the .o file to load the functions and ipa information. */
2233
2234 static struct lto_file_decl_data *
2235 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2236 {
2237 struct lto_file_decl_data *file_data = NULL;
2238 splay_tree file_ids;
2239 htab_t section_hash_table;
2240 struct lto_section_slot *section;
2241 struct file_data_list file_list;
2242 struct lto_section_list section_list;
2243
2244 memset (&section_list, 0, sizeof (struct lto_section_list));
2245 section_hash_table = lto_obj_build_section_table (file, &section_list);
2246
2247 /* Find all sub modules in the object and put their sections into new hash
2248 tables in a splay tree. */
2249 file_ids = lto_splay_tree_new ();
2250 memset (&file_list, 0, sizeof (struct file_data_list));
2251 for (section = section_list.first; section != NULL; section = section->next)
2252 create_subid_section_table (section, file_ids, &file_list);
2253
2254 /* Add resolutions to file ids */
2255 lto_resolution_read (file_ids, resolution_file, file);
2256
2257 /* Finalize each lto file for each submodule in the merged object */
2258 for (file_data = file_list.first; file_data != NULL; file_data = file_data->next)
2259 lto_create_files_from_ids (file, file_data, count);
2260
2261 splay_tree_delete (file_ids);
2262 htab_delete (section_hash_table);
2263
2264 return file_list.first;
2265 }
2266
2267 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2268 #define LTO_MMAP_IO 1
2269 #endif
2270
2271 #if LTO_MMAP_IO
2272 /* Mask of the machine's page size, used for mmap and munmap calls.  */
2273 static size_t page_mask;
2274 #endif
2275
2276 /* Get the section data of length LEN from FILE_DATA's file starting at
2277    OFFSET.  The data segment must be freed by the caller when the
2278    caller is finished with it.  Returns NULL if all was not well.  */
2279
2280 static char *
2281 lto_read_section_data (struct lto_file_decl_data *file_data,
2282 intptr_t offset, size_t len)
2283 {
2284 char *result;
2285 static int fd = -1;
2286 static char *fd_name;
2287 #if LTO_MMAP_IO
2288 intptr_t computed_len;
2289 intptr_t computed_offset;
2290 intptr_t diff;
2291 #endif
2292
2293 /* Keep a single-entry file-descriptor cache. The last file we
2294 touched will get closed at exit.
2295 ??? Eventually we want to add a more sophisticated larger cache
2296 or rather fix function body streaming to not stream them in
2297 practically random order. */
2298 if (fd != -1
2299 && filename_cmp (fd_name, file_data->file_name) != 0)
2300 {
2301 free (fd_name);
2302 close (fd);
2303 fd = -1;
2304 }
2305 if (fd == -1)
2306 {
2307 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2308 if (fd == -1)
2309 {
2310 fatal_error ("Cannot open %s", file_data->file_name);
2311 return NULL;
2312 }
2313 fd_name = xstrdup (file_data->file_name);
2314 }
2315
2316 #if LTO_MMAP_IO
2317 if (!page_mask)
2318 {
2319 size_t page_size = sysconf (_SC_PAGE_SIZE);
2320 page_mask = ~(page_size - 1);
2321 }
2322
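  /* mmap requires a page-aligned file offset, so round OFFSET down to a
     page boundary, map a correspondingly larger region and return a
     pointer adjusted by the difference.  */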
2323 computed_offset = offset & page_mask;
2324 diff = offset - computed_offset;
2325 computed_len = len + diff;
2326
2327 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2328 fd, computed_offset);
2329 if (result == MAP_FAILED)
2330 {
2331 fatal_error ("Cannot map %s", file_data->file_name);
2332 return NULL;
2333 }
2334
2335 return result + diff;
2336 #else
2337 result = (char *) xmalloc (len);
2338 if (lseek (fd, offset, SEEK_SET) != offset
2339 || read (fd, result, len) != (ssize_t) len)
2340 {
2341 free (result);
2342 fatal_error ("Cannot read %s", file_data->file_name);
2343 result = NULL;
2344 }
2345 #ifdef __MINGW32__
2346 /* Native Windows doesn't support delayed unlink of an open file.  So
2347    we close the file here again.  This produces higher I/O load, but at
2348    least it prevents dangling file handles from blocking the unlink.  */
2349 free (fd_name);
2350 fd_name = NULL;
2351 close (fd);
2352 fd = -1;
2353 #endif
2354 return result;
2355 #endif
2356 }
2357
2358
2359 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2360 NAME will be NULL unless the section type is for a function
2361 body. */
2362
2363 static const char *
2364 get_section_data (struct lto_file_decl_data *file_data,
2365 enum lto_section_type section_type,
2366 const char *name,
2367 size_t *len)
2368 {
2369 htab_t section_hash_table = file_data->section_hash_table;
2370 struct lto_section_slot *f_slot;
2371 struct lto_section_slot s_slot;
2372 const char *section_name = lto_get_section_name (section_type, name, file_data);
2373 char *data = NULL;
2374
2375 *len = 0;
2376 s_slot.name = section_name;
2377 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2378 if (f_slot)
2379 {
2380 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2381 *len = f_slot->len;
2382 }
2383
2384 free (CONST_CAST (char *, section_name));
2385 return data;
2386 }
2387
2388
2389 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2390 starts at OFFSET and has LEN bytes. */
2391
2392 static void
2393 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2394 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2395 const char *name ATTRIBUTE_UNUSED,
2396 const char *offset, size_t len ATTRIBUTE_UNUSED)
2397 {
2398 #if LTO_MMAP_IO
2399 intptr_t computed_len;
2400 intptr_t computed_offset;
2401 intptr_t diff;
2402 #endif
2403
2404 #if LTO_MMAP_IO
2405 computed_offset = ((intptr_t) offset) & page_mask;
2406 diff = (intptr_t) offset - computed_offset;
2407 computed_len = len + diff;
2408
2409 munmap ((caddr_t) computed_offset, computed_len);
2410 #else
2411 free (CONST_CAST(char *, offset));
2412 #endif
2413 }
2414
2415 static lto_file *current_lto_file;
2416
2417 /* Helper for qsort; order partitions by size, largest first.
2418    We sort from greatest to smallest so a parallel build doesn't stall on
2419    the longest compilation being started too late.  */
2420
2421 static int
2422 cmp_partitions_size (const void *a, const void *b)
2423 {
2424 const struct ltrans_partition_def *pa
2425 = *(struct ltrans_partition_def *const *)a;
2426 const struct ltrans_partition_def *pb
2427 = *(struct ltrans_partition_def *const *)b;
2428 return pb->insns - pa->insns;
2429 }
2430
2431 /* Helper for qsort; order partitions by first-node order, highest first.  */
2432
2433 static int
2434 cmp_partitions_order (const void *a, const void *b)
2435 {
2436 const struct ltrans_partition_def *pa
2437 = *(struct ltrans_partition_def *const *)a;
2438 const struct ltrans_partition_def *pb
2439 = *(struct ltrans_partition_def *const *)b;
2440 int ordera = -1, orderb = -1;
2441
2442 if (lto_symtab_encoder_size (pa->encoder))
2443 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
2444 if (lto_symtab_encoder_size (pb->encoder))
2445 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
2446 return orderb - ordera;
2447 }
2448
2449 /* Actually stream out ENCODER into TEMP_FILENAME. */
2450
2451 static void
2452 do_stream_out (char *temp_filename, lto_symtab_encoder_t encoder)
2453 {
2454 lto_file *file = lto_obj_file_open (temp_filename, true);
2455 if (!file)
2456 fatal_error ("lto_obj_file_open() failed");
2457 lto_set_current_out_file (file);
2458
2459 ipa_write_optimization_summaries (encoder);
2460
2461 lto_set_current_out_file (NULL);
2462 lto_obj_file_close (file);
2463 free (file);
2464 }
2465
2466 /* Wait for forked process and signal errors. */
2467 #ifdef HAVE_WORKING_FORK
2468 static void
2469 wait_for_child ()
2470 {
2471 int status;
2472 do
2473 {
2474 #ifndef WCONTINUED
2475 #define WCONTINUED 0
2476 #endif
2477 int w = waitpid (0, &status, WUNTRACED | WCONTINUED);
2478 if (w == -1)
2479 fatal_error ("waitpid failed");
2480
2481 if (WIFEXITED (status) && WEXITSTATUS (status))
2482 fatal_error ("streaming subprocess failed");
2483 else if (WIFSIGNALED (status))
2484 fatal_error ("streaming subprocess was killed by signal");
2485 }
2486 while (!WIFEXITED (status) && !WIFSIGNALED (status));
2487 }
2488 #endif
2489
2490 /* Stream out ENCODER into TEMP_FILENAME.
2491 Fork if that seems to help. */
2492
2493 static void
2494 stream_out (char *temp_filename, lto_symtab_encoder_t encoder, bool last)
2495 {
2496 #ifdef HAVE_WORKING_FORK
2497 static int nruns;
2498
2499 if (lto_parallelism <= 1)
2500 {
2501 do_stream_out (temp_filename, encoder);
2502 return;
2503 }
2504
2505   /* Do not run more than LTO_PARALLELISM streaming processes at once.
2506      FIXME: we ignore the jobserver's limits.  */
2507 if (lto_parallelism > 0 && nruns >= lto_parallelism)
2508 {
2509 wait_for_child ();
2510 nruns --;
2511 }
2512 /* If this is not the last parallel partition, execute new
2513 streaming process. */
2514 if (!last)
2515 {
2516 pid_t cpid = fork ();
2517
2518 if (!cpid)
2519 {
2520 setproctitle ("lto1-wpa-streaming");
2521 do_stream_out (temp_filename, encoder);
2522 exit (0);
2523 }
2524       /* Fork failed; let's do the job ourselves.  */
2525 else if (cpid == -1)
2526 do_stream_out (temp_filename, encoder);
2527 else
2528 nruns++;
2529 }
2530 /* Last partition; stream it and wait for all children to die. */
2531 else
2532 {
2533 int i;
2534 do_stream_out (temp_filename, encoder);
2535 for (i = 0; i < nruns; i++)
2536 wait_for_child ();
2537 }
2538 asm_nodes_output = true;
2539 #else
2540 do_stream_out (temp_filename, encoder);
2541 #endif
2542 }
2543
2544 /* Write all output files in WPA mode and the file with the list of
2545 LTRANS units. */
2546
2547 static void
2548 lto_wpa_write_files (void)
2549 {
2550 unsigned i, n_sets;
2551 ltrans_partition part;
2552 FILE *ltrans_output_list_stream;
2553 char *temp_filename;
2554 vec <char *>temp_filenames = vNULL;
2555 size_t blen;
2556
2557 /* Open the LTRANS output list. */
2558 if (!ltrans_output_list)
2559 fatal_error ("no LTRANS output list filename provided");
2560
2561 timevar_push (TV_WHOPR_WPA);
2562
2563 FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
2564 lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
2565
2566 timevar_pop (TV_WHOPR_WPA);
2567
2568 timevar_push (TV_WHOPR_WPA_IO);
2569
2570 /* Generate a prefix for the LTRANS unit files. */
2571 blen = strlen (ltrans_output_list);
2572 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
2573 strcpy (temp_filename, ltrans_output_list);
2574 if (blen > sizeof (".out")
2575 && strcmp (temp_filename + blen - sizeof (".out") + 1,
2576 ".out") == 0)
2577 temp_filename[blen - sizeof (".out") + 1] = '\0';
2578 blen = strlen (temp_filename);
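  /* For example, an output list named "foo.ltrans.out" yields partition
     files "foo.ltrans0.o", "foo.ltrans1.o", and so on.  */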
2579
2580 n_sets = ltrans_partitions.length ();
2581
2582 /* Sort partitions by size so small ones are compiled last.
2583 FIXME: Even when not reordering we may want to output one list for parallel make
2584      and another for the final link command.  */
2585
2586 if (!flag_profile_reorder_functions || !flag_profile_use)
2587 ltrans_partitions.qsort (flag_toplevel_reorder
2588 ? cmp_partitions_size
2589 : cmp_partitions_order);
2590
2591 for (i = 0; i < n_sets; i++)
2592 {
2593 ltrans_partition part = ltrans_partitions[i];
2594
2595       /* Write all the nodes in this partition.  */
2596 sprintf (temp_filename + blen, "%u.o", i);
2597
2598 if (!quiet_flag)
2599 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
2600 if (cgraph_dump_file)
2601 {
2602 lto_symtab_encoder_iterator lsei;
2603
2604 fprintf (cgraph_dump_file, "Writing partition %s to file %s, %i insns\n",
2605 part->name, temp_filename, part->insns);
2606 fprintf (cgraph_dump_file, " Symbols in partition: ");
2607 for (lsei = lsei_start_in_partition (part->encoder); !lsei_end_p (lsei);
2608 lsei_next_in_partition (&lsei))
2609 {
2610 symtab_node *node = lsei_node (lsei);
2611 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2612 }
2613 fprintf (cgraph_dump_file, "\n Symbols in boundary: ");
2614 for (lsei = lsei_start (part->encoder); !lsei_end_p (lsei);
2615 lsei_next (&lsei))
2616 {
2617 symtab_node *node = lsei_node (lsei);
2618 if (!lto_symtab_encoder_in_partition_p (part->encoder, node))
2619 {
2620 fprintf (cgraph_dump_file, "%s ", node->asm_name ());
2621 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
2622 if (cnode
2623 && lto_symtab_encoder_encode_body_p (part->encoder, cnode))
2624 fprintf (cgraph_dump_file, "(body included)");
2625 else
2626 {
2627 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2628 if (vnode
2629 && lto_symtab_encoder_encode_initializer_p (part->encoder, vnode))
2630 fprintf (cgraph_dump_file, "(initializer included)");
2631 }
2632 }
2633 }
2634 fprintf (cgraph_dump_file, "\n");
2635 }
2636 gcc_checking_assert (lto_symtab_encoder_size (part->encoder) || !i);
2637
2638 stream_out (temp_filename, part->encoder, i == n_sets - 1);
2639
2640 part->encoder = NULL;
2641
2642 temp_filenames.safe_push (xstrdup (temp_filename));
2643 }
2644 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
2645 if (ltrans_output_list_stream == NULL)
2646 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
2647 for (i = 0; i < n_sets; i++)
2648 {
2649 unsigned int len = strlen (temp_filenames[i]);
2650 if (fwrite (temp_filenames[i], 1, len, ltrans_output_list_stream) < len
2651 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
2652 fatal_error ("writing to LTRANS output list %s: %m",
2653 ltrans_output_list);
2654 free (temp_filenames[i]);
2655 }
2656 temp_filenames.release();
2657
2658 lto_stats.num_output_files += n_sets;
2659
2660 /* Close the LTRANS output list. */
2661 if (fclose (ltrans_output_list_stream))
2662 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
2663
2664 free_ltrans_partitions();
2665 free (temp_filename);
2666
2667 timevar_pop (TV_WHOPR_WPA_IO);
2668 }
2669
2670
2671 /* If TT is a variable or function decl replace it with its
2672 prevailing variant. */
2673 #define LTO_SET_PREVAIL(tt) \
2674 do {\
2675 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2676 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2677 { \
2678 tt = lto_symtab_prevailing_decl (tt); \
2679 fixed = true; \
2680 } \
2681 } while (0)
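/* Note that LTO_SET_PREVAIL expects a local variable FIXED to be in scope
   at each use site; see lto_fixup_prevailing_decls below.  */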
2682
2683 /* Ensure that TT isn't a replaceable var or function decl.  */
2684 #define LTO_NO_PREVAIL(tt) \
2685 gcc_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2686
2687 /* Given a tree T replace all fields referring to variables or functions
2688 with their prevailing variant. */
2689 static void
2690 lto_fixup_prevailing_decls (tree t)
2691 {
2692 enum tree_code code = TREE_CODE (t);
2693 bool fixed = false;
2694
2695 gcc_checking_assert (code != TREE_BINFO);
2696 LTO_NO_PREVAIL (TREE_TYPE (t));
2697 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
2698 LTO_NO_PREVAIL (TREE_CHAIN (t));
2699 if (DECL_P (t))
2700 {
2701 LTO_NO_PREVAIL (DECL_NAME (t));
2702 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2703 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2704 {
2705 LTO_SET_PREVAIL (DECL_SIZE (t));
2706 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2707 LTO_SET_PREVAIL (DECL_INITIAL (t));
2708 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2709 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2710 }
2711 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2712 {
2713 LTO_NO_PREVAIL (t->decl_with_vis.assembler_name);
2714 }
2715 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2716 {
2717 LTO_NO_PREVAIL (DECL_ARGUMENT_FLD (t));
2718 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2719 LTO_NO_PREVAIL (DECL_VINDEX (t));
2720 }
2721 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2722 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2723 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2724 {
2725 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2726 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2727 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2728 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2729 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2730 }
2731 }
2732 else if (TYPE_P (t))
2733 {
2734 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2735 LTO_SET_PREVAIL (TYPE_SIZE (t));
2736 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2737 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2738 LTO_NO_PREVAIL (TYPE_NAME (t));
2739
2740 LTO_SET_PREVAIL (TYPE_MINVAL (t));
2741 LTO_SET_PREVAIL (TYPE_MAXVAL (t));
2742 LTO_NO_PREVAIL (t->type_non_common.binfo);
2743
2744 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2745
2746 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2747 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2748 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2749 }
2750 else if (EXPR_P (t))
2751 {
2752 int i;
2753 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2754 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2755 }
2756 else if (TREE_CODE (t) == CONSTRUCTOR)
2757 {
2758 unsigned i;
2759 tree val;
2760 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2761 LTO_SET_PREVAIL (val);
2762 }
2763 else
2764 {
2765 switch (code)
2766 {
2767 case TREE_LIST:
2768 LTO_SET_PREVAIL (TREE_VALUE (t));
2769 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2770 LTO_NO_PREVAIL (TREE_PURPOSE (t));
2771 break;
2772 default:
2773 gcc_unreachable ();
2774 }
2775 }
2776 /* If we fixed nothing, then we missed something seen by
2777 mentions_vars_p. */
2778 gcc_checking_assert (fixed);
2779 }
2780 #undef LTO_SET_PREVAIL
2781 #undef LTO_NO_PREVAIL
2782
2783 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2784 replaces var and function decls with the corresponding prevailing def. */
2785
2786 static void
2787 lto_fixup_state (struct lto_in_decl_state *state)
2788 {
2789 unsigned i, si;
2790 struct lto_tree_ref_table *table;
2791
2792 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2793 we still need to walk from all DECLs to find the reachable
2794 FUNCTION_DECLs and VAR_DECLs. */
2795 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2796 {
2797 table = &state->streams[si];
2798 for (i = 0; i < table->size; i++)
2799 {
2800 tree *tp = table->trees + i;
2801 if (VAR_OR_FUNCTION_DECL_P (*tp)
2802 && (TREE_PUBLIC (*tp) || DECL_EXTERNAL (*tp)))
2803 *tp = lto_symtab_prevailing_decl (*tp);
2804 }
2805 }
2806 }
2807
2808 /* A callback of htab_traverse. Just extracts a state from SLOT
2809 and calls lto_fixup_state. */
2810
2811 static int
2812 lto_fixup_state_aux (void **slot, void *aux ATTRIBUTE_UNUSED)
2813 {
2814 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
2815 lto_fixup_state (state);
2816 return 1;
2817 }
2818
2819 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2820 prevailing one. */
2821
2822 static void
2823 lto_fixup_decls (struct lto_file_decl_data **files)
2824 {
2825 unsigned int i;
2826 tree t;
2827
2828 if (tree_with_vars)
2829 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2830 lto_fixup_prevailing_decls (t);
2831
2832 for (i = 0; files[i]; i++)
2833 {
2834 struct lto_file_decl_data *file = files[i];
2835 struct lto_in_decl_state *state = file->global_decl_state;
2836 lto_fixup_state (state);
2837
2838 htab_traverse (file->function_decl_states, lto_fixup_state_aux, NULL);
2839 }
2840 }
2841
2842 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2843
2844 /* Turn the file data for sub files into a single array, so that they look
2845    like separate files for further passes.  */
2846
2847 static void
2848 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2849 {
2850 struct lto_file_decl_data *n, *next;
2851 int i, k;
2852
2853 lto_stats.num_input_files = count;
2854 all_file_decl_data
2855 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2856 /* Set the hooks so that all of the ipa passes can read in their data. */
2857 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2858 for (i = 0, k = 0; i < last_file_ix; i++)
2859 {
2860 for (n = orig[i]; n != NULL; n = next)
2861 {
2862 all_file_decl_data[k++] = n;
2863 next = n->next;
2864 n->next = NULL;
2865 }
2866 }
2867 all_file_decl_data[k] = NULL;
2868 gcc_assert (k == count);
2869 }
2870
2871 /* Input file data before flattening (i.e. splitting them into subfiles to
2872    support incremental linking).  */
2873 static int real_file_count;
2874 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2875
2876 static void print_lto_report_1 (void);
2877
2878 /* Read all the symbols from the input files FNAMES. NFILES is the
2879 number of files requested in the command line. Instantiate a
2880 global call graph by aggregating all the sub-graphs found in each
2881 file. */
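/* Roughly: the files are read and flattened, the symbol table is streamed
   in, linker resolutions are attached, declarations are merged and fixed
   up, IPA summaries are read, and finally the symbol table entries are
   merged.  */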
2882
2883 static void
2884 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2885 {
2886 unsigned int i, last_file_ix;
2887 FILE *resolution;
2888 int count = 0;
2889 struct lto_file_decl_data **decl_data;
2890 void **res;
2891 symtab_node *snode;
2892
2893 init_cgraph ();
2894
2895 timevar_push (TV_IPA_LTO_DECL_IN);
2896
2897 real_file_decl_data
2898 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2899 real_file_count = nfiles;
2900
2901 /* Read the resolution file. */
2902 resolution = NULL;
2903 if (resolution_file_name)
2904 {
2905 int t;
2906 unsigned num_objects;
2907
2908 resolution = fopen (resolution_file_name, "r");
2909 if (resolution == NULL)
2910 fatal_error ("could not open symbol resolution file: %m");
2911
2912 t = fscanf (resolution, "%u", &num_objects);
2913 gcc_assert (t == 1);
2914
2915 /* True, since the plugin splits the archives. */
2916 gcc_assert (num_objects == nfiles);
2917 }
2918 cgraph_state = CGRAPH_LTO_STREAMING;
2919
2920 canonical_type_hash_cache = new pointer_map <hashval_t>;
2921 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
2922 gimple_canonical_type_eq, 0);
2923 gcc_obstack_init (&tree_scc_hash_obstack);
2924 tree_scc_hash.create (4096);
2925
2926 /* Register the common node types with the canonical type machinery so
2927 we properly share alias-sets across languages and TUs. Do not
2928      expose the common nodes as type merge targets - those that should be
2929      are already exposed by pre-loading the LTO streamer caches.
2930 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2931 for (i = 0; i < itk_none; ++i)
2932 lto_register_canonical_types (integer_types[i], true);
2933 for (i = 0; i < stk_type_kind_last; ++i)
2934 lto_register_canonical_types (sizetype_tab[i], true);
2935 for (i = 0; i < TI_MAX; ++i)
2936 lto_register_canonical_types (global_trees[i], true);
2937 for (i = 0; i < itk_none; ++i)
2938 lto_register_canonical_types (integer_types[i], false);
2939 for (i = 0; i < stk_type_kind_last; ++i)
2940 lto_register_canonical_types (sizetype_tab[i], false);
2941 for (i = 0; i < TI_MAX; ++i)
2942 lto_register_canonical_types (global_trees[i], false);
2943
2944 if (!quiet_flag)
2945 fprintf (stderr, "Reading object files:");
2946
2947 /* Read all of the object files specified on the command line. */
2948 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2949 {
2950 struct lto_file_decl_data *file_data = NULL;
2951 if (!quiet_flag)
2952 {
2953 fprintf (stderr, " %s", fnames[i]);
2954 fflush (stderr);
2955 }
2956
2957 current_lto_file = lto_obj_file_open (fnames[i], false);
2958 if (!current_lto_file)
2959 break;
2960
2961 file_data = lto_file_read (current_lto_file, resolution, &count);
2962 if (!file_data)
2963 {
2964 lto_obj_file_close (current_lto_file);
2965 free (current_lto_file);
2966 current_lto_file = NULL;
2967 break;
2968 }
2969
2970 decl_data[last_file_ix++] = file_data;
2971
2972 lto_obj_file_close (current_lto_file);
2973 free (current_lto_file);
2974 current_lto_file = NULL;
2975 }
2976
2977 lto_flatten_files (decl_data, count, last_file_ix);
2978 lto_stats.num_input_files = count;
2979 ggc_free(decl_data);
2980 real_file_decl_data = NULL;
2981
2982 if (resolution_file_name)
2983 fclose (resolution);
2984
2985 /* Show the LTO report before launching LTRANS. */
2986 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2987 print_lto_report_1 ();
2988
2989 /* Free gimple type merging datastructures. */
2990 tree_scc_hash.dispose ();
2991 obstack_free (&tree_scc_hash_obstack, NULL);
2992 htab_delete (gimple_canonical_types);
2993 gimple_canonical_types = NULL;
2994 delete canonical_type_hash_cache;
2995 canonical_type_hash_cache = NULL;
2996
2997   /* At this stage we know that the majority of GGC memory is reachable.
2998      Growing the limits prevents unnecessary invocations of the GGC.  */
2999 ggc_grow ();
3000 ggc_collect ();
3001
3002 /* Set the hooks so that all of the ipa passes can read in their data. */
3003 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
3004
3005 timevar_pop (TV_IPA_LTO_DECL_IN);
3006
3007 if (!quiet_flag)
3008 fprintf (stderr, "\nReading the callgraph\n");
3009
3010 timevar_push (TV_IPA_LTO_CGRAPH_IO);
3011 /* Read the symtab. */
3012 input_symtab ();
3013
3014 /* Store resolutions into the symbol table. */
3015
3016 FOR_EACH_SYMBOL (snode)
3017 if (symtab_real_symbol_p (snode)
3018 && snode->lto_file_data
3019 && snode->lto_file_data->resolution_map
3020 && (res = pointer_map_contains (snode->lto_file_data->resolution_map,
3021 snode->decl)))
3022 snode->resolution
3023 = (enum ld_plugin_symbol_resolution)(size_t)*res;
3024 for (i = 0; all_file_decl_data[i]; i++)
3025 if (all_file_decl_data[i]->resolution_map)
3026 {
3027 pointer_map_destroy (all_file_decl_data[i]->resolution_map);
3028 all_file_decl_data[i]->resolution_map = NULL;
3029 }
3030
3031 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
3032
3033 if (!quiet_flag)
3034 fprintf (stderr, "Merging declarations\n");
3035
3036 timevar_push (TV_IPA_LTO_DECL_MERGE);
3037   /* Merge global decls.  In LTRANS mode we read the merged cgraph, so we do
3038      not need to care about resolving symbols again; we only need to replace
3039      duplicated declarations read from the callgraph and from function
3040      sections.  */
3041 if (!flag_ltrans)
3042 {
3043 lto_symtab_merge_decls ();
3044
3045 /* If there were errors during symbol merging bail out, we have no
3046 good way to recover here. */
3047 if (seen_error ())
3048 fatal_error ("errors during merging of translation units");
3049
3050 /* Fixup all decls. */
3051 lto_fixup_decls (all_file_decl_data);
3052 }
3053 if (tree_with_vars)
3054 ggc_free (tree_with_vars);
3055 tree_with_vars = NULL;
3056 ggc_collect ();
3057
3058 timevar_pop (TV_IPA_LTO_DECL_MERGE);
3059 /* Each pass will set the appropriate timer. */
3060
3061 if (!quiet_flag)
3062 fprintf (stderr, "Reading summaries\n");
3063
3064 /* Read the IPA summary data. */
3065 if (flag_ltrans)
3066 ipa_read_optimization_summaries ();
3067 else
3068 ipa_read_summaries ();
3069
3070 for (i = 0; all_file_decl_data[i]; i++)
3071 {
3072 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
3073 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
3074 all_file_decl_data[i]->symtab_node_encoder = NULL;
3075 lto_free_function_in_decl_state (all_file_decl_data[i]->global_decl_state);
3076 all_file_decl_data[i]->global_decl_state = NULL;
3077 all_file_decl_data[i]->current_decl_state = NULL;
3078 }
3079
3080 /* Finally merge the cgraph according to the decl merging decisions. */
3081 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
3082 if (cgraph_dump_file)
3083 {
3084 fprintf (cgraph_dump_file, "Before merging:\n");
3085 dump_symtab (cgraph_dump_file);
3086 }
3087 lto_symtab_merge_symbols ();
3088   /* Removal of unreachable symbols is needed to make verify_symtab pass;
3089      we still have duplicated comdat groups containing local statics.
3090 We could also just remove them while merging. */
3091 symtab_remove_unreachable_nodes (false, dump_file);
3092 ggc_collect ();
3093 cgraph_state = CGRAPH_STATE_IPA_SSA;
3094
3095 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
3096
3097 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
3098
3099 /* Indicate that the cgraph is built and ready. */
3100 cgraph_function_flags_ready = true;
3101
3102 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
3103 ggc_free (all_file_decl_data);
3104 all_file_decl_data = NULL;
3105 }
3106
3107
3108 /* Materialize all the bodies for all the nodes in the callgraph. */
3109
3110 static void
3111 materialize_cgraph (void)
3112 {
3113 struct cgraph_node *node;
3114 timevar_id_t lto_timer;
3115
3116 if (!quiet_flag)
3117 fprintf (stderr,
3118 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
3119
3120 /* Now that we have input the cgraph, we need to clear all of the aux
3121 nodes and read the functions if we are not running in WPA mode. */
3122 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3123
3124 FOR_EACH_FUNCTION (node)
3125 {
3126 if (node->lto_file_data)
3127 {
3128 lto_materialize_function (node);
3129 lto_stats.num_input_cgraph_nodes++;
3130 }
3131 }
3132
3133 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3134
3135 /* Start the appropriate timer depending on the mode that we are
3136 operating in. */
3137 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
3138 : (flag_ltrans) ? TV_WHOPR_LTRANS
3139 : TV_LTO;
3140 timevar_push (lto_timer);
3141
3142 current_function_decl = NULL;
3143 set_cfun (NULL);
3144
3145 if (!quiet_flag)
3146 fprintf (stderr, "\n");
3147
3148 timevar_pop (lto_timer);
3149 }
3150
3151
3152 /* Show various memory usage statistics related to LTO. */
3153 static void
3154 print_lto_report_1 (void)
3155 {
3156 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3157 fprintf (stderr, "%s statistics\n", pfx);
3158
3159 fprintf (stderr, "[%s] read %lu SCCs of average size %f\n",
3160 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3161 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx, total_scc_size);
3162 if (flag_wpa && tree_scc_hash.is_created ())
3163 {
3164 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3165 "collision ratio: %f\n", pfx,
3166 (long) tree_scc_hash.size (),
3167 (long) tree_scc_hash.elements (),
3168 tree_scc_hash.collisions ());
3169 hash_table<tree_scc_hasher>::iterator hiter;
3170 tree_scc *scc, *max_scc = NULL;
3171 unsigned max_length = 0;
3172 FOR_EACH_HASH_TABLE_ELEMENT (tree_scc_hash, scc, x, hiter)
3173 {
3174 unsigned length = 0;
3175 tree_scc *s = scc;
3176 for (; s; s = s->next)
3177 length++;
3178 if (length > max_length)
3179 {
3180 max_length = length;
3181 max_scc = scc;
3182 }
3183 }
3184 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3185 pfx, max_length, max_scc->len);
3186 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3187 num_scc_compares, num_scc_compare_collisions,
3188 num_scc_compare_collisions / (double) num_scc_compares);
3189 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3190 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3191 total_scc_size_merged);
3192 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3193 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3194 pfx, num_prevailing_types, num_type_scc_trees);
3195 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3196 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3197 (long) htab_size (gimple_canonical_types),
3198 (long) htab_elements (gimple_canonical_types),
3199 (long) gimple_canonical_types->searches,
3200 (long) gimple_canonical_types->collisions,
3201 htab_collisions (gimple_canonical_types));
3202 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3203 "%lu elements, %ld searches\n", pfx,
3204 num_canonical_type_hash_entries,
3205 num_canonical_type_hash_queries);
3206 }
3207
3208 print_lto_report (pfx);
3209 }
3210
3211 /* Perform whole program analysis (WPA) on the callgraph and write out the
3212 optimization plan. */
3213
3214 static void
3215 do_whole_program_analysis (void)
3216 {
3217 symtab_node *node;
3218
3219 lto_parallelism = 1;
3220
3221   /* TODO: jobserver communication is not supported, yet.  */
3222 if (!strcmp (flag_wpa, "jobserver"))
3223 lto_parallelism = -1;
3224 else
3225 {
3226 lto_parallelism = atoi (flag_wpa);
3227 if (lto_parallelism <= 0)
3228 lto_parallelism = 0;
3229 }
3230
3231 timevar_start (TV_PHASE_OPT_GEN);
3232
3233 /* Note that since we are in WPA mode, materialize_cgraph will not
3234 actually read in all the function bodies. It only materializes
3235 the decls and cgraph nodes so that analysis can be performed. */
3236 materialize_cgraph ();
3237
3238 /* Reading in the cgraph uses different timers, start timing WPA now. */
3239 timevar_push (TV_WHOPR_WPA);
3240
3241 if (pre_ipa_mem_report)
3242 {
3243 fprintf (stderr, "Memory consumption before IPA\n");
3244 dump_memory_report (false);
3245 }
3246
3247 cgraph_function_flags_ready = true;
3248
3249 if (cgraph_dump_file)
3250 dump_symtab (cgraph_dump_file);
3251 bitmap_obstack_initialize (NULL);
3252 cgraph_state = CGRAPH_STATE_IPA_SSA;
3253
3254 execute_ipa_pass_list (g->get_passes ()->all_regular_ipa_passes);
3255 symtab_remove_unreachable_nodes (false, dump_file);
3256
3257 if (cgraph_dump_file)
3258 {
3259 fprintf (cgraph_dump_file, "Optimized ");
3260 dump_symtab (cgraph_dump_file);
3261 }
3262 #ifdef ENABLE_CHECKING
3263 verify_symtab ();
3264 #endif
3265 bitmap_obstack_release (NULL);
3266
3267 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
3268 timevar_pop (TV_WHOPR_WPA);
3269
3270 timevar_push (TV_WHOPR_PARTITIONING);
3271 if (flag_lto_partition == LTO_PARTITION_1TO1)
3272 lto_1_to_1_map ();
3273 else if (flag_lto_partition == LTO_PARTITION_MAX)
3274 lto_max_map ();
3275 else if (flag_lto_partition == LTO_PARTITION_ONE)
3276 lto_balanced_map (1);
3277 else if (flag_lto_partition == LTO_PARTITION_BALANCED)
3278 lto_balanced_map (PARAM_VALUE (PARAM_LTO_PARTITIONS));
3279 else
3280 gcc_unreachable ();
3281
3282 /* Inline summaries are needed for balanced partitioning. Free them now so
3283 the memory can be used for streamer caches. */
3284 inline_free_summary ();
3285
3286   /* AUX pointers are used by the partitioning code to keep track of the
3287      number of partitions a symbol is in.  This is no longer needed.  */
3288 FOR_EACH_SYMBOL (node)
3289 node->aux = NULL;
3290
3291 lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
3292
3293 /* Find out statics that need to be promoted
3294 to globals with hidden visibility because they are accessed from multiple
3295 partitions. */
3296 lto_promote_cross_file_statics ();
3297 timevar_pop (TV_WHOPR_PARTITIONING);
3298
3299 timevar_stop (TV_PHASE_OPT_GEN);
3300
3301   /* Collect one last time - in lto_wpa_write_files we may end up forking
3302      with the idea that this doesn't increase memory usage.  So we
3303      absolutely do not want to collect after that.  */
3304 ggc_collect ();
3305
3306 timevar_start (TV_PHASE_STREAM_OUT);
3307 if (!quiet_flag)
3308 {
3309 fprintf (stderr, "\nStreaming out");
3310 fflush (stderr);
3311 }
3312 lto_wpa_write_files ();
3313 if (!quiet_flag)
3314 fprintf (stderr, "\n");
3315 timevar_stop (TV_PHASE_STREAM_OUT);
3316
3317 if (post_ipa_mem_report)
3318 {
3319 fprintf (stderr, "Memory consumption after IPA\n");
3320 dump_memory_report (false);
3321 }
3322
3323 /* Show the LTO report before launching LTRANS. */
3324 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3325 print_lto_report_1 ();
3326 if (mem_report_wpa)
3327 dump_memory_report (true);
3328 }
3329
3330
3331 static GTY(()) tree lto_eh_personality_decl;
3332
3333 /* Return the LTO personality function decl. */
3334
3335 tree
3336 lto_eh_personality (void)
3337 {
3338 if (!lto_eh_personality_decl)
3339 {
3340 /* Use the first personality DECL for our personality if we don't
3341 support multiple ones. This ensures that we don't artificially
3342 create the need for them in a single-language program. */
3343 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3344 lto_eh_personality_decl = first_personality_decl;
3345 else
3346 lto_eh_personality_decl = lhd_gcc_personality ();
3347 }
3348
3349 return lto_eh_personality_decl;
3350 }
3351
3352 /* Set the process name based on the LTO mode. */
3353
3354 static void
3355 lto_process_name (void)
3356 {
3357 if (flag_lto)
3358 setproctitle ("lto1-lto");
3359 if (flag_wpa)
3360 setproctitle ("lto1-wpa");
3361 if (flag_ltrans)
3362 setproctitle ("lto1-ltrans");
3363 }
3364
3365
3366 /* Initialize the LTO front end. */
3367
3368 static void
3369 lto_init (void)
3370 {
3371 lto_process_name ();
3372 lto_streamer_hooks_init ();
3373 lto_reader_init ();
3374 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3375 memset (&lto_stats, 0, sizeof (lto_stats));
3376 bitmap_obstack_initialize (NULL);
3377 gimple_register_cfg_hooks ();
3378 }
3379
3380
3381 /* Main entry point for the GIMPLE front end. This front end has
3382 three main personalities:
3383
3384 - LTO (-flto). All the object files on the command line are
3385 loaded in memory and processed as a single translation unit.
3386 This is the traditional link-time optimization behavior.
3387
3388 - WPA (-fwpa). Only the callgraph and summary information for
3389 files in the command file are loaded. A single callgraph
3390 (without function bodies) is instantiated for the whole set of
3391 files. IPA passes are only allowed to analyze the call graph
3392 and make transformation decisions. The callgraph is
3393 partitioned, each partition is written to a new object file
3394 together with the transformation decisions.
3395
3396    - LTRANS (-fltrans).  Similar to -flto but it prevents the IPA
3397      analysis passes from running again.  Since WPA computed summary
3398 information and decided what transformations to apply, LTRANS
3399 simply applies them. */
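/* The -fwpa and -fltrans options are normally passed to lto1 by the driver
   and lto-wrapper rather than by the user directly.  */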
3400
3401 void
3402 lto_main (void)
3403 {
3404 /* LTO is called as a front end, even though it is not a front end.
3405 Because it is called as a front end, TV_PHASE_PARSING and
3406 TV_PARSE_GLOBAL are active, and we need to turn them off while
3407 doing LTO. Later we turn them back on so they are active up in
3408 toplev.c. */
3409 timevar_pop (TV_PARSE_GLOBAL);
3410 timevar_stop (TV_PHASE_PARSING);
3411
3412 timevar_start (TV_PHASE_SETUP);
3413
3414 /* Initialize the LTO front end. */
3415 lto_init ();
3416
3417 timevar_stop (TV_PHASE_SETUP);
3418 timevar_start (TV_PHASE_STREAM_IN);
3419
3420 /* Read all the symbols and call graph from all the files in the
3421 command line. */
3422 read_cgraph_and_symbols (num_in_fnames, in_fnames);
3423
3424 timevar_stop (TV_PHASE_STREAM_IN);
3425
3426 if (!seen_error ())
3427 {
3428 /* If WPA is enabled analyze the whole call graph and create an
3429 optimization plan. Otherwise, read in all the function
3430 bodies and continue with optimization. */
3431 if (flag_wpa)
3432 do_whole_program_analysis ();
3433 else
3434 {
3435 timevar_start (TV_PHASE_OPT_GEN);
3436
3437 materialize_cgraph ();
3438 if (!flag_ltrans)
3439 lto_promote_statics_nonwpa ();
3440
3441 /* Let the middle end know that we have read and merged all of
3442 the input files. */
3443 compile ();
3444
3445 timevar_stop (TV_PHASE_OPT_GEN);
3446
3447 /* FIXME lto, if the processes spawned by WPA fail, we miss
3448 the chance to print WPA's report, so WPA will call
3449 print_lto_report before launching LTRANS. If LTRANS was
3450 launched directly by the driver we would not need to do
3451 this. */
3452 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
3453 print_lto_report_1 ();
3454 }
3455 }
3456
3457 /* Here we make LTO pretend to be a parser. */
3458 timevar_start (TV_PHASE_PARSING);
3459 timevar_push (TV_PARSE_GLOBAL);
3460 }
3461
3462 #include "gt-lto-lto.h"